import fs from "fs";
import path from "path";
import csv from "csvtojson";
import useGqlHandler from "./useGqlHandler";
import { fields, formSubmissionDataA, formSubmissionDataB } from "./mocks/form.mocks";
jest.setTimeout(100000);
describe('Form Builder "Form" Test', () => {
const {
until,
install,
installFileManager,
createForm,
deleteForm,
updateRevision,
publishRevision,
unpublishRevision,
deleteRevision,
createRevisionFrom,
saveFormView,
getForm,
getFormRevisions,
listForms,
getPublishedForm,
createFormSubmission,
listFormSubmissions,
exportFormSubmissions,
defaultIdentity
} = useGqlHandler();
// Install Form Builder and File Manager before every test; FM settings are
// required later when exporting form submissions.
beforeEach(async () => {
    try {
        // Run FB installer
        await install();
        // Run FM installer (we'll need to have FM settings to perform submissions export)
        await installFileManager({ srcPrefix: "https://some.domain.com/files/" });
    } catch (e) {
        // NOTE(review): installer failures are only logged, so tests continue and
        // fail later with less obvious errors — consider rethrowing here.
        console.log(e);
    }
});
test("should create a form and return it in the list of latest forms", async () => {
    // Create a form; the first revision always gets suffix "#0001".
    const [create] = await createForm({ data: { name: "contact-us" } });
    const { id } = create.data.formBuilder.createForm.data;
    expect(id.split("#")[1]).toBe("0001");

    expect(create).toMatchObject({
        data: {
            formBuilder: {
                createForm: {
                    data: {
                        id: expect.any(String),
                        // Bare RegExp values are not asymmetric matchers inside
                        // toMatchObject — wrap them in expect.stringMatching so
                        // the "20xx" timestamp prefix is actually asserted.
                        createdOn: expect.stringMatching(/^20/),
                        savedOn: expect.stringMatching(/^20/),
                        status: "draft",
                        createdBy: defaultIdentity,
                        ownedBy: defaultIdentity
                    },
                    error: null
                }
            }
        }
    });

    // The list index is eventually consistent — poll until the form appears.
    await until(
        () => listForms().then(([data]) => data),
        ({ data }: any) => data.formBuilder.listForms.data.length > 0
    );

    const [list] = await listForms();
    const { data } = list.data.formBuilder.listForms;
    expect(data.length).toBe(1);
    expect(data[0].id).toEqual(id);
});
// Updating a revision must be reflected both on direct GET and in the list index.
test("should update form and return new data from storage", async () => {
    const [create] = await createForm({ data: { name: "contact-us" } });
    const { id } = create.data.formBuilder.createForm.data;

    // New name plus a field layout (rows of field ids).
    const newData = {
        name: "New name",
        layout: [["QIspyfQRx", "AVoKqyAuH"], ["fNJag3ZdX"]]
    };
    const [update] = await updateRevision({ revision: id, data: newData });
    expect(update.data.formBuilder.updateRevision.data).toMatchObject(newData);

    // The list index is eventually consistent — poll until it reflects the rename.
    await until(
        () => listForms().then(([data]) => data),
        ({ data }: any) => data.formBuilder.listForms.data[0].name === newData.name,
        {
            name: "list forms after update revision"
        }
    );

    // Direct GET must return the updated data...
    const [get] = await getForm({ revision: id });
    expect(get.data.formBuilder.getForm.data).toMatchObject(newData);

    // ...and the list must contain exactly the one, renamed form.
    const [list] = await listForms();
    const { data } = list.data.formBuilder.listForms;
    expect(data.length).toBe(1);
    expect(data[0].name).toEqual(newData.name);
});
// The list index shows one entry per form, pointing at its "latest" revision.
// Deleting the latest revision must repoint "latest" to the next newest one.
test(`should correctly update the "latest" revision when a revision is deleted`, async () => {
    const [create] = await createForm({ data: { name: "contact-us" } });
    const { id } = create.data.formBuilder.createForm.data;

    // Poll until the freshly created form is visible in the list index.
    await until(
        () => listForms().then(([data]) => data),
        ({ data }: any) => data.formBuilder.listForms.data.length > 0,
        {
            name: "after create form"
        }
    );

    // Create 2 new revisions
    const [create2] = await createRevisionFrom({ revision: id });
    const { id: id2 } = create2.data.formBuilder.createRevisionFrom.data;
    const [create3] = await createRevisionFrom({ revision: id });
    const { id: id3 } = create3.data.formBuilder.createRevisionFrom.data;

    await until(
        () => listForms().then(([data]) => data),
        ({ data }: any) => data.formBuilder.listForms.data[0].id === id3,
        {
            name: "after create revisions"
        }
    );

    // List shows a single entry whose id is the newest revision (#3).
    const [list] = await listForms();
    const { data: data1 } = list.data.formBuilder.listForms;
    expect(data1.length).toBe(1);
    expect(data1[0].id).toEqual(id3);

    // Delete latest revision
    const [deleteRevisionResponse] = await deleteRevision({ revision: id3 });
    expect(deleteRevisionResponse).toEqual({
        data: {
            formBuilder: {
                deleteRevision: {
                    data: true,
                    error: null
                }
            }
        }
    });

    await until(
        () => listForms().then(([data]) => data),
        ({ data }: any) => data.formBuilder.listForms.data[0].id === id2,
        {
            name: "after delete revision 3"
        }
    );

    // Make sure revision #2 is now "latest"
    const [list2] = await listForms();
    const { data: data2 } = list2.data.formBuilder.listForms;
    expect(data2.length).toBe(1);
    expect(data2[0].id).toEqual(id2);

    // Delete revision #1; Revision #2 should still be "latest"
    const [deleteRevision1Response] = await deleteRevision({ revision: id });
    expect(deleteRevision1Response).toEqual({
        data: {
            formBuilder: {
                deleteRevision: {
                    data: true,
                    error: null
                }
            }
        }
    });

    // Get revisions #2 and verify it's the only remaining revision of this form
    const [get] = await getFormRevisions({ id: id2 });
    const { data: revisions } = get.data.formBuilder.getFormRevisions;
    expect(revisions.length).toBe(1);
    expect(revisions[0].id).toEqual(id2);
    expect(revisions[0].version).toEqual(2);
});
// Deleting the form (not a single revision) must remove all of its revisions.
test("should delete a form and all of its revisions", async () => {
    const [create] = await createForm({ data: { name: "contact-us" } });
    const { id } = create.data.formBuilder.createForm.data;
    // Create 2 new revisions
    await createRevisionFrom({ revision: id });
    await createRevisionFrom({ revision: id });
    // Delete the whole form
    await deleteForm({ id });
    // Poll until the deletion has propagated to the list index.
    await until(
        () => listForms().then(([data]) => data),
        ({ data }: any) => data.formBuilder.listForms.data.length === 0,
        {
            name: "list after delete form"
        }
    );
    // Neither a direct GET of revision #1 nor the list may return anything.
    const [get] = await getForm({ revision: id });
    expect(get.data.formBuilder.getForm.data).toBe(null);
    const [list] = await listForms();
    expect(list.data.formBuilder.listForms.data.length).toBe(0);
});
// Verifies the publish/unpublish lifecycle plus per-revision and overall view
// stats. Form ids look like "<formId>#<version>" (see the "#0001" assertion in
// the create test), so "parent" below is the id portion before the "#".
test("should publish, add views and unpublish", async () => {
    const [create] = await createForm({ data: { name: "contact-us" } });
    const { id } = create.data.formBuilder.createForm.data;
    // Publish revision #1
    await publishRevision({ revision: id });
    await until(
        () => listForms().then(([data]) => data),
        ({ data }: any) => data.formBuilder.listForms.data[0].id === id,
        {
            name: "list forms after publish revision"
        }
    );
    // Get the published form
    const [{ data: get }] = await getPublishedForm({ revision: id });
    expect(get.formBuilder.getPublishedForm.data.id).toEqual(id);
    // Create a new revision
    const [create2] = await createRevisionFrom({ revision: id });
    const { id: id2 } = create2.data.formBuilder.createRevisionFrom.data;
    await until(
        () => listForms().then(([data]) => data),
        ({ data }: any) => data.formBuilder.listForms.data[0].id === id2
    );
    // Latest published form should still be #1 (revision #2 is an unpublished draft)
    const [latestPublished] = await getPublishedForm({ parent: id.split("#")[0] });
    expect(latestPublished.data.formBuilder.getPublishedForm.data.id).toEqual(id);
    // Latest revision should be #2
    const [list] = await listForms();
    const { data } = list.data.formBuilder.listForms;
    expect(data.length).toBe(1);
    expect(data[0].id).toEqual(id2);
    // Increment views for #1
    await saveFormView({ revision: id });
    await saveFormView({ revision: id });
    await saveFormView({ revision: id });
    // Verify stats for #1
    const [{ data: get2 }] = await getForm({ revision: id });
    expect(get2.formBuilder.getForm.data.stats.views).toEqual(3);
    // Publish revision #2
    await publishRevision({ revision: id2 });
    // Latest published form should now be #2
    const [latestPublished2] = await getPublishedForm({ parent: id.split("#")[0] });
    expect(latestPublished2.data.formBuilder.getPublishedForm.data.id).toEqual(id2);
    // Increment views for #2
    await saveFormView({ revision: id2 });
    await saveFormView({ revision: id2 });
    // Verify stats for #2
    const [{ data: get3 }] = await getForm({ revision: id2 });
    expect(get3.formBuilder.getForm.data.stats.views).toEqual(2);
    // Verify overall stats (3 views on #1 + 2 views on #2)
    expect(get3.formBuilder.getForm.data.overallStats.views).toEqual(5);
    // Unpublish #2
    await unpublishRevision({ revision: id2 });
    // Latest published form should now again be #1
    const [latestPublished3] = await getPublishedForm({ parent: id.split("#")[0] });
    expect(latestPublished3.data.formBuilder.getPublishedForm.data.id).toEqual(id);
});
// Creates two submissions on a published form, lists them, exports them to a
// CSV file (written next to this test) and verifies the CSV contents.
test("should create, list and export submissions to file", async () => {
    const [create] = await createForm({ data: { name: "contact-us" } });
    const { id } = create.data.formBuilder.createForm.data;
    // Add fields definitions
    await updateRevision({ revision: id, data: { fields } });
    await publishRevision({ revision: id });
    // NOTE(review): fixed 2s sleeps — presumably to let the publish propagate and
    // to keep the two submissions' timestamps apart for sorting. TODO confirm.
    await new Promise(res => setTimeout(res, 2000));
    // Create form submissions
    const [createSubmission1Response] = await createFormSubmission({
        revision: id,
        data: formSubmissionDataA.data,
        meta: formSubmissionDataA.meta
    });
    expect(createSubmission1Response).toMatchObject({
        data: {
            formBuilder: {
                createFormSubmission: {
                    data: expect.any(Object),
                    error: null
                }
            }
        }
    });
    await new Promise(res => setTimeout(res, 2000));
    const [createSubmission2Response] = await createFormSubmission({
        revision: id,
        data: formSubmissionDataB.data,
        meta: formSubmissionDataB.meta
    });
    expect(createSubmission2Response).toMatchObject({
        data: {
            formBuilder: {
                createFormSubmission: {
                    data: expect.any(Object),
                    error: null
                }
            }
        }
    });
    // NOTE(review): the poll sorts by "savedOn_ASC" (plain string) while the
    // final list below sorts by ["createdOn_ASC"] (array) — confirm both forms
    // are intentional and supported by the API.
    await until(
        () => listFormSubmissions({ form: id, sort: "savedOn_ASC" }).then(([data]) => data),
        ({ data }: any) => data.formBuilder.listFormSubmissions.data.length === 2,
        {
            name: "after create submission"
        }
    );
    // Load submissions
    const [submissions] = await listFormSubmissions({ form: id, sort: ["createdOn_ASC"] });
    const list = submissions.data.formBuilder.listFormSubmissions;
    expect(list.data.length).toBe(2);
    expect(list.meta.totalCount).toBe(2);
    // Export submissions
    const [exportCSV] = await exportFormSubmissions({ form: id });
    expect(exportCSV).toMatchObject({
        data: {
            formBuilder: {
                exportFormSubmissions: {
                    data: expect.any(Object),
                    error: null
                }
            }
        }
    });
    // The export URL uses the srcPrefix configured in beforeEach.
    const { data } = exportCSV.data.formBuilder.exportFormSubmissions;
    expect(data).toMatchObject({
        src: `https://some.domain.com/files/form_submissions_export.csv`,
        key: "form_submissions_export.csv"
    });
    // Parse CSV and verify there are 2 submissions
    // (rows are expected newest-first: submission B on row 0, A on row 1).
    const csvFile = path.join(__dirname, data.key);
    const json = await csv({ output: "csv" }).fromFile(csvFile);
    expect(json.length).toBe(2);
    expect(json[0].sort()).toEqual(Object.values(formSubmissionDataB.data).sort());
    expect(json[1].sort()).toEqual(Object.values(formSubmissionDataA.data).sort());
    // Clean up the exported file written next to this test.
    fs.unlinkSync(csvFile);
});
// Creates three forms, deletes one form entirely and one revision of another,
// and verifies the remaining forms stay visible with the right "latest" ids.
it("should create multiple forms and delete a single one - rest should be visible", async () => {
    /**
     * First we create three forms.
     */
    const [createForm1Response] = await createForm({
        data: {
            name: "form 1"
        }
    });
    expect(createForm1Response).toMatchObject({
        data: {
            formBuilder: {
                createForm: {
                    data: {
                        name: "form 1"
                    },
                    error: null
                }
            }
        }
    });
    const form1 = createForm1Response.data.formBuilder.createForm.data;
    const [createForm2Response] = await createForm({
        data: {
            name: "form 2"
        }
    });
    expect(createForm2Response).toMatchObject({
        data: {
            formBuilder: {
                createForm: {
                    data: {
                        name: "form 2"
                    },
                    error: null
                }
            }
        }
    });
    const form2 = createForm2Response.data.formBuilder.createForm.data;
    const [createForm3Response] = await createForm({
        data: {
            name: "form 3"
        }
    });
    expect(createForm3Response).toMatchObject({
        data: {
            formBuilder: {
                createForm: {
                    data: {
                        name: "form 3"
                    },
                    error: null
                }
            }
        }
    });
    const form3 = createForm3Response.data.formBuilder.createForm.data;
    // NOTE(review): `every` is vacuously true on an empty array, so these polls
    // can resolve before anything is indexed — a length check would be stricter.
    await until(
        () => listForms().then(([data]) => data),
        ({ data }: any) => {
            return (data.formBuilder.listForms.data as any[]).every(form => {
                return [form1.id, form2.id, form3.id].includes(form.id);
            });
        }
    );
    /**
     * Publish form 2 so we can create new revision.
     */
    const [publishForm2Response] = await publishRevision({
        revision: form2.id
    });
    expect(publishForm2Response).toMatchObject({
        data: {
            formBuilder: {
                publishRevision: {
                    data: {
                        name: "form 2",
                        published: true,
                        stats: {
                            submissions: 0,
                            views: 0
                        },
                        status: "published",
                        version: 1
                    },
                    error: null
                }
            }
        }
    });
    /**
     * We need a new revision of form 2
     */
    const [createRevisionForm2Response] = await createRevisionFrom({
        revision: form2.id
    });
    expect(createRevisionForm2Response).toMatchObject({
        data: {
            formBuilder: {
                createRevisionFrom: {
                    data: {
                        id: `${form2.formId}#0002`,
                        version: 2,
                        published: false,
                        status: "draft"
                    },
                    error: null
                }
            }
        }
    });
    // Deleting the whole form removes all of its revisions (#0001 and #0002).
    const [deleteForm2Response] = await deleteForm({
        id: form2.id
    });
    expect(deleteForm2Response).toEqual({
        data: {
            formBuilder: {
                deleteForm: {
                    data: true,
                    error: null
                }
            }
        }
    });
    /**
     * We must have form 1 and form 3
     */
    await until(
        () => listForms().then(([data]) => data),
        ({ data }: any) => {
            return (data.formBuilder.listForms.data as any[]).every(form => {
                return [form1.id, form3.id].includes(form.id);
            });
        }
    );
    /**
     * Publish form 1 so we can create new revision.
     */
    const [publishForm1Response] = await publishRevision({
        revision: form1.id
    });
    expect(publishForm1Response).toMatchObject({
        data: {
            formBuilder: {
                publishRevision: {
                    data: {
                        name: "form 1",
                        published: true,
                        stats: {
                            submissions: 0,
                            views: 0
                        },
                        status: "published",
                        version: 1
                    },
                    error: null
                }
            }
        }
    });
    /**
     * Create new revision of the first form.
     */
    const [createRevisionForm1Response] = await createRevisionFrom({
        revision: form1.id
    });
    expect(createRevisionForm1Response).toMatchObject({
        data: {
            formBuilder: {
                createRevisionFrom: {
                    data: {
                        id: `${form1.formId}#0002`,
                        version: 2,
                        published: false,
                        status: "draft"
                    },
                    error: null
                }
            }
        }
    });
    /**
     * Publish it and then create new revision again.
     */
    const [publishForm1ResponseSecond] = await publishRevision({
        revision: `${form1.formId}#0002`
    });
    expect(publishForm1ResponseSecond).toMatchObject({
        data: {
            formBuilder: {
                publishRevision: {
                    data: {
                        name: "form 1",
                        published: true,
                        stats: {
                            submissions: 0,
                            views: 0
                        },
                        status: "published",
                        version: 2
                    },
                    error: null
                }
            }
        }
    });
    const [createRevisionForm1ResponseSecond] = await createRevisionFrom({
        revision: `${form1.formId}#0002`
    });
    expect(createRevisionForm1ResponseSecond).toMatchObject({
        data: {
            formBuilder: {
                createRevisionFrom: {
                    data: {
                        id: `${form1.formId}#0003`,
                        version: 3,
                        published: false,
                        status: "draft"
                    },
                    error: null
                }
            }
        }
    });
    /**
     * Delete the last revision of the form.
     */
    const [deleteRevisionForm1Response] = await deleteRevision({
        revision: `${form1.formId}#0003`
    });
    expect(deleteRevisionForm1Response).toMatchObject({
        data: {
            formBuilder: {
                deleteRevision: {
                    data: true,
                    error: null
                }
            }
        }
    });
    /**
     * We must have form 1 and form 3
     */
    // After deleting revision #0003, form 1's "latest" must fall back to #0002.
    await until(
        () => listForms().then(([data]) => data),
        ({ data }: any) => {
            return (data.formBuilder.listForms.data as any[]).every(form => {
                return [`${form1.formId}#0002`, form3.id].includes(form.id);
            });
        }
    );
});
// Creates a long chain of revisions (more than one "page" worth) and verifies
// getFormRevisions returns them all, sorted in creation order (v1..v25).
it("should properly sort form revisions", async () => {
    const name = "test form";
    const [formResponse] = await createForm({
        data: {
            name
        }
    });
    expect(formResponse).toMatchObject({
        data: {
            formBuilder: {
                createForm: {
                    data: {
                        name
                    },
                    error: null
                }
            }
        }
    });
    const form = formResponse.data.formBuilder.createForm.data;
    const revisions: string[] = [form.id];
    const total = 25;
    /**
     * Now we need to create 20+ revisions
     */
    // Each revision is created from the previous one, so ids form a chain and
    // the expected version number is always i + 1.
    for (let i = revisions.length; i < total; i++) {
        const prev = revisions[i - 1];
        const [createRevisionResponse] = await createRevisionFrom({
            revision: prev
        });
        expect(createRevisionResponse).toMatchObject({
            data: {
                formBuilder: {
                    createRevisionFrom: {
                        data: {
                            name,
                            version: i + 1
                        },
                        error: null
                    }
                }
            }
        });
        revisions.push(createRevisionResponse.data.formBuilder.createRevisionFrom.data.id);
    }
    expect(revisions).toHaveLength(total);
    // Poll until all revisions are visible.
    await until(
        () =>
            getFormRevisions({
                id: form.id
            }).then(([data]) => data),
        ({ data }: any) => {
            return data.formBuilder.getFormRevisions.data.length === revisions.length;
        }
    );
    const [listRevisionsResponse] = await getFormRevisions({
        id: form.id
    });
    // Revisions must come back in the same order they were created.
    expect(listRevisionsResponse).toMatchObject({
        data: {
            formBuilder: {
                getFormRevisions: {
                    data: revisions.map(rev => {
                        return {
                            id: rev
                        };
                    }),
                    error: null
                }
            }
        }
    });
});
});
import type { ITelemetryLogger } from "@fluidframework/common-definitions";
import {
assert,
stringToBuffer,
Uint8ArrayToString,
} from "@fluidframework/common-utils";
import {
IDocumentStorageService,
ISummaryContext,
IDocumentStorageServicePolicies,
LoaderCachingPolicy,
} from "@fluidframework/driver-definitions";
import { buildHierarchy } from "@fluidframework/protocol-base";
import {
ICreateBlobResponse,
ISnapshotTree,
ISnapshotTreeEx,
ISummaryHandle,
ISummaryTree,
ITree,
IVersion,
} from "@fluidframework/protocol-definitions";
import {
convertWholeFlatSummaryToSnapshotTreeAndBlobs,
GitManager,
ISummaryUploadManager,
SummaryTreeUploadManager,
WholeSummaryUploadManager,
} from "@fluidframework/server-services-client";
import { PerformanceEvent } from "@fluidframework/telemetry-utils";
import { DocumentStorageServiceProxy, PrefetchDocumentStorageService } from "@fluidframework/driver-utils";
import { RetriableGitManager } from "./retriableGitManager";
import { IRouterliciousDriverPolicies } from "./policies";
import { ICache, InMemoryCache } from "./cache";
/**
 * Document access to underlying storage for routerlicious driver.
 * Uploads summaries piece-by-piece traversing the tree recursively.
 * Downloads summaries as individual git trees/blobs.
 */
class ShreddedSummaryDocumentStorageService implements IDocumentStorageService {
    // The values of this cache is useless. We only need the keys. So we are always putting
    // empty strings as values.
    protected readonly blobsShaCache = new Map<string, string>();
    private readonly summaryUploadManager: ISummaryUploadManager;

    public get repositoryUrl(): string {
        return "";
    }

    constructor(
        protected readonly id: string,
        protected readonly manager: GitManager,
        protected readonly logger: ITelemetryLogger,
        public readonly policies: IDocumentStorageServicePolicies = {}) {
        this.summaryUploadManager = new SummaryTreeUploadManager(
            new RetriableGitManager(manager, logger),
            this.blobsShaCache,
            this.getPreviousFullSnapshot.bind(this),
        );
    }

    /**
     * Lists up to `count` versions (commits) of `versionId`, falling back to the
     * document id when no version id is given.
     */
    public async getVersions(versionId: string, count: number): Promise<IVersion[]> {
        const id = versionId ? versionId : this.id;
        const commits = await PerformanceEvent.timedExecAsync(
            this.logger,
            {
                eventName: "getVersions",
                versionId: id,
                count,
            },
            async () => this.manager.getCommits(id, count),
        );
        return commits.map((commit) => ({
            date: commit.commit.author.date,
            id: commit.sha,
            treeId: commit.commit.tree.sha,
        }));
    }

    /**
     * Fetches the snapshot tree for `version` (or the latest version when none
     * is given) and builds the hierarchical tree from the flat git tree.
     * Returns null when the document has no versions yet.
     */
    public async getSnapshotTree(version?: IVersion): Promise<ISnapshotTreeEx | null> {
        let requestVersion = version;
        if (!requestVersion) {
            const versions = await this.getVersions(this.id, 1);
            if (versions.length === 0) {
                return null;
            }
            requestVersion = versions[0];
        }
        const rawTree = await PerformanceEvent.timedExecAsync(
            this.logger,
            {
                eventName: "getSnapshotTree",
                treeId: requestVersion.treeId,
            },
            async (event) => {
                const response = await this.manager.getTree(requestVersion!.treeId);
                event.end({
                    size: response.tree.length,
                });
                return response;
            },
        );
        const tree = buildHierarchy(rawTree, this.blobsShaCache, true);
        return tree;
    }

    // Reads a blob's content; records its sha so later uploads can skip re-sending it.
    public async readBlob(blobId: string): Promise<ArrayBufferLike> {
        const value = await PerformanceEvent.timedExecAsync(
            this.logger,
            {
                eventName: "readBlob",
                blobId,
            },
            async (event) => {
                const response = await this.manager.getBlob(blobId);
                event.end({
                    size: response.size,
                });
                return response;
            },
        );
        this.blobsShaCache.set(value.sha, "");
        return stringToBuffer(value.content, value.encoding);
    }

    // Writes a commit for `tree` on a per-datastore branch (or the document branch when no ref).
    public async write(tree: ITree, parents: string[], message: string, ref: string): Promise<IVersion> {
        const branch = ref ? `datastores/${this.id}/${ref}` : this.id;
        const commit = await PerformanceEvent.timedExecAsync(
            this.logger,
            {
                eventName: "write",
                id: branch,
            },
            async () => this.manager.write(branch, tree, parents, message),
        );
        return { date: commit.committer.date, id: commit.sha, treeId: commit.tree.sha };
    }

    // Uploads a summary tree via the shredded upload manager; returns the new summary handle.
    public async uploadSummaryWithContext(summary: ISummaryTree, context: ISummaryContext): Promise<string> {
        const summaryHandle = await PerformanceEvent.timedExecAsync(
            this.logger,
            {
                eventName: "uploadSummaryWithContext",
            },
            async () => this.summaryUploadManager.writeSummaryTree(summary, context.ackHandle ?? "", "channel"),
        );
        return summaryHandle;
    }

    public async downloadSummary(handle: ISummaryHandle): Promise<ISummaryTree> {
        throw new Error("NOT IMPLEMENTED!");
    }

    // Uploads a blob (sent base64-encoded) and returns its id and url.
    public async createBlob(file: ArrayBufferLike): Promise<ICreateBlobResponse> {
        const uint8ArrayFile = new Uint8Array(file);
        return PerformanceEvent.timedExecAsync(
            this.logger,
            {
                eventName: "createBlob",
                size: uint8ArrayFile.length,
            },
            async (event) => {
                const response = await this.manager.createBlob(
                    Uint8ArrayToString(
                        uint8ArrayFile, "base64"),
                    "base64").then((r) => ({ id: r.sha, url: r.url }));
                event.end({
                    blobId: response.id,
                });
                return response;
            },
        );
    }

    /**
     * Resolves the full snapshot for a parent summary handle (used by the upload
     * manager as the previous-summary baseline); undefined when there is no parent.
     */
    private async getPreviousFullSnapshot(parentHandle: string): Promise<ISnapshotTreeEx | null | undefined> {
        return parentHandle
            ? this.getVersions(parentHandle, 1)
                .then(async (versions) => {
                    // Clear the cache as the getSnapshotTree call will fill the cache.
                    this.blobsShaCache.clear();
                    return this.getSnapshotTree(versions[0]);
                })
            : undefined;
    }
}
// Pseudo version id the service understands as "the most recent summary".
const latestSnapshotId: string = "latest";

/**
 * Document access to underlying storage for routerlicious driver.
 * Uploads and downloads summaries as a single whole unit, and caches the
 * resulting snapshot trees and blobs.
 */
class WholeSummaryDocumentStorageService implements IDocumentStorageService {
    private readonly summaryUploadManager: ISummaryUploadManager;
    // True until the first getVersions() call; lets that first call be served by
    // fetching (and caching) the latest whole summary directly.
    private firstVersionsCall: boolean = true;

    public get repositoryUrl(): string {
        return "";
    }

    constructor(
        protected readonly id: string,
        protected readonly manager: GitManager,
        protected readonly logger: ITelemetryLogger,
        public readonly policies: IDocumentStorageServicePolicies = {},
        private readonly blobCache: ICache<ArrayBufferLike> = new InMemoryCache(),
        private readonly snapshotTreeCache: ICache<ISnapshotTree> = new InMemoryCache()) {
        this.summaryUploadManager = new WholeSummaryUploadManager(manager);
    }

    public async getVersions(versionId: string | null, count: number): Promise<IVersion[]> {
        if (versionId !== this.id && versionId !== null) {
            // Blobs in this scenario will never have multiple versions, so return blobId as is
            return [{
                id: versionId,
                treeId: undefined!,
            }];
        }
        // If this is the first versions call for the document, we know we will want the latest summary.
        // Fetch latest summary, cache it, and return its id.
        if (this.firstVersionsCall && count === 1) {
            this.firstVersionsCall = false;
            return [{
                id: (await this.fetchAndCacheSnapshotTree(latestSnapshotId)).id,
                treeId: undefined!,
            }];
        }
        // Otherwise, get the latest version of the document as normal.
        const id = versionId ? versionId : this.id;
        const commits = await PerformanceEvent.timedExecAsync(
            this.logger,
            {
                eventName: "getVersions",
                versionId: id,
                count,
            },
            async () => this.manager.getCommits(id, count),
        );
        return commits.map((commit) => ({
            date: commit.commit.author.date,
            id: commit.sha,
            treeId: undefined!,
        }));
    }

    public async getSnapshotTree(version?: IVersion): Promise<ISnapshotTree | null> {
        let requestVersion = version;
        if (!requestVersion) {
            const versions = await this.getVersions(this.id, 1);
            if (versions.length === 0) {
                return null;
            }
            requestVersion = versions[0];
        }
        return (await this.fetchAndCacheSnapshotTree(requestVersion.id)).snapshotTree;
    }

    public async readBlob(blobId: string): Promise<ArrayBufferLike> {
        // Serve from the blob cache when possible; it is seeded by
        // fetchAndCacheSnapshotTree() and by previous reads.
        const cachedBlob = await this.blobCache.get(blobId);
        if (cachedBlob !== undefined) {
            return cachedBlob;
        }
        const blob = await PerformanceEvent.timedExecAsync(
            this.logger,
            {
                eventName: "readBlob",
                blobId,
            },
            async (event) => {
                const response = await this.manager.getBlob(blobId);
                event.end({
                    size: response.size,
                });
                return response;
            },
        );
        const bufferValue = stringToBuffer(blob.content, blob.encoding);
        await this.blobCache.put(blob.sha, bufferValue);
        return bufferValue;
    }

    public async uploadSummaryWithContext(summary: ISummaryTree, context: ISummaryContext): Promise<string> {
        const summaryHandle = await PerformanceEvent.timedExecAsync(
            this.logger,
            {
                eventName: "uploadSummaryWithContext",
            },
            async () => this.summaryUploadManager.writeSummaryTree(summary, context.ackHandle ?? "", "channel"),
        );
        return summaryHandle;
    }

    public async downloadSummary(handle: ISummaryHandle): Promise<ISummaryTree> {
        throw new Error("NOT IMPLEMENTED!");
    }

    public async write(tree: ITree, parents: string[], message: string, ref: string): Promise<IVersion> {
        throw new Error("NOT IMPLEMENTED!");
    }

    public async createBlob(file: ArrayBufferLike): Promise<ICreateBlobResponse> {
        const uint8ArrayFile = new Uint8Array(file);
        return PerformanceEvent.timedExecAsync(
            this.logger,
            {
                eventName: "createBlob",
                size: uint8ArrayFile.length,
            },
            async (event) => {
                const response = await this.manager.createBlob(
                    Uint8ArrayToString(
                        uint8ArrayFile, "base64"),
                    "base64").then((r) => ({ id: r.sha, url: r.url }));
                event.end({
                    blobId: response.id,
                });
                return response;
            },
        );
    }

    /**
     * Downloads the whole flat summary for `versionId`, converts it to a
     * snapshot tree + blobs, and populates both caches before returning.
     */
    private async fetchAndCacheSnapshotTree(versionId: string): Promise<{ id: string, snapshotTree: ISnapshotTree }> {
        const cachedSnapshotTree = await this.snapshotTreeCache.get(versionId);
        if (cachedSnapshotTree !== undefined) {
            return { id: versionId, snapshotTree: cachedSnapshotTree };
        }
        const wholeFlatSummary = await PerformanceEvent.timedExecAsync(
            this.logger,
            {
                eventName: "getWholeFlatSummary",
                treeId: versionId,
            },
            async (event) => {
                const response = await this.manager.getSummary(versionId);
                event.end({
                    size: response.trees[0]?.entries.length,
                });
                return response;
            },
        );
        const normalizedWholeSummary = convertWholeFlatSummaryToSnapshotTreeAndBlobs(wholeFlatSummary);
        const snapshotId = normalizedWholeSummary.snapshotTree.id;
        assert(snapshotId !== undefined, "Root tree should contain the id");
        const cachePs: Promise<any>[] = [
            this.snapshotTreeCache.put(
                snapshotId,
                normalizedWholeSummary.snapshotTree,
            ),
            this.initBlobCache(normalizedWholeSummary.blobs),
        ];
        if (snapshotId !== versionId) {
            // versionId could be "latest". When summarizer checks cache for "latest", we want it to be available.
            // TODO: For in-memory cache, <latest,snapshotTree> will be a shared pointer with <snapshotId,snapshotTree>,
            // However, for something like Redis, this will cache the same value twice. Alternatively, could we simply
            // cache with versionId?
            cachePs.push(this.snapshotTreeCache.put(
                versionId,
                normalizedWholeSummary.snapshotTree,
            ));
        }
        // Fix: await the assembled list of cache writes. Previously `cachePs`
        // was built but never awaited — a second, duplicate Promise.all re-issued
        // the first two puts and left the conditional `versionId` alias put
        // un-awaited (duplicate work + potential unhandled rejection).
        await Promise.all(cachePs);
        return { id: snapshotId, snapshotTree: normalizedWholeSummary.snapshotTree };
    }

    // Stores every blob of a freshly downloaded summary into the blob cache.
    private async initBlobCache(blobs: Map<string, ArrayBuffer>): Promise<void> {
        const blobCachePutPs: Promise<void>[] = [];
        blobs.forEach((value, id) => {
            blobCachePutPs.push(this.blobCache.put(id, value));
        });
        await Promise.all(blobCachePutPs);
    }
}
export class DocumentStorageService extends DocumentStorageServiceProxy {
private _logTailSha: string | undefined = undefined;
public get logTailSha(): string | undefined {
return this._logTailSha;
}
private static loadInternalDocumentStorageService(
id: string,
manager: GitManager,
logger: ITelemetryLogger,
policies: IDocumentStorageServicePolicies,
driverPolicies?: IRouterliciousDriverPolicies,
blobCache?: ICache<ArrayBufferLike>,
snapshotTreeCache?: ICache<ISnapshotTree>): IDocumentStorageService {
const storageService = driverPolicies?.enableWholeSummaryUpload ?
new WholeSummaryDocumentStorageService(id, manager, logger, policies, blobCache, snapshotTreeCache) :
new ShreddedSummaryDocumentStorageService(id, manager, logger, policies);
// TODO: worth prefetching latest summary making version + snapshot call with WholeSummary storage?
if (!driverPolicies?.enableWholeSummaryUpload && policies.caching === LoaderCachingPolicy.Prefetch) {
return new PrefetchDocumentStorageService(storageService);
}
return storageService;
}
constructor(
public readonly id: string,
public manager: GitManager,
logger: ITelemetryLogger,
policies: IDocumentStorageServicePolicies = {},
driverPolicies?: IRouterliciousDriverPolicies,
blobCache?: ICache<ArrayBufferLike>,
snapshotTreeCache?: ICache<ISnapshotTree>) {
super(DocumentStorageService.loadInternalDocumentStorageService(
id,
manager,
logger,
policies,
driverPolicies,
blobCache,
snapshotTreeCache,
));
}
public async getSnapshotTree(version?: IVersion): Promise<ISnapshotTree | null> {
const tree = await this.internalStorageService.getSnapshotTree(version);
if (tree !== null) {
this._logTailSha = ".logTail" in tree.trees ? tree.trees[".logTail"].blobs.logTail : undefined;
}
return tree;
}
} | the_stack |
import * as path from 'path';
import * as iam from '@aws-cdk/aws-iam';
import * as lambda from '@aws-cdk/aws-lambda';
import * as logs from '@aws-cdk/aws-logs';
import * as s3 from '@aws-cdk/aws-s3';
import * as sfn from '@aws-cdk/aws-stepfunctions';
import { TaskInput } from '@aws-cdk/aws-stepfunctions';
import * as cdk from '@aws-cdk/core';
import * as cr from '@aws-cdk/custom-resources';
import * as awscli from '@aws-cdk/lambda-layer-awscli';
import { Construct } from 'constructs';
import { integrationResourceArn, validatePatternSupported } from '../private/task-utils';
/**
 * The props for an EMR Containers StartJobRun Task.
 */
export interface EmrContainersStartJobRunProps extends sfn.TaskStateBaseProps {
    /**
     * The ID of the virtual cluster where the job will be run.
     *
     * May be a concrete value or an encoded JSON path; a JSON path requires an
     * explicit `executionRole` (see below).
     */
    readonly virtualCluster: VirtualClusterInput;
    /**
     * The name of the job run.
     *
     * @default - No job run name
     */
    readonly jobName?: string;
    /**
     * The execution role for the job run.
     *
     * If `virtualClusterId` is from a JSON input path, an execution role must be provided.
     * If an execution role is provided, follow the documentation to update the role trust policy.
     * @see https://docs.aws.amazon.com/emr/latest/EMR-on-EKS-DevelopmentGuide/setting-up-trust-policy.html
     *
     * @default - Automatically generated only when the provided `virtualClusterId` is not an encoded JSON path
     */
    readonly executionRole?: iam.IRole;
    /**
     * The Amazon EMR release version to use for the job run.
     */
    readonly releaseLabel: ReleaseLabel;
    /**
     * The configurations for the application running in the job run.
     *
     * Maximum of 100 items.
     *
     * @see https://docs.aws.amazon.com/emr-on-eks/latest/APIReference/API_Configuration.html
     *
     * @default - No application config
     */
    readonly applicationConfig?: ApplicationConfiguration[];
    /**
     * The job driver for the job run.
     *
     * @see https://docs.aws.amazon.com/emr-on-eks/latest/APIReference/API_JobDriver.html
     */
    readonly jobDriver: JobDriver;
    /**
     * Configuration for monitoring the job run.
     *
     * @see https://docs.aws.amazon.com/emr-on-eks/latest/APIReference/API_MonitoringConfiguration.html
     *
     * @default - logging enabled and resources automatically generated if `monitoring.logging` is set to `true`
     */
    readonly monitoring?: Monitoring;
    /**
     * The tags assigned to job runs.
     *
     * @default - None
     */
    readonly tags?: { [key: string]: string };
}
/**
* Starts a job run.
*
* A job is a unit of work that you submit to Amazon EMR on EKS for execution.
* The work performed by the job can be defined by a Spark jar, PySpark script, or SparkSQL query.
* A job run is an execution of the job on the virtual cluster.
*
* @see https://docs.aws.amazon.com/step-functions/latest/dg/connect-emr-eks.html
*/
export class EmrContainersStartJobRun extends sfn.TaskStateBase implements iam.IGrantable {
private static readonly SUPPORTED_INTEGRATION_PATTERNS: sfn.IntegrationPattern[] = [
sfn.IntegrationPattern.REQUEST_RESPONSE,
sfn.IntegrationPattern.RUN_JOB,
];
protected readonly taskMetrics?: sfn.TaskMetricsConfig;
protected readonly taskPolicies?: iam.PolicyStatement[];
public readonly grantPrincipal: iam.IPrincipal;
private role: iam.IRole;
private readonly logGroup?: logs.ILogGroup;
private readonly logBucket?: s3.IBucket;
private readonly integrationPattern: sfn.IntegrationPattern;
constructor(scope: Construct, id: string, private readonly props: EmrContainersStartJobRunProps) {
    super(scope, id, props);
    this.integrationPattern = props.integrationPattern ?? sfn.IntegrationPattern.RUN_JOB;
    validatePatternSupported(this.integrationPattern, EmrContainersStartJobRun.SUPPORTED_INTEGRATION_PATTERNS);
    // Synth-time validation of user-supplied configuration.
    if (this.props.applicationConfig) {
        this.validateAppConfig(this.props.applicationConfig);
    }
    if (this.props.jobDriver.sparkSubmitJobDriver) {
        this.validateSparkSubmitJobDriver(props.jobDriver.sparkSubmitJobDriver);
    }
    // A role can only be auto-generated when the virtual cluster id is a
    // concrete value; for an encoded JSON path the caller must supply one.
    if (this.props.executionRole === undefined
        && sfn.JsonPath.isEncodedJsonPath(props.virtualCluster.id)) {
        throw new Error('Execution role cannot be undefined when the virtual cluster ID is not a concrete value. Provide an execution role with the correct trust policy');
    }
    // NOTE(review): initialization order looks deliberate (log resources before
    // role creation/grants) — the helper methods are defined outside this view;
    // confirm before reordering.
    this.logGroup = this.assignLogGroup();
    this.logBucket = this.assignLogBucket();
    this.role = this.props.executionRole ?? this.createJobExecutionRole();
    this.grantPrincipal = this.role;
    this.grantMonitoringPolicies();
    this.taskPolicies = this.createPolicyStatements();
}
/**
 * Render the Step Functions task definition for emr-containers:startJobRun.
 *
 * @internal
 */
protected _renderTask(): any {
    return {
        Resource: integrationResourceArn('emr-containers', 'startJobRun', this.integrationPattern),
        Parameters: sfn.FieldUtils.renderObject({
            VirtualClusterId: this.props.virtualCluster.id,
            Name: this.props.jobName,
            ExecutionRoleArn: this.role.roleArn,
            ReleaseLabel: this.props.releaseLabel.label,
            JobDriver: {
                SparkSubmitJobDriver: {
                    EntryPoint: this.props.jobDriver.sparkSubmitJobDriver?.entryPoint.value,
                    EntryPointArguments: this.props.jobDriver.sparkSubmitJobDriver?.entryPointArguments?.value,
                    SparkSubmitParameters: this.props.jobDriver.sparkSubmitJobDriver?.sparkSubmitParameters,
                },
            },
            ConfigurationOverrides: {
                ApplicationConfiguration: cdk.listMapper(this.applicationConfigPropertyToJson)(this.props.applicationConfig),
                MonitoringConfiguration: {
                    // NOTE(review): `monitoring!` assumes logGroup is only ever set
                    // when `props.monitoring` was provided (assignLogGroup is not
                    // visible here) — confirm, otherwise this can throw at synth.
                    CloudWatchMonitoringConfiguration: this.logGroup ? {
                        LogGroupName: this.logGroup.logGroupName,
                        LogStreamNamePrefix: this.props.monitoring!.logStreamNamePrefix,
                    } : undefined,
                    // Persistent app UI defaults to ENABLED unless explicitly disabled.
                    PersistentAppUI: (this.props.monitoring?.persistentAppUI === false)
                        ? 'DISABLED'
                        : 'ENABLED',
                    S3MonitoringConfiguration: this.logBucket ? {
                        LogUri: this.logBucket.s3UrlForObject(),
                    } : undefined,
                },
            },
            Tags: this.props.tags,
        }),
    };
}
/**
* Render the EMR Containers ConfigurationProperty as JSON
*/
private applicationConfigPropertyToJson = (property: ApplicationConfiguration) => {
return {
Classification: cdk.stringToCloudFormation(property.classification.classificationStatement),
Properties: property.properties ? cdk.objectToCloudFormation(property.properties) : undefined,
Configurations: property.nestedConfig ? cdk.listMapper(this.applicationConfigPropertyToJson)(property.nestedConfig) : undefined,
};
}
private validateAppConfigPropertiesLength(appConfig: ApplicationConfiguration) {
if (appConfig?.properties === undefined) {
return;
} else if (Object.keys(appConfig.properties).length > 100) {
throw new Error(`Application configuration properties must have 100 or fewer entries. Received ${Object.keys(appConfig.properties).length}`);
}
}
private validatePropertiesNestedAppConfigBothNotUndefined(appConfig: ApplicationConfiguration) {
if (appConfig?.properties === undefined && appConfig?.nestedConfig === undefined) {
throw new Error('Application configuration must have either properties or nested app configurations defined.');
}
}
private validateAppConfig(config?: ApplicationConfiguration[]) {
if (config === undefined) {
return;
} else if (config.length > 100) {
throw new Error(`Application configuration array must have 100 or fewer entries. Received ${config.length}`);
} else {
config.forEach(element => this.validateAppConfig(element.nestedConfig));
config.forEach(element => this.validateAppConfigPropertiesLength(element));
config.forEach(element => this.validatePropertiesNestedAppConfigBothNotUndefined(element));
}
}
private isArrayOfStrings(value: any): boolean {
return Array.isArray(value) && value.every(item => typeof item === 'string');
}
private validateEntryPointArguments (entryPointArguments:sfn.TaskInput) {
if (typeof entryPointArguments.value === 'string' && !sfn.JsonPath.isEncodedJsonPath(entryPointArguments.value)) {
throw new Error(`Entry point arguments must be a string array or encoded JSON path, but received a non JSON path string');
.`);
}
if (!this.isArrayOfStrings(entryPointArguments.value)) {
throw new Error(`Entry point arguments must be a string array or encoded JSON path but received ${typeof entryPointArguments.value}.`);
}
}
private validateEntryPointArgumentsLength (entryPointArguments:sfn.TaskInput) {
if (this.isArrayOfStrings(entryPointArguments.value)
&& (entryPointArguments.value.length > 10280 || entryPointArguments.value.length < 1)) {
throw new Error(`Entry point arguments must be a string array between 1 and 10280 in length. Received ${entryPointArguments.value.length}.`);
}
}
private validateSparkSubmitParametersLength (sparkSubmitParameters : string) {
if (sparkSubmitParameters.length > 102400 || sparkSubmitParameters.length < 1) {
throw new Error(`Spark submit parameters must be between 1 and 102400 characters in length. Received ${sparkSubmitParameters.length}.`);
}
}
private validateEntryPoint (entryPoint: TaskInput) {
if (!sfn.JsonPath.isEncodedJsonPath(entryPoint.value) && (entryPoint.value.length > 256|| entryPoint.value.length < 1)) {
throw new Error(`Entry point must be between 1 and 256 characters in length. Received ${entryPoint.value.length}.`);
}
}
private validateSparkSubmitJobDriver (driver:SparkSubmitJobDriver) {
this.validateEntryPoint(driver.entryPoint);
if (driver.entryPointArguments) {
this.validateEntryPointArguments(driver.entryPointArguments);
this.validateEntryPointArgumentsLength(driver.entryPointArguments);
}
if (driver.sparkSubmitParameters) {
this.validateSparkSubmitParametersLength(driver.sparkSubmitParameters);
}
}
private assignLogGroup = () : any => {
if (this.props.monitoring?.logGroup) {
return (this.props.monitoring?.logGroup);
} else {
return (this.props.monitoring?.logging ? new logs.LogGroup(this, 'Monitoring Log Group') : undefined);
}
}
private assignLogBucket = () : any => {
if (this.props.monitoring?.logBucket) {
return (this.props.monitoring?.logBucket);
} else {
return (this.props.monitoring?.logging ? new s3.Bucket(this, 'Monitoring Bucket') : undefined);
}
}
// https://docs.aws.amazon.com/emr/latest/EMR-on-EKS-DevelopmentGuide/creating-job-execution-role.html
private createJobExecutionRole(): iam.Role {
const jobExecutionRole = new iam.Role(this, 'Job-Execution-Role', {
assumedBy: new iam.CompositePrincipal(
new iam.ServicePrincipal('emr-containers.amazonaws.com'),
new iam.ServicePrincipal('states.amazonaws.com'),
),
});
this.logBucket?.grantReadWrite(jobExecutionRole);
this.logGroup?.grantWrite(jobExecutionRole);
this.logGroup?.grant(jobExecutionRole, 'logs:DescribeLogStreams');
jobExecutionRole.addToPrincipalPolicy(
new iam.PolicyStatement({
resources: [
'arn:aws:logs:*:*:*',
],
actions: [
'logs:DescribeLogGroups',
],
}),
);
this.updateRoleTrustPolicy(jobExecutionRole);
return jobExecutionRole;
}
private grantMonitoringPolicies() {
this.logBucket?.grantReadWrite(this.role);
this.logGroup?.grantWrite(this.role);
this.logGroup?.grant(this.role, 'logs:DescribeLogStreams');
this.role.addToPrincipalPolicy(
new iam.PolicyStatement({
resources: [
'arn:aws:logs:*:*:*',
],
actions: [
'logs:DescribeLogGroups',
],
}),
);
}
/**
* If an execution role is not provided by user, the automatically generated job execution role must create a trust relationship
* between itself and the identity of the EMR managed service account in order to run jobs on the Kubernetes namespace.
*
* This cannot occur if the user provided virtualClusterId is within an encoded JSON path.
*
* The trust relationship can be created by updating the trust policy of the job execution role.
*
* @param role the automatically generated job execution role
*/
private updateRoleTrustPolicy(role: iam.Role) {
const eksClusterInfo = new cr.AwsCustomResource(this, 'GetEksClusterInfo', {
onCreate: {
service: 'EMRcontainers',
action: 'describeVirtualCluster',
parameters: {
id: this.props.virtualCluster.id,
},
outputPaths: ['virtualCluster.containerProvider.info.eksInfo.namespace', 'virtualCluster.containerProvider.id'],
physicalResourceId: cr.PhysicalResourceId.of('id'),
},
policy: cr.AwsCustomResourcePolicy.fromSdkCalls({
resources: cr.AwsCustomResourcePolicy.ANY_RESOURCE,
}),
});
/* We make use of custom resources to call update-roll-trust-policy as this command is only available through
* aws cli because this is only used during the initial setup and is not available through the sdk.
* https://awscli.amazonaws.com/v2/documentation/api/latest/reference/emr-containers/update-role-trust-policy.html
* Commands available through SDK: https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/EMRcontainers.html
* Commands available through CLI: https://awscli.amazonaws.com/v2/documentation/api/latest/reference/emr-containers/index.html
*/
const cliLayer = new awscli.AwsCliLayer(this, 'awsclilayer');
const shellCliLambda = new lambda.SingletonFunction(this, 'Call Update-Role-Trust-Policy', {
uuid: '8693BB64-9689-44B6-9AAF-B0CC9EB8757C',
runtime: lambda.Runtime.PYTHON_3_6,
handler: 'index.handler',
code: lambda.Code.fromAsset(path.join(__dirname, 'utils/role-policy')),
timeout: cdk.Duration.seconds(30),
memorySize: 256,
layers: [cliLayer],
});
shellCliLambda.addToRolePolicy(
new iam.PolicyStatement({
resources: [
cdk.Stack.of(this).formatArn({
service: 'eks',
resource: 'cluster',
resourceName: eksClusterInfo.getResponseField('virtualCluster.containerProvider.id'),
}),
],
actions: [
'eks:DescribeCluster',
],
}),
);
shellCliLambda.addToRolePolicy(
new iam.PolicyStatement({
resources: [role.roleArn],
actions: [
'iam:GetRole',
'iam:UpdateAssumeRolePolicy',
],
}),
);
const provider = new cr.Provider(this, 'CustomResourceProvider', {
onEventHandler: shellCliLambda,
});
new cdk.CustomResource(this, 'Custom Resource', {
properties: {
eksNamespace: eksClusterInfo.getResponseField('virtualCluster.containerProvider.info.eksInfo.namespace'),
eksClusterId: eksClusterInfo.getResponseField('virtualCluster.containerProvider.id'),
roleName: role.roleName,
},
serviceToken: provider.serviceToken,
});
}
private createPolicyStatements(): iam.PolicyStatement[] {
const policyStatements = [
new iam.PolicyStatement({
resources: [
cdk.Stack.of(this).formatArn({
arnFormat: cdk.ArnFormat.SLASH_RESOURCE_SLASH_RESOURCE_NAME,
service: 'emr-containers',
resource: 'virtualclusters',
resourceName: sfn.JsonPath.isEncodedJsonPath(this.props.virtualCluster.id) ? '*' : this.props.virtualCluster.id, // Need wild card for dynamic start job run https://docs.aws.amazon.com/step-functions/latest/dg/emr-eks-iam.html
}),
],
actions: ['emr-containers:StartJobRun'],
conditions: {
StringEquals: {
'emr-containers:ExecutionRoleArn': this.role.roleArn,
},
},
}),
];
if (this.integrationPattern === sfn.IntegrationPattern.RUN_JOB) {
policyStatements.push(
new iam.PolicyStatement({
resources: [
cdk.Stack.of(this).formatArn({
arnFormat: cdk.ArnFormat.SLASH_RESOURCE_SLASH_RESOURCE_NAME,
service: 'emr-containers',
resource: 'virtualclusters',
resourceName: sfn.JsonPath.isEncodedJsonPath(this.props.virtualCluster.id) ? '*' : `${this.props.virtualCluster.id}/jobruns/*`, // Need wild card for dynamic start job run https://docs.aws.amazon.com/step-functions/latest/dg/emr-eks-iam.html
}),
],
actions: [
'emr-containers:DescribeJobRun',
'emr-containers:CancelJobRun',
],
}),
);
}
return policyStatements;
}
}
/**
 * The information about the job driver for Spark submit.
 */
export interface SparkSubmitJobDriver {
  /**
   * The entry point of the job application.
   *
   * Length Constraints: Minimum length of 1. Maximum length of 256.
   */
  readonly entryPoint: sfn.TaskInput;

  /**
   * The arguments for a job application in a task input object containing an array of strings.
   *
   * Length Constraints: Minimum length of 1. Maximum length of 10280.
   * @type sfn.TaskInput which expects payload as an array of strings
   *
   * @default - No arguments defined
   */
  readonly entryPointArguments?: sfn.TaskInput;

  /**
   * The Spark submit parameters that are used for job runs.
   *
   * Length Constraints: Minimum length of 1. Maximum length of 102400.
   *
   * @default - No spark submit parameters
   */
  readonly sparkSubmitParameters?: string;
}
/**
 * Specify the driver that the EMR Containers job runs on.
 * The job driver is used to provide an input for the job that will be run.
 */
export interface JobDriver {
  /**
   * The job driver parameters specified for spark submit.
   *
   * @see https://docs.aws.amazon.com/emr-on-eks/latest/APIReference/API_SparkSubmitJobDriver.html
   *
   */
  readonly sparkSubmitJobDriver: SparkSubmitJobDriver;
}
/**
 * The classification within a EMR Containers application configuration.
 * Class can be extended to add other classifications.
 * For example, new Classification('xxx-yyy');
 */
export class Classification {
  /**
   * Sets the maximizeResourceAllocation property to true or false.
   * When true, Amazon EMR automatically configures spark-defaults properties based on cluster hardware configuration.
   *
   * For more info:
   * @see https://docs.aws.amazon.com/emr/latest/ReleaseGuide/emr-spark-configure.html#emr-spark-maximizeresourceallocation
   */
  static readonly SPARK = new Classification('spark');

  /**
   * Sets values in the spark-defaults.conf file.
   *
   * For more info:
   * @see https://spark.apache.org/docs/latest/configuration.html
   */
  static readonly SPARK_DEFAULTS = new Classification('spark-defaults');

  /**
   * Sets values in the spark-env.sh file.
   *
   * For more info:
   * @see https://spark.apache.org/docs/latest/configuration.html#environment-variables
   */
  static readonly SPARK_ENV = new Classification('spark-env');

  /**
   * Sets values in the hive-site.xml for Spark.
   */
  static readonly SPARK_HIVE_SITE = new Classification('spark-hive-site');

  /**
   * Sets values in the log4j.properties file.
   *
   * For more settings and info:
   * @see https://github.com/apache/spark/blob/master/conf/log4j.properties.template
   */
  static readonly SPARK_LOG4J = new Classification('spark-log4j');

  /**
   * Sets values in the metrics.properties file.
   *
   * For more settings and info:
   * @see https://github.com/apache/spark/blob/master/conf/metrics.properties.template
   */
  static readonly SPARK_METRICS = new Classification('spark-metrics');

  /**
   * Creates a new Classification
   *
   * @param classificationStatement A literal string in case a new EMR classification is released, if not already defined.
   */
  constructor(public readonly classificationStatement: string) { }
}
/**
 * A configuration specification to be used when provisioning virtual clusters,
 * which can include configurations for applications and software bundled with Amazon EMR on EKS.
 *
 * A configuration consists of a classification, properties, and optional nested configurations.
 * A classification refers to an application-specific configuration file.
 * Properties are the settings you want to change in that file.
 * @see https://docs.aws.amazon.com/emr/latest/ReleaseGuide/emr-configure-apps.html
 */
export interface ApplicationConfiguration {
  /**
   * The classification within a configuration.
   *
   * Length Constraints: Minimum length of 1. Maximum length of 1024.
   */
  readonly classification: Classification;

  /**
   * A list of additional configurations to apply within a configuration object.
   *
   * Array Members: Maximum number of 100 items.
   *
   * @default - No other configurations
   */
  readonly nestedConfig?: ApplicationConfiguration[];

  /**
   * A set of properties specified within a configuration classification.
   *
   * Map Entries: Maximum number of 100 items.
   *
   * @default - No properties
   */
  readonly properties?: { [key: string]: string };
}
/**
 * Configuration setting for monitoring.
 */
export interface Monitoring {
  /**
   * Enable logging for this job.
   *
   * If set to true, will automatically create a Cloudwatch Log Group and S3 bucket.
   * This will be set to `true` implicitly if values are provided for `logGroup` or `logBucket`.
   *
   * @default true - true if values are provided for `logGroup` or `logBucket`, false otherwise
   */
  readonly logging?: boolean

  /**
   * A log group for CloudWatch monitoring.
   *
   * You can configure your jobs to send log information to CloudWatch Logs.
   *
   * @default - if `logging` is manually set to `true` and a `logGroup` is not provided, a `logGroup` will be automatically generated.
   */
  readonly logGroup?: logs.ILogGroup;

  /**
   * A log stream name prefix for Cloudwatch monitoring.
   *
   * @default - Log streams created in this log group have no default prefix
   */
  readonly logStreamNamePrefix?: string;

  /**
   * Amazon S3 Bucket for monitoring log publishing.
   *
   * You can configure your jobs to send log information to Amazon S3.
   *
   * @default - if `logging` is manually set to `true` and a `logBucket` is not provided, a `logBucket` will be automatically generated.
   */
  readonly logBucket?: s3.IBucket;

  /**
   * Monitoring configurations for the persistent application UI.
   *
   * @default true
   */
  readonly persistentAppUI?: boolean;
}
/**
 * The Amazon EMR release version to use for the job run.
 *
 * Can be extended to include new EMR releases
 *
 * For example, `new ReleaseLabel('emr-x.xx.x-latest');`
 */
export class ReleaseLabel {
  /**
   * EMR Release version 5.32.0
   */
  static readonly EMR_5_32_0 = new ReleaseLabel('emr-5.32.0-latest');

  /**
   * EMR Release version 5.33.0
   */
  static readonly EMR_5_33_0 = new ReleaseLabel('emr-5.33.0-latest');

  /**
   * EMR Release version 6.2.0
   */
  static readonly EMR_6_2_0 = new ReleaseLabel('emr-6.2.0-latest');

  /**
   * EMR Release version 6.3.0
   */
  static readonly EMR_6_3_0 = new ReleaseLabel('emr-6.3.0-latest');

  /**
   * Initializes the label string.
   *
   * @param label A literal string that contains the release-version ex. 'emr-x.x.x-latest'
   */
  constructor(public readonly label: string) { }
}
/**
 * Class that returns a virtual cluster's id depending on input type
 */
export class VirtualClusterInput {
  /**
   * Input for a virtualClusterId from a Task Input
   *
   * @param taskInput task input whose `value` (which may be an encoded JSON path) is used as the cluster id
   */
  static fromTaskInput(taskInput: sfn.TaskInput): VirtualClusterInput {
    return new VirtualClusterInput(taskInput.value);
  }

  /**
   * Input for virtualClusterId from a literal string
   *
   * @param virtualClusterId concrete virtual cluster id
   */
  static fromVirtualClusterId(virtualClusterId: string): VirtualClusterInput {
    return new VirtualClusterInput(virtualClusterId);
  }

  /**
   * Initializes the virtual cluster ID.
   *
   * @param id The VirtualCluster Id
   */
  private constructor(public readonly id: string) { }
}
import {Block} from './Block';
import {IApp} from '../IApp';
import {IAudioChain} from '../Core/Audio/Connections/IAudioChain';
import {IEffect} from './IEffect';
import {ISource} from './ISource';
import {Particle} from '../Particle';
import {Power} from './Power/Power';
import {SoundCloudTrack} from '../Core/Audio/SoundCloud/SoundcloudTrack';
import {VoiceCreator as Voice} from './Interaction/VoiceObject';
import {Controller} from './Interaction/Controller';
import IDisplayContext = etch.drawing.IDisplayContext;
import ObservableCollection = etch.collections.ObservableCollection;
import Point = etch.primitives.Point;
declare var App: IApp;
export class Source extends Block implements ISource {
//-------------------------------------------------------------------------------------------
// INIT
//-------------------------------------------------------------------------------------------

// Effects (and controllers) currently attached to this source block.
public Connections: ObservableCollection<IEffect> = new ObservableCollection<IEffect>();
// Underlying audio source nodes (oscillators, samplers, players, ...) — one per voice.
public Sources: any[];
// Grain players, used by granular-type sources.
public Grains: any[];
// Amplitude envelopes, parallel to Sources (sampler sources keep their own built-in envelope instead).
public Envelopes: Tone.AmplitudeEnvelope[];
// Mono summing node through which this block feeds the master output.
public AudioInput: Tone.Mono;
// Default envelope and output settings applied to new voices.
public Settings: ToneSettings = {
    envelope: {
        attack: 0.01,
        decay: 0.5,
        sustain: 0.5,
        release: 0.01
    },
    output: {
        volume: 0.5
    }
};
// Voices currently sounding.
public ActiveVoices: Voice[];
// Voices available to be (re)triggered.
public FreeVoices: Voice[];
// The single voice used in monophonic mode (null/undefined when unused).
public MonoVoice: Voice;
// Power state flags — set when particles / lasers energise this block.
public ParticlePowered: boolean;
public LaserPowered: boolean;
// When defined, flags that a laser-style block must recompute collisions.
public UpdateCollision: boolean;
public Collisions: any[];
public CheckRange: number;
// SoundCloud search state used by sample-picking UI.
public SearchResults: SoundCloudTrack[];
public Searching: boolean;
public ResultsPage: number;
public SearchString: string;
public PlaybackSignal: any;
// Number of power units currently applied to this block.
public PowerAmount: number = 0;
/**
 * Initialise drawing context, voice bookkeeping and (for sound-producing
 * blocks) the mono audio input routed to the master output.
 */
Init(drawTo: IDisplayContext): void {
    super.Init(drawTo);

    this.Sources = [];
    this.Envelopes = [];
    this.ActiveVoices = [];
    this.FreeVoices = [];

    this.ParticlePowered = false;
    this.LaserPowered = false;

    if (!(this instanceof Power)) { //TODO: Power is an Effect, did we mean this? Looks like non-sound sources are creating AudioInputs. Try the IsASoundSource() check.
        this.AudioInput = new Tone.Mono();
        this.AudioInput.connect(App.Audio.Master);
    }

    this.UpdateOptionsForm();
}
//-------------------------------------------------------------------------------------------
// CONNECTIONS
//-------------------------------------------------------------------------------------------

/**
 * Add effect to this Source's list of effects and call Effect.Attach()
 * @param effect
 */
AddEffect(effect: IEffect) {
    this.Connections.Add(effect);
    //effect.Attach(<ISource>this);
}
/**
 * Remove effect from this Source's list of effects and call Effect.Detach()
 * @param effect
 */
RemoveEffect(effect: IEffect) {
    this.Connections.Remove(effect);
    //effect.Detach(<ISource>this);
}
/**
 * Validate that the block's effects still exist, dropping any connection
 * whose effect has been removed from the global effects list.
 *
 * Fixed: iterate backwards — RemoveEffect() shrinks the collection, and a
 * forward index would skip the element that shifts into the removed slot.
 */
public ValidateEffects() {
    for (let i = this.Connections.Count - 1; i >= 0; i--) {
        let effect: IEffect = this.Connections.GetValueAt(i);
        if (!App.Effects.contains(effect)) {
            this.RemoveEffect(effect);
        }
    }
}
/**
 * Called when this block's connection chain changes: restores envelope
 * settings and releases voices whose controllers are no longer attached.
 */
UpdateConnections(chain: IAudioChain) {
    super.UpdateConnections(chain);

    // Reset Envelopes back to original setting
    this._EnvelopeReset();

    // Release any disconnected voices
    this.DeactivateDisconnectedVoices();
}
/**
 * Restore every envelope to the block's configured ADSR settings —
 * either the standalone envelopes, or the built-in envelopes of
 * Simpler-based sources when no standalone ones exist.
 */
private _EnvelopeReset() {
    const settings = this.Settings.envelope;
    const restore = (env: Tone.AmplitudeEnvelope) => {
        env.attack = settings.attack;
        env.decay = settings.decay;
        env.sustain = settings.sustain;
        env.release = settings.release;
    };
    if (this.Envelopes.length) {
        for (const env of this.Envelopes) {
            restore(env);
        }
    } else if (this.Sources[0] instanceof Tone.Simpler) {
        for (const source of this.Sources) {
            restore((source as Tone.Simpler).envelope);
        }
    }
}
/**
 * Return the most recently added source node, if any.
 * Subclasses override this to actually construct a source.
 */
CreateSource() {
    const newest = this.Sources[this.Sources.length - 1];
    if (newest) {
        return newest;
    }
}
/**
 * Return the most recently added envelope, if any.
 * Subclasses override this to actually construct an envelope.
 */
CreateEnvelope() {
    const newest = this.Envelopes[this.Envelopes.length - 1];
    if (newest) {
        return newest;
    }
}
//-------------------------------------------------------------------------------------------
// INSTANCE CHECK
//-------------------------------------------------------------------------------------------

// RETURNS TRUE IF ENVELOPES ARE FOUND //
// NOTE: returns a truthy *number* (a length), not a boolean — callers only test truthiness.
IsASoundSource() {
    return (this.Envelopes.length || this.Sources.length );
}
// RETURNS TRUE IF BLOCKNAME MATCHES NAME //
// where name is a reference to App.L10n
IsMyName(name:string) {
    return (name === this.BlockName);
}
//-------------------------------------------------------------------------------------------
// TRIGGER ENVELOPES / SAMPLERS
//-------------------------------------------------------------------------------------------

// Silence this block by deactivating every voice.
Stop() {
    //this.TriggerRelease('all', true);
    this.DeactivateAllVoices();
}
/**
 * Trigger a sources attack
 * If no index is set trigger the first in the array
 * @param {number | string} index
 * Index is the position of the Envelope in Envelopes[].
 * If index is set to 'all', all envelopes will be triggered
 *
 * NOTE: a numeric index is used directly to subscript Envelopes[]/Sources[];
 * it is assumed to be in range — TODO confirm callers never pass a freed ID.
 */
TriggerAttack(index: number|string = 0) {
    //console.log('TriggerAttack: ',this);

    // Only if the source has envelopes
    if (this.Envelopes.length) {
        if (index === 'all'){
            // Trigger all the envelopes
            this.Envelopes.forEach((e: any)=> {
                e.triggerAttack();
            });
        } else {
            // Trigger the specific one
            this.Envelopes[index].triggerAttack();
        }

    // Or Samplers have built in envelopes
    } else if (this.Sources[0] && this.Sources[0].envelope) {
        if (index === 'all'){
            // Trigger all the envelopes
            this.Sources.forEach((s: any)=> {
                s.triggerAttack();
            });
        } else {
            // Trigger the specific one
            this.Sources[index].triggerAttack();
        }

    // Or this is a laser which needs to update it's collision check after being powered
    } else if (this.UpdateCollision!==undefined) {
        this.UpdateCollision = true;
    }
}
/**
 * Trigger a sources release
 * If no index is set release the first in the array
 * @param index {number|string} position of the Envelope in Envelopes[].
 * @param forceRelease {boolean} if set to true Envelopes will release regardless of power status
 * If index is set to 'all', all envelopes will be released
 *
 * NOTE(review): `forceRelease` is accepted but never read in this body —
 * confirm whether a power-status check was meant to gate the release.
 */
TriggerRelease(index: number|string = 0, forceRelease: boolean = false) {
    // SAMPLERS HAVE THEIR OWN TRIGGERRELEASE
    // Only if the source has envelopes
    if (this.Envelopes.length) {
        if (index === 'all'){
            // Trigger all the envelopes
            this.Envelopes.forEach((e: any)=> {
                e.triggerRelease();
            });
        } else {
            // Trigger the specific one
            this.Envelopes[index].triggerRelease();
        }

    // Or Samplers have built in envelopes
    } else if (this.Sources[0] && this.Sources[0].envelope) {
        if (index === 'all'){
            // Trigger all the envelopes
            this.Sources.forEach((s: any)=> {
                s.triggerRelease();
            });
        } else {
            // Trigger the specific one
            this.Sources[index].triggerRelease();
        }

    // Or this is a laser which needs to update it's collision check after being unpowered
    } else if (this.UpdateCollision!==undefined) {
        this.UpdateCollision = true;
    }
    //}
}
/**
 * Fire a complete attack+release cycle (a "pulse") on this block.
 *
 * @param duration pulse length (defaults to the app-wide pulse length)
 * @param time scheduling offset passed to sampler triggers
 * @param velocity reserved — not yet wired through to the trigger calls
 */
TriggerAttackRelease(duration: number = App.Config.PulseLength, time: Tone.Time = '+0.01', velocity?: number) {
    // Oscillators, noises & players: fire the first standalone envelope.
    if (this.Envelopes.length) {
        //TODO: add velocity to all trigger methods
        //TODO: add samplers and players
        this.Envelopes[0].triggerAttackRelease(duration);

    // Samplers: envelope is built into the source (false = "sample name" parameter).
    } else if (this.Sources[0] && this.Sources[0].envelope) {
        this.Sources[0].triggerAttackRelease(false, duration, time);

    // Power source blocks: pulse power on now, off after `duration`.
    } else if (this.PowerAmount !== undefined) {
        //this.PowerConnections += 1;
        this.AddPower();
        if (this.UpdateCollision !== undefined) {
            this.UpdateCollision = true;
        }
        const millis = App.Audio.Tone.toSeconds(duration) * 1000;
        setTimeout(() => {
            //block.PowerConnections -= 1;
            this.RemovePower();
            if (this.UpdateCollision !== undefined) {
                this.UpdateCollision = true;
            }
        }, millis);
    }
}
//-------------------------------------------------------------------------------------------
// MESSAGES
//-------------------------------------------------------------------------------------------

// ON //
/**
 * Handle a note-on message: allocate (or steal) a voice, tune it,
 * and attack its envelope when it is a fresh trigger.
 */
NoteOn(controller: string, note?:number, polyphonic?: boolean, glide?:number, velocity?:number) {
    // `||` kept deliberately: a missing (or 0) note falls back to the default note.
    const theNote = note || App.Config.DefaultNote;
    const poly = polyphonic || false;
    let slide = glide || 0;
    const vel = velocity || 0; // reserved — velocity not yet used downstream

    // Which voice are we using? //
    const voiceData = this.GetVoice(theNote, controller, poly);

    // Gliding only applies when re-using a single already-sounding voice //
    if (voiceData.trigger || this.ActiveVoices.length > 1) {
        slide = 0;
    }

    // Set the voice pitch //
    this.SetPitch(theNote, voiceData.ID, slide);

    // Trigger the voice envelope //
    if (voiceData.trigger) {
        this.TriggerAttack(voiceData.ID);
    }
}
// OFF //
/**
 * Handle a note-off message: free the matching voice and, if it was
 * actually sounding, release its envelope.
 */
NoteOff(controller: string, note?:number) {
    const theNote = note || App.Config.DefaultNote;

    // Which voice are we using? //
    const voiceData = this.RemoveVoice(theNote, controller);

    // Release the voice envelope //
    if (voiceData.trigger) {
        this.TriggerRelease(voiceData.ID);
    }
}
// UPDATE //
/**
 * Re-tune every sounding voice (e.g. after a pitch-mod parameter changes).
 */
NoteUpdate() {
    for (const voice of this.ActiveVoices) {
        // Set the voice pitch //
        this.SetPitch(voice.Note, voice.ID);
    }
}
//-------------------------------------------------------------------------------------------
// VOICES
//-------------------------------------------------------------------------------------------

// GET A VOICE FOR NOTE ON //
/**
 * Allocate a voice for a note-on. In mono mode the single MonoVoice is
 * re-used; in poly mode a free voice is taken, or the oldest active voice
 * is stolen (in which case `trigger` is false — its envelope is already
 * sounding and must not be re-attacked).
 *
 * Returns { ID, trigger }: the voice's envelope index and whether the
 * caller should fire a fresh attack. ID is -1 when no voice was available.
 */
GetVoice(note: number, controller: string, polyphonic: boolean) {
    var voice: Voice;
    var trigger: boolean = true;
    var id: number = -1;

    // MONOPHONIC //
    if (!polyphonic) {
        // Grab a fresh mono voice //
        if (!this.MonoVoice) {
            if (this.FreeVoices.length > 0){
                voice = this.FreeVoices.shift();
            }
            else {
                // Steal the oldest active voice — don't re-attack it.
                voice = this.ActiveVoices.shift();
                trigger = false;
            }
            this.MonoVoice = voice;
        }
        // We already have a mono voice in use, update that //
        else {
            voice = this.MonoVoice;
            trigger = false;
        }
    }

    // POLYPHONIC //
    else {
        if (this.FreeVoices.length > 0){
            voice = this.FreeVoices.shift();
        }
        else {
            // Steal the oldest active voice — don't re-attack it.
            voice = this.ActiveVoices.shift();
            trigger = false;
        }
    }

    // Set/update Voice data and add to ActiveVoices //
    if (voice) {
        voice.Note = note;
        voice.Controller = controller;
        id = voice.ID;
        if (this.ActiveVoices.indexOf(voice)==-1) {
            this.ActiveVoices.push( voice );
        }
    }

    // Give voice data to NoteOn() //
    return {
        ID: id,
        trigger: trigger
    };
}
// GET THE VOICE FOR NOTE OFF //
/**
 * Locate the voice(s) bound to (note, controller), move them back to the
 * free pool and report whether the caller should release an envelope.
 *
 * A powered block keeps its mono voice alive by handing it to the "power"
 * controller instead of freeing it.
 *
 * Fixed: compensate the loop index after splice() — the forward loop used
 * to skip the element that shifted into the removed slot, leaving a second
 * matching voice active.
 */
RemoveVoice(note: number, controller: string) {
    var voice: Voice;
    var trigger: boolean = false;
    var id: number = -1;

    for (var i=0; i< this.ActiveVoices.length; i++) {
        var activeVoice = this.ActiveVoices[i];

        // if note is saved in the ActiveVoices & controlled by this controller, stop it //
        if (activeVoice.Note === note && activeVoice.Controller === controller) {
            voice = activeVoice;
            trigger = true;
            id = voice.ID;

            // if this was the mono voice, free it //
            if (voice === this.MonoVoice) {
                if (this.IsPowered()) {
                    // Transfer to power and keep the voice active — no release.
                    this.AssignMonoVoiceToPower();
                    trigger = false;
                    continue;
                }
                this.MonoVoice = null;
            }

            // Move from active to free //
            this.ActiveVoices.splice(i, 1);
            this.FreeVoices.push(activeVoice);
            i--; // re-check the element that shifted into slot i
        }
    }

    // Give voice data to NoteOn() //
    return {
        ID: id,
        trigger: trigger
    };
}
// RETRIGGER ALL ACTIVE VOICES //
/**
 * Release and immediately re-attack each active voice, addressed by its
 * position in ActiveVoices.
 */
RetriggerActiveVoices() {
    for (let i = 0; i < this.ActiveVoices.length; i++) {
        this.TriggerRelease(i);
        this.TriggerAttack(i);
    }
}
// KILL ALL VOICES // probably shouldn't use this, unless disposing block
/**
 * Release every active voice and return them all to the free pool.
 *
 * Fixed: the previous version spliced ActiveVoices while forEach was
 * iterating it, which skipped every other voice and passed stale indices
 * to TriggerRelease. Drain the array in one step and release each voice
 * by its own envelope ID instead.
 */
DeactivateAllVoices() {
    const voices = this.ActiveVoices.splice(0, this.ActiveVoices.length);
    voices.forEach((voice: Voice) => {
        // Move from active to free //
        this.FreeVoices.push(voice);
        this.TriggerRelease(voice.ID);
    });
}
// KILL DISCONNECTED VOICES //
/**
 * Release every voice whose controller is no longer attached to this block.
 * Voices owned by the implicit "power" controller are always kept; a powered
 * mono voice is transferred to "power" instead of being released.
 */
DeactivateDisconnectedVoices() {
    var controllers = ["power"];
    var voices = [];

    // Get connected controllers //
    let connections: IEffect[] = this.Connections.ToArray();
    connections.forEach((connection: IEffect) => {
        if (connection instanceof Controller) {
            controllers.push(""+connection.Id);
        }
    });

    // Get voices controlled by disconnected controllers, leave connected ones //
    this.ActiveVoices.forEach((activeVoice: Voice) => {
        var deactivate: boolean = true;
        for(var h=0; h<controllers.length; h++) {
            if (activeVoice.Controller === controllers[h]) {
                deactivate = false;
            }
        }
        if (deactivate) {
            voices.push(activeVoice);
        }
    });

    // Deactivate these voices //
    for (var i=0; i<voices.length; i++) {
        if (voices[i] === this.MonoVoice) {
            // If this is the Mono voice and we're powered, transfer the voice to power and skip deactivation
            if (this.IsPowered()) {
                this.AssignMonoVoiceToPower();
                continue;
            }
            this.MonoVoice = null;
        }
        var n = this.ActiveVoices.indexOf(voices[i]);
        this.ActiveVoices.splice(n, 1);
        this.FreeVoices.push(voices[i]);
        this.TriggerRelease(voices[i].ID);
    }
}
/**
 * Hand the mono voice over to the implicit "power" controller, re-tuned
 * to the default note, so a powered block keeps sounding after note-off.
 */
AssignMonoVoiceToPower() {
    const mono = this.MonoVoice;
    mono.Controller = "power";
    mono.Note = App.Config.DefaultNote;
    this.SetPitch(mono.Note, mono.ID);
}
// CREATE OUR FIRST VOICE FOR MONO / POWER //
/**
 * Ensure a sound-producing block has at least one free voice available
 * for mono / power triggering.
 */
CreateFirstVoice() {
    if (!this.IsASoundSource()) return;
    if (this.FreeVoices.length === 0) {
        this.FreeVoices.push(new Voice(0));
    }
}
// CREATE THE REST OF OUR VOICES FOR POLY //
/**
 * Build the source/envelope pairs needed for polyphonic playback and
 * register a free Voice for each. Granular blocks manage their own voices
 * and are skipped.
 *
 * NOTE(review): `diff` computes how many voices are *missing*, but the loop
 * below runs 1..PolyphonicVoices regardless — confirm whether it should
 * iterate `diff` times instead.
 */
CreateVoices() { // MOVED HERE FROM INTERACTION //
    if (!this.IsASoundSource()) return;
    if (this.IsMyName(App.L10n.Blocks.Source.Blocks.Granular.name)) return;

    // Work out how many voices we actually need (we may already have some)
    let diff: number = App.Config.PolyphonicVoices - this.FreeVoices.length;

    // If we haven't got enough sources, create however many we need.
    if (diff > 0){

        // Loop through and create the voices
        for (let i = 1; i <= App.Config.PolyphonicVoices; i++) {

            // Create a source and an envelope
            let s: Tone.Source = this.CreateSource();
            let e: Tone.AmplitudeEnvelope = this.CreateEnvelope();

            if (e) {
                // Connect the source to the Envelope and start
                s.connect(e);
                s.start();

                // Connect Envelope to the Effects Chain
                e.connect(this.AudioInput);
            } else {
                // No CreateEnvelope()
                // Check if it's a Sampler Source (they have their own envelopes built in)
                if (this.Sources[0] instanceof Tone.Simpler) {
                    e = this.Sources[i].envelope;
                    s.connect(this.AudioInput)
                }
            }

            // Add the source and envelope to our FreeVoices list
            this.FreeVoices.push( new Voice(i) );
        }
    }
}
// RESET VOICES BACK TO 1 (This is a temporary fix due to issue #289) //
// TODO: temp fix until we properly look at voices, could we maybe make switching back from poly remove the extra voices and just call the CreateVoices function every time we switch to poly?
/**
 * Collapse back to a single voice when leaving polyphonic mode:
 * deactivate everything, release all envelopes, then truncate the pools.
 */
RemoveExtraVoices() {
    if (!this.IsASoundSource()) return;
    if (this.IsMyName(App.L10n.Blocks.Source.Blocks.Granular.name)) return;
    this.DeactivateAllVoices();
    this.TriggerRelease('all');
    // Truncate in place: keep one free voice, no active voices.
    this.FreeVoices.length = 1;
    this.ActiveVoices.length = 0;
}
//-------------------------------------------------------------------------------------------
// PITCH
//-------------------------------------------------------------------------------------------
// Converts a MIDI note number to a frequency in Hz (delegates to Tone.js).
MidiToFrequency(note: number) {
    const tone = App.Audio.Tone;
    return tone.midiToFrequency(note);
}
// Converts a MIDI note into a playback-rate ratio relative to the app's
// default note (delegates to Tone.js interval conversion).
MidiToPlayback(note: number) {
    const semitones = note - App.Config.DefaultNote;
    return App.Audio.Tone.intervalToFrequencyRatio(semitones);
}
// ADD UP SOURCE PITCH MODS //
// Sums every pitch-affecting parameter on this source that is set.
// Truthiness check is deliberate: an unset (undefined) or zero param adds 0.
GetSourcePitchMods() {
    let total = 0;
    const p = this.Params;
    for (const value of [p.transpose, p.fine, p.playbackRate, p.detune]) {
        if (value) {
            total += value;
        }
    }
    return total;
}
// ADD UP CONNECTED PITCH MODS //
// Sums the pitch modifiers contributed by every attached Controller block.
GetConnectedPitchMods() {
    let total = 0;
    const connections: IEffect[] = this.Connections.ToArray();
    for (const connection of connections) {
        if (connection instanceof Controller) {
            total += connection.GetPitchMods();
        }
    }
    return total;
}
// SET A VOICE'S PITCH //
// Ramps one voice (or, for granular sources, every grain) to `note` after
// applying all connected and source pitch modifiers. `glide` is the ramp
// time; omitted means an immediate (0) ramp.
SetPitch(note: number, voiceNo?: number, glide?: Tone.Time) {
    // voiceNo of 0 is falsy, but the fallback also selects voice 0, so this
    // is harmless.
    const voiceNumber: number = voiceNo ? voiceNo : 0;
    const time: Tone.Time = glide ? glide : 0;
    // CONVERT PITCH //
    var pitch = note + this.GetConnectedPitchMods() + this.GetSourcePitchMods();
    const frequency = this.MidiToFrequency(pitch);
    const playback = this.MidiToPlayback(pitch);
    // OSCILLATOR // — sources exposing a frequency signal ramp in Hz.
    if (this.Sources[voiceNumber].frequency) {
        this.Sources[voiceNumber].frequency.linearRampToValue(frequency, time);
    }
    // SAMPLER // — pitch is changed via the player's playback rate instead.
    else if (this.Sources[voiceNumber].player) {
        // Safari can't ramp the playbackRate signal, so set it directly.
        if ((<any>Tone).isSafari){
            this.Sources[voiceNumber].player.playbackRate = playback;
        } else {
            this.Sources[voiceNumber].player.playbackRate.linearRampToValue(playback, time);
        }
    }
    // PLAYER //
    // NOTE(review): the guard inspects Sources[0].playbackRate but the body
    // indexes Sources[voiceNumber] — presumably all voices share the same
    // source type; confirm.
    else if (this.Sources[0].playbackRate instanceof Tone.Signal) {
        if ((<any>Tone).isSafari){
            this.Sources[voiceNumber].playbackRate = playback;
        } else {
            this.Sources[voiceNumber].playbackRate.linearRampToValue(playback, time);
        }
    }
    // GRANULAR // — apply the same playback rate to every grain.
    else if (this.Grains) {
        for (var i=0; i<this.Grains.length; i++) {
            if ((<any>Tone).isSafari) {
                (<any>this.Grains[i]).playbackRate = playback;
            } else {
                this.Grains[i].playbackRate.linearRampToValue(playback, time);
            }
        }
    }
}
//-------------------------------------------------------------------------------------------
// INTERACTION
//-------------------------------------------------------------------------------------------
// Pressing the block powers it (base class handles selection/drag state).
MouseDown() {
    super.MouseDown();
    this.AddPower();
}
// Releasing the block removes the power added on MouseDown.
MouseUp() {
    super.MouseUp();
    this.RemovePower();
}
//-------------------------------------------------------------------------------------------
// POWER
//-------------------------------------------------------------------------------------------
// Registers one more power source feeding this block. Only the 0 -> 1
// transition has side effects: refresh the connection lines and trigger
// the note for sound sources.
AddPower() {
    this.PowerAmount += 1;
    if (this.PowerAmount !== 1) return;
    if (App.MainScene) {
        App.MainScene.ConnectionLines.UpdateList();
    }
    if (this.IsASoundSource()) {
        this.NoteOn("power");
    }
}
// Unregisters one power source. Spurious calls at zero are ignored; only
// the 1 -> 0 transition refreshes connection lines and releases the note.
RemovePower() {
    if (this.PowerAmount === 0) return;
    this.PowerAmount -= 1;
    if (this.PowerAmount !== 0) return;
    if (App.MainScene) {
        App.MainScene.ConnectionLines.UpdateList();
    }
    if (this.IsASoundSource()) {
        this.NoteOff("power");
    }
}
/**
 * Checks whether the block is connected to at least one Power source.
 * @returns {boolean}
 */
IsPowered(): boolean {
    return this.PowerAmount > 0;
}
/**
 * When a particle hits a source it powers the block for one pulse length
 * (triggering attack, then release) and consumes the particle.
 * @param particle
 */
ParticleCollision(particle: Particle) {
    super.ParticleCollision(particle);
    this.AddPower();
    // Arrow function keeps lexical `this` — replaces the old
    // `var that = this` + anonymous function workaround.
    setTimeout(() => this.RemovePower(), App.Config.PulseLength);
    particle.Dispose();
}
//-------------------------------------------------------------------------------------------
// DISPOSE
//-------------------------------------------------------------------------------------------
/**
 * Disposes the audio nodes and releases every voice so nothing keeps
 * audio resources alive after the block is removed.
 */
Dispose() {
    super.Dispose();
    // Delete Signal nodes
    if (this.AudioInput) this.AudioInput.dispose();
    //if (this.EffectsChainOutput) this.EffectsChainOutput.dispose();
    this.DeactivateAllVoices();
    // Null out voice IDs and drop both pools. The length guards preserve the
    // original behavior of leaving an already-empty array untouched.
    if (this.ActiveVoices.length) {
        for (const voice of this.ActiveVoices) {
            voice.ID = null;
        }
        this.ActiveVoices = [];
    }
    if (this.FreeVoices.length) {
        for (const voice of this.FreeVoices) {
            voice.ID = null;
        }
        this.FreeVoices = [];
    }
}
} | the_stack |
import { Readable } from 'stream';
import { Test, TestingModule } from '@nestjs/testing';
import { ConfigService } from '@nestjs/config';
import * as winston from 'winston';
import { PrismaService } from 'nestjs-prisma';
import { StorageService } from '@codebrew/nestjs-storage';
import { WINSTON_MODULE_PROVIDER } from 'nest-winston';
import { orderBy } from 'lodash';
import { Prisma } from '@prisma/client';
import {
ACTION_JOB_DONE_LOG,
GENERATE_STEP_MESSAGE,
GENERATE_STEP_NAME,
ACTION_ZIP_LOG,
BuildService,
ENTITIES_INCLUDE,
BUILD_DOCKER_IMAGE_STEP_MESSAGE,
BUILD_DOCKER_IMAGE_STEP_NAME,
BUILD_DOCKER_IMAGE_STEP_START_LOG,
BUILD_DOCKER_IMAGE_STEP_RUNNING_LOG,
BUILD_DOCKER_IMAGE_STEP_FINISH_LOG,
BUILD_DOCKER_IMAGE_STEP_FAILED_LOG,
ACTION_INCLUDE
} from './build.service';
import * as DataServiceGenerator from '@amplication/data-service-generator';
import { ContainerBuilderService } from '@amplication/container-builder/dist/nestjs';
import { EntityService } from '..';
import { AppRoleService } from '../appRole/appRole.service';
import { AppService } from '../app/app.service';
import { ActionService } from '../action/action.service';
import { LocalDiskService } from '../storage/local.disk.service';
import { Build } from './dto/Build';
import { getBuildTarGzFilePath, getBuildZipFilePath } from './storage';
import { FindOneBuildArgs } from './dto/FindOneBuildArgs';
import { BuildNotFoundError } from './errors/BuildNotFoundError';
import { DeploymentService } from '../deployment/deployment.service';
import { UserService } from '../user/user.service';
import {
BuildResult,
EnumBuildStatus as ContainerBuildStatus
} from '@amplication/container-builder/dist/';
import { EnumBuildStatus } from 'src/core/build/dto/EnumBuildStatus';
import { App, Commit, Entity } from 'src/models';
import {
ActionStep,
EnumActionLogLevel,
EnumActionStepStatus
} from '../action/dto';
import { Deployment } from '../deployment/dto/Deployment';
import { EnumDeploymentStatus } from '../deployment/dto/EnumDeploymentStatus';
import { Environment } from '../environment/dto';
import { GithubService } from '../github/github.service';
import { AppSettingsService } from '../appSettings/appSettings.service';
import { AppSettingsValues } from '../appSettings/constants';
import { EnumAuthProviderType } from '../appSettings/dto/EnumAuthenticationProviderType';
// Module-level mocks: stub out winston and the data-service generator so the
// suite never creates real loggers or generates code. The transport/logger
// spies let tests assert that BuildService wires up and tears down logging.
jest.mock('winston');
jest.mock('@amplication/data-service-generator');
const winstonConsoleTransportOnMock = jest.fn();
const MOCK_CONSOLE_TRANSPORT = {
  on: winstonConsoleTransportOnMock
};
const winstonLoggerDestroyMock = jest.fn();
const MOCK_LOGGER = {
  destroy: winstonLoggerDestroyMock
};
// eslint-disable-next-line
// @ts-ignore
winston.createLogger.mockImplementation(() => MOCK_LOGGER);
// eslint-disable-next-line
// @ts-ignore
winston.transports.Console = jest.fn(() => MOCK_CONSOLE_TRANSPORT);
// ---------------------------------------------------------------------------
// Shared fixtures: ids, dates, example domain objects (builds, deployments,
// entities) and jest mock functions injected into the Nest testing module.
// ---------------------------------------------------------------------------
const EXAMPLE_COMMIT_ID = 'exampleCommitId';
const EXAMPLE_BUILD_ID = 'ExampleBuildId';
const EXAMPLE_USER_ID = 'ExampleUserId';
const EXAMPLE_ENTITY_VERSION_ID = 'ExampleEntityVersionId';
const EXAMPLE_APP_ID = 'exampleAppId';
const EXAMPLE_DATE = new Date('2020-01-01');
const JOB_STARTED_LOG = 'Build job started';
const JOB_DONE_LOG = 'Build job done';
const EXAMPLE_DEPLOYMENT_ID = 'exampleDeploymentId';
const EXAMPLE_ENVIRONMENT_ID = 'exampleEnvironmentId';
const EXAMPLE_DEPLOYMENT_MESSAGE = 'exampleDeploymentMessage';
const EXAMPLE_ACTION_ID = 'exampleActionId';
const EXAMPLE_ENVIRONMENT_NAME = 'exampleEnvironmentName';
const EXAMPLE_ADDRESS = 'exampleAddress';
const EXAMPLE_MESSAGE = 'exampleMessage';
const EXAMPLE_APP_SETTINGS_VALUES: AppSettingsValues = {
  dbHost: 'localhost',
  dbName: 'myDb',
  dbPassword: '1234',
  dbPort: 5432,
  dbUser: 'admin',
  appId: EXAMPLE_APP_ID,
  authProvider: EnumAuthProviderType.Http
};
const EXAMPLE_COMMIT: Commit = {
  id: EXAMPLE_COMMIT_ID,
  createdAt: new Date(),
  userId: EXAMPLE_USER_ID,
  message: EXAMPLE_MESSAGE
};
// Action steps in every lifecycle state (running/success/failed) for both the
// code-generation step and the docker-image step.
const EXAMPLE_GENERATE_STEP = {
  id: 'ExampleActionStepId',
  createdAt: new Date(),
  message: GENERATE_STEP_MESSAGE,
  name: GENERATE_STEP_NAME,
  status: EnumActionStepStatus.Running
};
const EXAMPLE_COMPLETED_GENERATE_STEP = {
  ...EXAMPLE_GENERATE_STEP,
  status: EnumActionStepStatus.Success,
  completedAt: new Date()
};
const EXAMPLE_FAILED_GENERATE_STEP = {
  ...EXAMPLE_GENERATE_STEP,
  status: EnumActionStepStatus.Failed,
  completedAt: new Date()
};
const EXAMPLE_DOCKER_IMAGE_STEP = {
  id: 'ExampleDockerImageStep',
  createdAt: new Date(),
  message: BUILD_DOCKER_IMAGE_STEP_MESSAGE,
  name: BUILD_DOCKER_IMAGE_STEP_NAME,
  status: EnumActionStepStatus.Running
};
const EXAMPLE_COMPLETED_DOCKER_IMAGE_STEP = {
  ...EXAMPLE_DOCKER_IMAGE_STEP,
  status: EnumActionStepStatus.Success,
  completedAt: new Date()
};
const EXAMPLE_ACTION = {
  id: 'ExampleActionId',
  createdAt: new Date(),
  steps: [EXAMPLE_GENERATE_STEP]
};
// Builds in every status the service can report: draft, completed, running,
// failed, delayed (docker step still running) and invalid (no action).
const EXAMPLE_BUILD: Build = {
  id: EXAMPLE_BUILD_ID,
  createdAt: EXAMPLE_DATE,
  userId: EXAMPLE_USER_ID,
  appId: EXAMPLE_APP_ID,
  version: '1.0.0',
  message: 'new build',
  actionId: EXAMPLE_ACTION.id,
  images: [],
  commitId: EXAMPLE_COMMIT_ID,
  action: EXAMPLE_ACTION
};
const EXAMPLE_COMPLETED_BUILD: Build = {
  ...EXAMPLE_BUILD,
  id: 'ExampleSuccessfulBuild',
  containerStatusQuery: true,
  containerStatusUpdatedAt: new Date(),
  action: {
    id: 'ExampleSuccessfulBuildAction',
    createdAt: new Date(),
    steps: [
      EXAMPLE_COMPLETED_GENERATE_STEP,
      EXAMPLE_COMPLETED_DOCKER_IMAGE_STEP
    ]
  }
};
const EXAMPLE_RUNNING_BUILD: Build = {
  ...EXAMPLE_BUILD,
  id: 'ExampleRunningBuild',
  containerStatusQuery: true,
  containerStatusUpdatedAt: new Date()
};
const EXAMPLE_FAILED_BUILD: Build = {
  ...EXAMPLE_BUILD,
  id: 'ExampleFailedBuild',
  action: {
    id: 'ExampleFailedBuildAction',
    createdAt: new Date(),
    steps: [EXAMPLE_FAILED_GENERATE_STEP]
  }
};
const EXAMPLE_RUNNING_DELAYED_BUILD = {
  ...EXAMPLE_RUNNING_BUILD,
  id: 'ExampleRunningDelayedBuild',
  action: {
    id: 'ExampleRunningDelayedBuildAction',
    createdAt: new Date(),
    steps: [EXAMPLE_GENERATE_STEP, EXAMPLE_DOCKER_IMAGE_STEP]
  }
};
const EXAMPLE_INVALID_BUILD: Build = {
  ...EXAMPLE_BUILD,
  id: 'ExampleInvalidBuild',
  action: undefined
};
const EXAMPLE_ENVIRONMENT: Environment = {
  id: EXAMPLE_ENVIRONMENT_ID,
  createdAt: new Date(),
  updatedAt: new Date(),
  name: EXAMPLE_ENVIRONMENT_NAME,
  address: EXAMPLE_ADDRESS,
  appId: EXAMPLE_APP_ID
};
const EXAMPLE_DEPLOYMENT: Deployment = {
  id: EXAMPLE_DEPLOYMENT_ID,
  status: EnumDeploymentStatus.Waiting,
  createdAt: new Date(),
  userId: EXAMPLE_USER_ID,
  buildId: EXAMPLE_BUILD_ID,
  environmentId: EXAMPLE_ENVIRONMENT_ID,
  message: EXAMPLE_DEPLOYMENT_MESSAGE,
  actionId: EXAMPLE_ACTION_ID,
  build: EXAMPLE_BUILD,
  environment: EXAMPLE_ENVIRONMENT
};
const EXAMPLE_USER = {
  id: EXAMPLE_USER_ID
};
const EXAMPLE_COMPLETED_BUILD_RESULT: BuildResult = {
  status: ContainerBuildStatus.Completed
};
const EXAMPLE_FAILED_BUILD_RESULT: BuildResult = {
  status: ContainerBuildStatus.Failed
};
const EXAMPLE_RUNNING_BUILD_RESULT: BuildResult = {
  status: ContainerBuildStatus.Running
};
const EXAMPLE_APP_ROLES = [];
const EXAMPLE_APP: App = {
  id: 'exampleAppId',
  createdAt: new Date(),
  updatedAt: new Date(),
  name: 'exampleAppName',
  description: 'example App Description',
  color: '#20A4F3',
  githubSyncEnabled: false
};
const EXAMPLE_BUILD_INCLUDE_APP_AND_COMMIT: Build = {
  ...EXAMPLE_BUILD,
  commit: EXAMPLE_COMMIT,
  app: EXAMPLE_APP
};
// The build version is derived from the last 8 characters of the commit id,
// mirroring the service's own versioning scheme.
const commitId = EXAMPLE_COMMIT_ID;
const version = commitId.slice(commitId.length - 8);
const EXAMPLE_CREATE_INITIAL_STEP_DATA = {
  message: 'Adding task to queue',
  name: 'ADD_TO_QUEUE',
  status: EnumActionStepStatus.Success,
  completedAt: EXAMPLE_DATE,
  logs: {
    create: [
      {
        level: EnumActionLogLevel.Info,
        message: 'create build generation task',
        meta: {}
      },
      {
        level: EnumActionLogLevel.Info,
        message: `Build Version: ${version}`,
        meta: {}
      },
      {
        level: EnumActionLogLevel.Info,
        message: `Build message: ${EXAMPLE_BUILD.message}`,
        meta: {}
      }
    ]
  }
};
const EXAMPLE_MODULES = [];
// ---------------------------------------------------------------------------
// Jest mock functions for every collaborator (Prisma, entity/app/role
// services, action runner, container builder, storage, logger, ...).
// ---------------------------------------------------------------------------
const prismaBuildCreateMock = jest.fn(
  () => EXAMPLE_BUILD_INCLUDE_APP_AND_COMMIT
);
const prismaBuildFindOneMock = jest.fn();
const prismaBuildFindManyMock = jest.fn(() => {
  return [EXAMPLE_BUILD];
});
const prismaBuildUpdateMock = jest.fn();
const entityServiceGetLatestVersionsMock = jest.fn(() => {
  return [{ id: EXAMPLE_ENTITY_VERSION_ID }];
});
const EXAMPLE_FIRST_ENTITY_NAME = 'AA First Entity';
const EXAMPLE_SECOND_ENTITY_NAME = 'BB second Entity';
// Deliberately listed out of creation order so tests can assert the service
// sorts entities by createdAt before generating code.
const EXAMPLE_ENTITIES: Entity[] = [
  {
    id: 'EXAMPLE_SECOND_ID',
    createdAt: new Date('2020-02-17 18:20:20'),
    updatedAt: new Date(),
    appId: 'exampleAppId',
    name: EXAMPLE_SECOND_ENTITY_NAME,
    displayName: 'Second entity',
    pluralDisplayName: 'Second entity plural display name'
  },
  {
    id: 'EXAMPLE_FIRST_ID',
    createdAt: new Date('2020-02-10 18:20:20'), //created first
    updatedAt: new Date(),
    appId: 'exampleAppId',
    name: EXAMPLE_FIRST_ENTITY_NAME,
    displayName: 'First entity',
    pluralDisplayName: 'First entity plural display name'
  }
];
const entityServiceGetEntitiesByVersionsMock = jest.fn(() => EXAMPLE_ENTITIES);
const appRoleServiceGetAppRolesMock = jest.fn(() => EXAMPLE_APP_ROLES);
const appServiceGetAppMock = jest.fn(() => EXAMPLE_APP);
const EXAMPLE_ACTION_STEP: ActionStep = {
  id: 'EXAMPLE_ACTION_STEP_ID',
  name: 'EXAMPLE_ACTION_STEP_NAME',
  createdAt: new Date(),
  message: 'EXAMPLE_ACTION_STEP_MESSAGE',
  status: EnumActionStepStatus.Running
};
const EXAMPLE_FAILED_ACTION_STEP: ActionStep = {
  ...EXAMPLE_ACTION_STEP,
  status: EnumActionStepStatus.Failed
};
const deploymentFindManyMock = jest.fn();
// ActionService.run is mocked to immediately invoke the supplied step
// function with a fixed running step, so the service's step bodies execute
// synchronously inside the tests.
const actionServiceRunMock = jest.fn(
  async (
    actionId: string,
    stepName: string,
    message: string,
    stepFunction: (step: { id: string }) => Promise<any>,
    leaveStepOpenAfterSuccessfulExecution = false
  ) => {
    return stepFunction(EXAMPLE_ACTION_STEP);
  }
);
const actionServiceLogInfoMock = jest.fn();
const actionServiceLogMock = jest.fn();
const EXAMPLE_DOCKER_BUILD_RESULT_RUNNING: BuildResult = {
  status: ContainerBuildStatus.Running,
  statusQuery: { id: 'buildId' }
};
const containerBuilderServiceBuildMock = jest.fn(
  () => EXAMPLE_DOCKER_BUILD_RESULT_RUNNING
);
const EXAMPLE_STREAM = new Readable();
const EXAMPLE_URL = 'EXAMPLE_URL';
const storageServiceDiskExistsMock = jest.fn(() => ({ exists: true }));
const storageServiceDiskStreamMock = jest.fn(() => EXAMPLE_STREAM);
const storageServiceDiskPutMock = jest.fn();
const storageServiceDiskGetUrlMock = jest.fn(() => EXAMPLE_URL);
const EXAMPLE_LOCAL_DISK = {
  config: {
    root: 'EXAMPLE_ROOT'
  }
};
const localDiskServiceGetDiskMock = jest.fn(() => EXAMPLE_LOCAL_DISK);
const EXAMPLED_HOST = 'http://localhost';
const configServiceGetMock = jest.fn(() => EXAMPLED_HOST);
const loggerErrorMock = jest.fn(error => {
  // Write the error to console so it will be visible for who runs the test
  console.error(error);
});
const loggerChildInfoMock = jest.fn();
const loggerChildErrorMock = jest.fn(error => {
  // Write the error to console so it will be visible for who runs the test
  console.error(error);
});
const loggerChildMock = jest.fn(() => ({
  info: loggerChildInfoMock,
  error: loggerChildErrorMock
}));
const EXAMPLE_LOGGER_FORMAT = Symbol('EXAMPLE_LOGGER_FORMAT');
const containerBuilderServiceGetStatusMock = jest.fn(
  () => EXAMPLE_DOCKER_BUILD_RESULT_RUNNING
);
const createImageIdMock = jest.fn(tag => tag);
const actionServiceCompleteMock = jest.fn(() => ({}));
const userServiceFindUserMock = jest.fn(() => EXAMPLE_USER);
const deploymentAutoDeployToSandboxMock = jest.fn(() => EXAMPLE_DEPLOYMENT);
const getAppSettingsValuesMock = jest.fn(() => EXAMPLE_APP_SETTINGS_VALUES);
// Unit tests for BuildService: creation, lookup, download streaming, build
// status calculation and container-builder result handling. All collaborators
// are replaced by the jest mocks defined above.
describe('BuildService', () => {
  let service: BuildService;
  beforeEach(async () => {
    jest.clearAllMocks();
    // Build a fresh Nest testing module per test with every dependency mocked.
    const module: TestingModule = await Test.createTestingModule({
      providers: [
        BuildService,
        {
          provide: ConfigService,
          useValue: {
            get: configServiceGetMock
          }
        },
        {
          provide: PrismaService,
          useValue: {
            build: {
              create: prismaBuildCreateMock,
              findMany: prismaBuildFindManyMock,
              findUnique: prismaBuildFindOneMock,
              update: prismaBuildUpdateMock
            }
          }
        },
        {
          provide: StorageService,
          useValue: {
            registerDriver() {
              return;
            },
            getDisk() {
              return {
                exists: storageServiceDiskExistsMock,
                getStream: storageServiceDiskStreamMock,
                put: storageServiceDiskPutMock,
                getUrl: storageServiceDiskGetUrlMock
              };
            }
          }
        },
        {
          provide: EntityService,
          useValue: {
            getLatestVersions: entityServiceGetLatestVersionsMock,
            getEntitiesByVersions: entityServiceGetEntitiesByVersionsMock
          }
        },
        {
          provide: AppRoleService,
          useValue: {
            getAppRoles: appRoleServiceGetAppRolesMock
          }
        },
        {
          provide: AppService,
          useValue: {
            app: appServiceGetAppMock
          }
        },
        {
          provide: ActionService,
          useValue: {
            run: actionServiceRunMock,
            logInfo: actionServiceLogInfoMock,
            complete: actionServiceCompleteMock
          }
        },
        {
          provide: ContainerBuilderService,
          useValue: {
            build: containerBuilderServiceBuildMock,
            getStatus: containerBuilderServiceGetStatusMock,
            createImageId: createImageIdMock
          }
        },
        {
          provide: LocalDiskService,
          useValue: {
            getDisk: localDiskServiceGetDiskMock
          }
        },
        {
          provide: DeploymentService,
          useValue: {
            findMany: deploymentFindManyMock,
            autoDeployToSandbox: deploymentAutoDeployToSandboxMock,
            canDeploy: true
          }
        },
        {
          provide: UserService,
          useValue: {
            findUser: userServiceFindUserMock
          }
        },
        {
          provide: AppSettingsService,
          useValue: {
            getAppSettingsValues: getAppSettingsValuesMock
          }
        },
        {
          provide: GithubService,
          useValue: {}
        },
        {
          provide: WINSTON_MODULE_PROVIDER,
          useValue: {
            error: loggerErrorMock,
            child: loggerChildMock,
            format: EXAMPLE_LOGGER_FORMAT
          }
        }
      ]
    }).compile();
    service = module.get<BuildService>(BuildService);
  });
  test('should be defined', () => {
    expect(service).toBeDefined();
  });
  // End-to-end happy path of create(): persists the build, generates code,
  // zips it, kicks off the docker build and logs every step.
  test('creates build', async () => {
    // eslint-disable-next-line
    // @ts-ignore
    DataServiceGenerator.createDataService.mockImplementation(
      () => EXAMPLE_MODULES
    );
    const args = {
      data: {
        createdBy: {
          connect: {
            id: EXAMPLE_USER_ID
          }
        },
        app: {
          connect: {
            id: EXAMPLE_APP_ID
          }
        },
        message: EXAMPLE_BUILD.message,
        commit: {
          connect: {
            id: EXAMPLE_COMMIT_ID
          }
        }
      }
    };
    const commitId = EXAMPLE_COMMIT_ID;
    const version = commitId.slice(commitId.length - 8);
    const latestEntityVersions = [{ id: EXAMPLE_ENTITY_VERSION_ID }];
    expect(await service.create(args)).toEqual(
      EXAMPLE_BUILD_INCLUDE_APP_AND_COMMIT
    );
    expect(entityServiceGetLatestVersionsMock).toBeCalledTimes(1);
    expect(entityServiceGetLatestVersionsMock).toBeCalledWith({
      where: { app: { id: EXAMPLE_APP_ID } }
    });
    expect(prismaBuildCreateMock).toBeCalledTimes(1);
    expect(prismaBuildCreateMock).toBeCalledWith({
      ...args,
      data: {
        ...args.data,
        version,
        createdAt: expect.any(Date),
        blockVersions: {
          connect: []
        },
        entityVersions: {
          connect: latestEntityVersions.map(version => ({ id: version.id }))
        },
        action: {
          create: {
            steps: {
              create: {
                ...EXAMPLE_CREATE_INITIAL_STEP_DATA,
                completedAt: expect.any(Date)
              }
            }
          }
        }
      },
      include: {
        commit: true,
        app: true
      }
    });
    expect(loggerChildMock).toBeCalledTimes(1);
    expect(loggerChildMock).toBeCalledWith({
      buildId: EXAMPLE_BUILD_ID
    });
    expect(loggerChildInfoMock).toBeCalledTimes(2);
    expect(loggerChildInfoMock).toBeCalledWith(JOB_STARTED_LOG);
    expect(loggerChildInfoMock).toBeCalledWith(JOB_DONE_LOG);
    // NOTE(review): the four logger assertions below duplicate the ones just
    // above (loggerChildMock / loggerChildInfoMock already verified) — they
    // could be removed without losing coverage.
    expect(loggerChildMock).toBeCalledTimes(1);
    expect(loggerChildMock).toBeCalledWith({
      buildId: EXAMPLE_BUILD_ID
    });
    expect(loggerChildInfoMock).toBeCalledTimes(2);
    expect(loggerChildInfoMock.mock.calls).toEqual([
      [JOB_STARTED_LOG],
      [JOB_DONE_LOG]
    ]);
    expect(loggerChildErrorMock).toBeCalledTimes(0);
    expect(appServiceGetAppMock).toBeCalledTimes(1);
    expect(appServiceGetAppMock).toBeCalledWith({
      where: { id: EXAMPLE_APP_ID }
    });
    expect(entityServiceGetEntitiesByVersionsMock).toBeCalledTimes(1);
    expect(entityServiceGetEntitiesByVersionsMock).toBeCalledWith({
      where: {
        builds: {
          some: {
            id: EXAMPLE_BUILD_ID
          }
        }
      },
      include: ENTITIES_INCLUDE
    });
    expect(appRoleServiceGetAppRolesMock).toBeCalledTimes(1);
    expect(appRoleServiceGetAppRolesMock).toBeCalledWith({
      where: {
        app: {
          id: EXAMPLE_APP_ID
        }
      }
    });
    expect(DataServiceGenerator.createDataService).toBeCalledTimes(1);
    expect(DataServiceGenerator.createDataService).toBeCalledWith(
      orderBy(EXAMPLE_ENTITIES, entity => entity.createdAt),
      EXAMPLE_APP_ROLES,
      {
        name: EXAMPLE_APP.name,
        description: EXAMPLE_APP.description,
        version: EXAMPLE_BUILD.version,
        id: EXAMPLE_APP.id,
        url: `${EXAMPLED_HOST}/${EXAMPLE_APP.id}`,
        settings: EXAMPLE_APP_SETTINGS_VALUES
      },
      MOCK_LOGGER
    );
    expect(winstonLoggerDestroyMock).toBeCalledTimes(1);
    expect(winstonLoggerDestroyMock).toBeCalledWith();
    expect(actionServiceRunMock).toBeCalledTimes(2);
    expect(actionServiceRunMock.mock.calls).toEqual([
      [
        EXAMPLE_BUILD.actionId,
        GENERATE_STEP_NAME,
        GENERATE_STEP_MESSAGE,
        expect.any(Function)
      ],
      [
        EXAMPLE_BUILD.actionId,
        BUILD_DOCKER_IMAGE_STEP_NAME,
        BUILD_DOCKER_IMAGE_STEP_MESSAGE,
        expect.any(Function),
        true
      ]
    ]);
    expect(actionServiceLogInfoMock).toBeCalledTimes(4);
    expect(actionServiceLogInfoMock.mock.calls).toEqual([
      [EXAMPLE_ACTION_STEP, ACTION_ZIP_LOG],
      [EXAMPLE_ACTION_STEP, ACTION_JOB_DONE_LOG],
      [EXAMPLE_ACTION_STEP, BUILD_DOCKER_IMAGE_STEP_START_LOG],
      [EXAMPLE_ACTION_STEP, BUILD_DOCKER_IMAGE_STEP_RUNNING_LOG]
    ]);
    expect(actionServiceLogMock).toBeCalledTimes(0);
    expect(storageServiceDiskGetUrlMock).toBeCalledTimes(1);
    expect(storageServiceDiskGetUrlMock).toBeCalledWith(
      getBuildTarGzFilePath(EXAMPLE_BUILD.id)
    );
    expect(localDiskServiceGetDiskMock).toBeCalledTimes(0);
    expect(containerBuilderServiceBuildMock).toBeCalledTimes(1);
    expect(containerBuilderServiceBuildMock).toBeCalledWith({
      tags: [
        `${EXAMPLE_BUILD.appId}:${EXAMPLE_BUILD.id}`,
        `${EXAMPLE_BUILD.appId}:latest`
      ],
      cacheFrom: [`${EXAMPLE_BUILD.appId}:latest`],
      url: EXAMPLE_URL
    });
    expect(prismaBuildUpdateMock).toBeCalledTimes(1);
    expect(prismaBuildUpdateMock).toBeCalledWith({
      where: {
        id: EXAMPLE_BUILD_ID
      },
      data: {
        containerStatusQuery: EXAMPLE_DOCKER_BUILD_RESULT_RUNNING.statusQuery,
        containerStatusUpdatedAt: expect.any(Date)
      }
    });
    expect(winstonConsoleTransportOnMock).toBeCalledTimes(1);
    /** @todo add expect(winstonConsoleTransportOnMock).toBeCalledWith() */
    expect(winstonLoggerDestroyMock).toBeCalledTimes(1);
    expect(winstonLoggerDestroyMock).toBeCalledWith();
    expect(winston.createLogger).toBeCalledTimes(1);
    /** @todo add expect(winston.createLogger).toBeCalledWith() */
    expect(winston.transports.Console).toBeCalledTimes(1);
    expect(winston.transports.Console).toBeCalledWith();
  });
  test('find many builds', async () => {
    const args = {};
    expect(await service.findMany(args)).toEqual([EXAMPLE_BUILD]);
    expect(prismaBuildFindManyMock).toBeCalledTimes(1);
    expect(prismaBuildFindManyMock).toBeCalledWith(args);
  });
  test('find one build', async () => {
    prismaBuildFindOneMock.mockImplementation(() => EXAMPLE_BUILD);
    const args: FindOneBuildArgs = {
      where: {
        id: EXAMPLE_BUILD_ID
      }
    };
    expect(await service.findOne(args)).toEqual(EXAMPLE_BUILD);
    expect(prismaBuildFindOneMock).toBeCalledTimes(1);
    expect(prismaBuildFindOneMock).toBeCalledWith(args);
  });
  test('do not find non existing build', async () => {
    prismaBuildFindOneMock.mockImplementation(() => null);
    const args: FindOneBuildArgs = {
      where: {
        id: 'nonExistingId'
      }
    };
    expect(await service.findOne(args)).toEqual(null);
    expect(prismaBuildFindOneMock).toBeCalledTimes(1);
    expect(prismaBuildFindOneMock).toBeCalledWith(args);
  });
  test('create download stream for build', async () => {
    // The mock returns a thenable with a fluent `action().steps()` chain,
    // matching how the service inspects the generate step's status.
    prismaBuildFindOneMock.mockImplementation(() =>
      Object.assign(Promise.resolve(EXAMPLE_BUILD), {
        action: () => ({
          steps: () => [
            {
              name: GENERATE_STEP_NAME,
              status: EnumActionStepStatus.Success
            }
          ]
        })
      })
    );
    const args: FindOneBuildArgs = {
      where: {
        id: EXAMPLE_COMPLETED_BUILD.id
      }
    };
    expect(await service.download(args)).toEqual(EXAMPLE_STREAM);
    expect(prismaBuildFindOneMock).toBeCalledTimes(2);
    expect(prismaBuildFindOneMock).toBeCalledWith(args);
    const buildFilePath = getBuildZipFilePath(EXAMPLE_COMPLETED_BUILD.id);
    expect(storageServiceDiskExistsMock).toBeCalledTimes(1);
    expect(storageServiceDiskExistsMock).toBeCalledWith(buildFilePath);
    expect(storageServiceDiskStreamMock).toBeCalledTimes(1);
    expect(storageServiceDiskStreamMock).toBeCalledWith(buildFilePath);
  });
  test('fail to create download stream for a non existing build', async () => {
    prismaBuildFindOneMock.mockImplementation(() => null);
    const args: FindOneBuildArgs = {
      where: {
        id: 'nonExistingId'
      }
    };
    await expect(service.download(args)).rejects.toThrow(BuildNotFoundError);
    expect(prismaBuildFindOneMock).toBeCalledTimes(1);
    expect(prismaBuildFindOneMock).toBeCalledWith(args);
    expect(storageServiceDiskExistsMock).toBeCalledTimes(0);
    expect(storageServiceDiskStreamMock).toBeCalledTimes(0);
  });
  /**
   * fail to get generated app archive for non existing step
   * fail to get generated app archive for uncompleted step
   */
  test('get deployments', async () => {
    // NOTE(review): this `await expect(...)` has no matcher (.resolves /
    // .toEqual), so it only invokes the call and asserts nothing about its
    // result — confirm intent and add a matcher.
    await expect(service.getDeployments(EXAMPLE_BUILD_ID, {}));
    expect(deploymentFindManyMock).toBeCalledTimes(1);
    expect(deploymentFindManyMock).toBeCalledWith({
      where: {
        build: {
          id: EXAMPLE_BUILD_ID
        }
      }
    });
  });
  it('should return invalid', async () => {
    prismaBuildFindOneMock.mockImplementation(() => EXAMPLE_INVALID_BUILD);
    const invalid = EnumBuildStatus.Invalid;
    const buildId = EXAMPLE_INVALID_BUILD.id;
    const findOneArgs = {
      where: { id: buildId },
      include: ACTION_INCLUDE
    };
    expect(await service.calcBuildStatus(buildId)).toEqual(invalid);
    expect(prismaBuildFindOneMock).toBeCalledTimes(1);
    expect(prismaBuildFindOneMock).toBeCalledWith(findOneArgs);
  });
  it('should return build status Running', async () => {
    prismaBuildFindOneMock.mockImplementation(() => EXAMPLE_RUNNING_BUILD);
    const buildId = EXAMPLE_RUNNING_BUILD.id;
    const findOneArgs = {
      where: { id: buildId },
      include: ACTION_INCLUDE
    };
    expect(await service.calcBuildStatus(buildId)).toEqual(
      EnumBuildStatus.Running
    );
    expect(prismaBuildFindOneMock).toBeCalledTimes(1);
    expect(prismaBuildFindOneMock).toBeCalledWith(findOneArgs);
  });
  it('should return build status Failed', async () => {
    prismaBuildFindOneMock.mockImplementation(() => EXAMPLE_FAILED_BUILD);
    const buildId = EXAMPLE_FAILED_BUILD.id;
    const findOneArgs = {
      where: { id: buildId },
      include: ACTION_INCLUDE
    };
    expect(await service.calcBuildStatus(buildId)).toEqual(
      EnumBuildStatus.Failed
    );
    expect(prismaBuildFindOneMock).toBeCalledTimes(1);
    expect(prismaBuildFindOneMock).toBeCalledWith(findOneArgs);
  });
  it('should return build status Completed', async () => {
    prismaBuildFindOneMock.mockImplementation(() => EXAMPLE_COMPLETED_BUILD);
    const buildId = EXAMPLE_COMPLETED_BUILD.id;
    const findOneArgs = {
      where: { id: buildId },
      include: ACTION_INCLUDE
    };
    expect(await service.calcBuildStatus(buildId)).toEqual(
      EnumBuildStatus.Completed
    );
    expect(prismaBuildFindOneMock).toBeCalledTimes(1);
    expect(prismaBuildFindOneMock).toBeCalledWith(findOneArgs);
  });
  it('should try to get build status and return Running', async () => {
    prismaBuildFindOneMock.mockImplementation(() => ({
      ...EXAMPLE_BUILD,
      action: {
        ...EXAMPLE_BUILD.action,
        steps: [EXAMPLE_ACTION_STEP]
      }
    }));
    expect(await service.calcBuildStatus(EXAMPLE_BUILD_ID)).toEqual(
      EnumBuildStatus.Running
    );
    expect(prismaBuildFindOneMock).toBeCalledTimes(1);
    expect(prismaBuildFindOneMock).toBeCalledWith({
      where: { id: EXAMPLE_BUILD_ID },
      include: ACTION_INCLUDE
    });
  });
  it('should try to get build status, catch an error and return Failed', async () => {
    prismaBuildFindOneMock.mockImplementation(() => ({
      ...EXAMPLE_BUILD,
      action: {
        ...EXAMPLE_BUILD.action,
        steps: [EXAMPLE_FAILED_ACTION_STEP]
      }
    }));
    expect(await service.calcBuildStatus(EXAMPLE_BUILD_ID)).toEqual(
      EnumBuildStatus.Failed
    );
    expect(prismaBuildFindOneMock).toBeCalledTimes(1);
    expect(prismaBuildFindOneMock).toBeCalledWith({
      where: { id: EXAMPLE_BUILD_ID },
      include: ACTION_INCLUDE
    });
  });
  it('should update running build status', async () => {
    prismaBuildFindManyMock.mockImplementation(() => [
      EXAMPLE_RUNNING_DELAYED_BUILD
    ]);
    const findManyArgs = {
      where: {
        containerStatusUpdatedAt: {
          lt: expect.any(Date)
        },
        action: {
          steps: {
            some: {
              status: {
                equals: EnumActionStepStatus.Running
              },
              name: {
                equals: BUILD_DOCKER_IMAGE_STEP_NAME
              }
            }
          }
        }
      },
      orderBy: {
        createdAt: Prisma.SortOrder.asc
      },
      include: ACTION_INCLUDE
    };
    expect(await service.updateRunningBuildsStatus()).toEqual(undefined);
    expect(prismaBuildFindManyMock).toBeCalledTimes(1);
    expect(prismaBuildFindManyMock).toBeCalledWith(findManyArgs);
    expect(containerBuilderServiceGetStatusMock).toBeCalledTimes(1);
    expect(containerBuilderServiceGetStatusMock).toBeCalledWith(
      EXAMPLE_RUNNING_BUILD.containerStatusQuery
    );
    expect(actionServiceLogInfoMock).toBeCalledTimes(1);
    expect(actionServiceLogInfoMock).toBeCalledWith(
      EXAMPLE_DOCKER_IMAGE_STEP,
      BUILD_DOCKER_IMAGE_STEP_RUNNING_LOG
    );
    expect(prismaBuildUpdateMock).toBeCalledTimes(1);
    expect(prismaBuildUpdateMock).toBeCalledWith({
      where: { id: EXAMPLE_RUNNING_DELAYED_BUILD.id },
      data: {
        containerStatusQuery: EXAMPLE_DOCKER_BUILD_RESULT_RUNNING.statusQuery,
        containerStatusUpdatedAt: expect.any(Date)
      }
    });
  });
  it('should try update running build status but catch an error', async () => {
    const EXAMPLE_ERROR = new Error('exampleError');
    prismaBuildFindManyMock.mockImplementation(() => [
      EXAMPLE_RUNNING_DELAYED_BUILD
    ]);
    containerBuilderServiceGetStatusMock.mockImplementation(() => {
      throw EXAMPLE_ERROR;
    });
    const findManyArgs = {
      where: {
        containerStatusUpdatedAt: {
          lt: expect.any(Date)
        },
        action: {
          steps: {
            some: {
              status: {
                equals: EnumActionStepStatus.Running
              },
              name: {
                equals: BUILD_DOCKER_IMAGE_STEP_NAME
              }
            }
          }
        }
      },
      orderBy: {
        createdAt: Prisma.SortOrder.asc
      },
      include: ACTION_INCLUDE
    };
    expect(await service.updateRunningBuildsStatus()).toEqual(undefined);
    expect(prismaBuildFindManyMock).toBeCalledTimes(1);
    expect(prismaBuildFindManyMock).toBeCalledWith(findManyArgs);
    expect(containerBuilderServiceGetStatusMock).toBeCalledTimes(1);
    expect(containerBuilderServiceGetStatusMock).toBeCalledWith(
      EXAMPLE_RUNNING_BUILD.containerStatusQuery
    );
    expect(actionServiceLogInfoMock).toBeCalledTimes(1);
    expect(actionServiceLogInfoMock).toBeCalledWith(
      EXAMPLE_DOCKER_IMAGE_STEP,
      EXAMPLE_ERROR
    );
    expect(actionServiceCompleteMock).toBeCalledTimes(1);
    expect(actionServiceCompleteMock).toBeCalledWith(
      EXAMPLE_DOCKER_IMAGE_STEP,
      EnumActionStepStatus.Failed
    );
  });
  it('should handle container builder completed result', async () => {
    expect(
      await service.handleContainerBuilderResult(
        EXAMPLE_BUILD,
        EXAMPLE_ACTION_STEP,
        EXAMPLE_COMPLETED_BUILD_RESULT
      )
    ).toEqual(undefined);
    expect(actionServiceLogInfoMock).toBeCalledTimes(1);
    expect(actionServiceLogInfoMock).toBeCalledWith(
      EXAMPLE_ACTION_STEP,
      BUILD_DOCKER_IMAGE_STEP_FINISH_LOG,
      { images: EXAMPLE_COMPLETED_BUILD_RESULT.images }
    );
    expect(actionServiceCompleteMock).toBeCalledTimes(1);
    expect(actionServiceCompleteMock).toBeCalledWith(
      EXAMPLE_ACTION_STEP,
      EnumActionStepStatus.Success
    );
    expect(prismaBuildUpdateMock).toBeCalledTimes(1);
    expect(prismaBuildUpdateMock).toBeCalledWith({
      where: { id: EXAMPLE_BUILD_ID },
      data: { images: { set: EXAMPLE_COMPLETED_BUILD_RESULT.images } }
    });
    expect(deploymentAutoDeployToSandboxMock).toBeCalledTimes(1);
    expect(deploymentAutoDeployToSandboxMock).toBeCalledWith(EXAMPLE_BUILD);
  });
  it('should handle container builder failed result', async () => {
    expect(
      await service.handleContainerBuilderResult(
        EXAMPLE_BUILD,
        EXAMPLE_ACTION_STEP,
        EXAMPLE_FAILED_BUILD_RESULT
      )
    ).toEqual(undefined);
    expect(actionServiceLogInfoMock).toBeCalledTimes(1);
    expect(actionServiceLogInfoMock).toBeCalledWith(
      EXAMPLE_ACTION_STEP,
      BUILD_DOCKER_IMAGE_STEP_FAILED_LOG
    );
    expect(actionServiceCompleteMock).toBeCalledTimes(1);
    expect(actionServiceCompleteMock).toBeCalledWith(
      EXAMPLE_ACTION_STEP,
      EnumActionStepStatus.Failed
    );
  });
  it('should handle container builder running result', async () => {
    expect(
      await service.handleContainerBuilderResult(
        EXAMPLE_BUILD,
        EXAMPLE_ACTION_STEP,
        EXAMPLE_RUNNING_BUILD_RESULT
      )
    ).toEqual(undefined);
    expect(actionServiceLogInfoMock).toBeCalledTimes(1);
    expect(actionServiceLogInfoMock).toBeCalledWith(
      EXAMPLE_ACTION_STEP,
      BUILD_DOCKER_IMAGE_STEP_RUNNING_LOG
    );
    expect(prismaBuildUpdateMock).toBeCalledTimes(1);
    expect(prismaBuildUpdateMock).toBeCalledWith({
      where: { id: EXAMPLE_BUILD_ID },
      data: {
        containerStatusQuery: EXAMPLE_RUNNING_BUILD_RESULT.statusQuery,
        containerStatusUpdatedAt: expect.any(Date)
      }
    });
  });
});
import * as d3 from 'd3';
import { interpolatePath } from 'd3-interpolate-path';
import './LinePlot.scss';
import { Plot } from '../../Interfaces/Plot';
import Utils from '../../Utils';
import { DataTypes, YAxisStates } from "../../Constants/Enums";
import { AxisState } from '../../Models/AxisState';
/**
 * Numeric line plot renderer. Draws, per aggregate and per split-by series:
 * the main value line, dashed "gap" segments across missing data, optional
 * value dots, an optional min/max envelope, and an optional gradient-filled
 * area. All elements are animated via d3 transitions unless disabled.
 */
class LinePlot extends Plot {
    private defs;                       // <defs> selection used for area gradients
    private chartWidth;
    private y;
    private visibleAggCount;            // number of visible aggregates (drives stacked-lane sizing)
    private strokeOpacity;              // NOTE(review): never assigned in render() — see note below
    private previousIncludeDots;        // Map<element, boolean> of last render's dot state
    private areaPath;                   // d3 area generator built by createAreaPath()
    private yAxisState: AxisState;
    constructor (svgSelection) {
        super(svgSelection);
        this.plotDataType = DataTypes.Numeric;
    }
    // X pixel position for a datum; bucketed data is centered within its bucket.
    private getXPosition (d, x) {
        var bucketSize = this.chartComponentData.displayState[d.aggregateKey].bucketSize;
        if (bucketSize) {
            return (x(d.dateTime) + x((new Date(d.dateTime.valueOf() + bucketSize)))) / 2;
        }
        return x(d.dateTime);
    }
    // Builds the area generator (top edge = value, bottom edge = chart bottom)
    // against the given y scale; stored on this.areaPath for the area render.
    private createAreaPath (y) {
        this.areaPath = d3.area()
            .curve(this.chartOptions.interpolationFunction)
            .defined( (d: any) => {
                return (d.measures !== null) &&
                    (d.measures[this.chartComponentData.getVisibleMeasure(d.aggregateKey, d.splitBy)] !== null);
            })
            .x((d: any) => {
                return this.getXPosition(d, this.x);
            })
            .y0((d: any) => {
                return d.measures ? y(d.measures[this.chartComponentData.getVisibleMeasure(d.aggregateKey, d.splitBy)]) : 0;
            })
            .y1(this.chartHeight);
    }
    // returns the next visibleAggI
    //
    // Renders one aggregate's lane: y scale + axis, then per-split-by groups
    // containing gap lines, the value line, dots, envelope, and area.
    // NOTE(review): the `strokeOpacity` and `areaPath` parameters are accepted
    // but never assigned to `this.strokeOpacity` / `this.areaPath`, so
    // `self.strokeOpacity` below is undefined (d3 then removes the attribute)
    // — confirm whether `this.strokeOpacity = strokeOpacity` was intended.
    public render (chartOptions, visibleAggI, agg, aggVisible: boolean, aggregateGroup, chartComponentData, yAxisState: AxisState,
        chartHeight, visibleAggCount, colorMap, previousAggregateData, x, areaPath, strokeOpacity, y, yMap, defs, chartDataOptions,
        previousIncludeDots, yTopAndHeight, svgSelection, categoricalMouseover, categoricalMouseout, yAxisOnClick) {
        this.previousIncludeDots = previousIncludeDots;
        this.defs = defs;
        this.chartOptions = chartOptions;
        this.chartHeight = chartHeight;
        this.visibleAggCount = visibleAggCount;
        this.chartComponentData = chartComponentData;
        this.x = x;
        this.y = y;
        let aggKey = agg.aggKey;
        this.aggregateGroup = aggregateGroup;
        const yAxisHasOnClick = yAxisOnClick && typeof yAxisOnClick === "function";
        visibleAggI = yAxisState.positionInGroup;
        this.yTop = yTopAndHeight[0];
        this.height = yTopAndHeight[1];
        let aggY;
        let aggLine;
        let aggEnvelope;
        let aggGapLine;
        this.yAxisState = yAxisState;
        let yExtent = this.yAxisState.yExtent;
        // Build the lane-local y scale; pad the domain ~10px at each end so
        // lines don't touch the lane edges (clamped at 0 for area charts).
        aggY = d3.scaleLinear();
        aggY.range([this.height, this.chartOptions.aggTopMargin]);
        if (this.chartComponentData.aggHasVisibleSplitBys(aggKey)) {
            var yRange = (yExtent[1] - yExtent[0]) > 0 ? yExtent[1] - yExtent[0] : 1;
            var yOffsetPercentage = 10 / (this.chartHeight / ((this.yAxisState.axisType === YAxisStates.Overlap) ? 1 : this.visibleAggCount));
            let yDomainMin = this.chartOptions.isArea ?
                (Math.max(yExtent[0] - (yRange * yOffsetPercentage), 0)) :
                (yExtent[0] - (yRange * yOffsetPercentage));
            aggY.domain([yDomainMin, yExtent[1] + (yRange * (10 / this.chartHeight))]);
        } else {
            // No visible series: use a dummy [0, 1] domain.
            aggY.domain([0,1]);
            yExtent = [0, 1];
        }
        // Per-aggregate interpolation can override the chart-wide default.
        aggLine = d3.line()
            .curve(this.chartComponentData.displayState[aggKey].interpolationFunction ? d3[this.chartComponentData.displayState[aggKey].interpolationFunction] : this.chartOptions.interpolationFunction)
            .defined((d: any) => {
                return (d.measures !== null) &&
                    (d.measures[this.chartComponentData.getVisibleMeasure(d.aggregateKey, d.splitBy)] !== null);
            })
            .x((d: any) => this.getXPosition(d, this.x))
            .y((d: any) => {
                return d.measures ? aggY(d.measures[this.chartComponentData.getVisibleMeasure(d.aggregateKey, d.splitBy)]) : null;
            });
        // Envelope area spans between the min and max measures.
        aggEnvelope = d3.area()
            .curve(this.chartComponentData.displayState[aggKey].interpolationFunction ? d3[this.chartComponentData.displayState[aggKey].interpolationFunction] : this.chartOptions.interpolationFunction)
            .defined((d: any) => (d.measures !== null) && (d.measures['min'] !== null) && (d.measures['max'] !== null))
            .x((d: any) => this.getXPosition(d, this.x))
            .y0((d: any) => d.measures ? aggY(d.measures['max']) : 0)
            .y1((d: any) => d.measures ? aggY(d.measures['min']) : 0);
        aggGapLine = aggLine;
        // Expose a chart-coordinate copy of the scale for tooltips/markers.
        let localY = aggY.copy();
        localY.range([this.yTop + this.height, this.yTop + this.chartOptions.aggTopMargin]);
        yMap[aggKey] = localY;
        var yAxis: any = this.aggregateGroup.selectAll(".yAxis")
            .data([aggKey]);
        // In Shared mode only the first aggregate draws the axis.
        var visibleYAxis = (aggVisible && (this.yAxisState.axisType !== YAxisStates.Shared || visibleAggI === 0));
        yAxis = yAxis.enter()
            .append("g")
            .attr("class", `yAxis ${yAxisHasOnClick ? `tsi-clickableYAxis tsi-swimLaneAxis-${this.chartComponentData.displayState[aggKey].aggregateExpression.swimLane}` : ''}`)
            .merge(yAxis)
            .style("visibility", ((visibleYAxis && !this.chartOptions.yAxisHidden) ? "visible" : "hidden"));
        if (this.yAxisState.axisType === YAxisStates.Overlap) {
            // Overlap mode: only show the extent ticks, offset per aggregate
            // and colored to match the series so overlapping axes stay legible.
            yAxis.call(d3.axisLeft(aggY).tickFormat(Utils.formatYAxisNumber).tickValues(yExtent))
                .selectAll("text")
                .attr("y", (d, j) => {return (j == 0) ? (-visibleAggI * 16) : (visibleAggI * 16) })
                .style("fill", this.chartComponentData.displayState[aggKey].color);
        }
        else {
            yAxis.call(d3.axisLeft(aggY).tickFormat(Utils.formatYAxisNumber)
                .ticks(Math.max(2, Math.ceil(this.height/(this.yAxisState.axisType === YAxisStates.Stacked ? this.visibleAggCount : 1)/90))))
                .selectAll("text").classed("standardYAxisText", true)
        }
        // If yAxisOnClick present, attach to yAxis
        if(yAxisHasOnClick){
            yAxis.on("click", () => {
                yAxisOnClick();
            })
            // Highlight the matching swim-lane label (and embolden the axis
            // text) while hovering the clickable axis.
            let label = document.getElementsByClassName(`tsi-swimLaneLabel-${agg.swimLane}`)[0];
            if(label){
                yAxis.on("mouseover", () => {
                    label.classList.add("tsi-axisHover");
                    yAxis.selectAll("text").classed("tsi-boldYAxisText", true)
                })
                yAxis.on("mouseout", () => {
                    label.classList.remove("tsi-axisHover");
                    yAxis.selectAll("text").classed("tsi-boldYAxisText", false)
                })
            }
        }
        yAxis.exit().remove();
        var guideLinesData = {
            x: this.x,
            y: aggY,
            visible: visibleYAxis
        };
        let splitByColors = Utils.createSplitByColors(this.chartComponentData.displayState, aggKey, this.chartOptions.keepSplitByColor);
        let includeDots = this.chartOptions.includeDots || this.chartComponentData.displayState[aggKey].includeDots;
        let self = this;
        // One <g> per split-by series; all per-series elements live inside it.
        let splitByGroups = this.aggregateGroup.selectAll(".tsi-splitByGroup")
            .data(Object.keys(this.chartComponentData.timeArrays[aggKey]));
        splitByGroups.enter()
            .append("g")
            .attr("class", "tsi-splitByGroup " + agg.aggKey)
            .merge(splitByGroups)
            .each(function (splitBy, j) {
                colorMap[aggKey + "_" + splitBy] = splitByColors[j];
                // creation of segments between each gap in the data
                var segments = [];
                var lineData = self.chartComponentData.timeArrays[aggKey][splitBy];
                var visibleMeasure = self.chartComponentData.getVisibleMeasure(aggKey, splitBy);
                for (var i = 0; i < lineData.length - 1; i++) {
                    if (lineData[i].measures !== null && lineData[i].measures[visibleMeasure] !== null) {
                        // Scan forward past the run of null values; a segment
                        // bridges the last good point to the next good point.
                        var scannerI: number = i + 1;
                        while(scannerI < lineData.length && ((lineData[scannerI].measures == null) ||
                                                             lineData[scannerI].measures[visibleMeasure] == null)) {
                            scannerI++;
                        }
                        if (scannerI < lineData.length && scannerI != i + 1) {
                            segments.push([lineData[i], lineData[scannerI]]);
                        }
                        i = scannerI - 1;
                    }
                }
                // Skip animation on first render of an element or when
                // animation is globally disabled.
                var durationFunction = (d) => {
                    let previousUndefined = previousAggregateData.get(this) === undefined;
                    return (self.chartOptions.noAnimate || previousUndefined) ? 0 : self.TRANSDURATION
                }
                // Dashed lines across data gaps.
                var gapPath = d3.select(this).selectAll(".tsi-gapLine")
                    .data(segments.map((d) => {
                        d.inTransition = true;
                        return d;
                    }));
                gapPath.enter()
                    .append("path")
                    .attr("class", "tsi-valueElement tsi-gapLine")
                    .merge(gapPath)
                    .style("visibility", (d: any) => {
                        return (self.chartComponentData.isSplitByVisible(aggKey, splitBy)) ? "visible" : "hidden";
                    })
                    .transition()
                    .duration(durationFunction)
                    .ease(d3.easeExp)
                    .attr("stroke-dasharray","5,5")
                    .attr("stroke", splitByColors[j])
                    .attrTween('d', function (d) {
                        // Morph from the previously rendered path for smooth transitions.
                        var previous = d3.select(this).attr('d');
                        var current = aggLine(d);
                        return interpolatePath(previous, current);
                    })
                    .on('end', (d: any) => {
                        d.inTransition = false;
                    });
                // Main value line.
                var path = d3.select(this).selectAll(".tsi-valueLine")
                    .data([self.chartComponentData.timeArrays[aggKey][splitBy]].map(d => {
                        d.inTransition = true;
                        return d;
                    }));
                path.enter()
                    .append("path")
                    .attr("class", "tsi-valueElement tsi-valueLine")
                    .merge(path)
                    .style("visibility", (d: any) => {
                        return (self.chartComponentData.isSplitByVisible(aggKey, splitBy)) ? "visible" : "hidden";
                    })
                    .transition()
                    .duration(durationFunction)
                    .ease(d3.easeExp)
                    .attr("stroke", splitByColors[j])
                    .attr("stroke-opacity", self.strokeOpacity)
                    .attrTween('d', function (d) {
                        var previous = d3.select(this).attr('d');
                        var current = aggLine(d);
                        return interpolatePath(previous, current);
                    })
                    .on('end', (d: any) => {
                        d.inTransition = false;
                    });
                // Per-point dots, keyed by timestamp so identity survives updates.
                if (self.chartOptions.includeDots || self.chartComponentData.displayState[aggKey].includeDots) {
                    let dots = d3.select(this).selectAll(".tsi-valueDot")
                        .data(self.chartComponentData.timeArrays[aggKey][splitBy].filter((d) => {
                            return d && d.measures && d.measures[self.chartComponentData.getVisibleMeasure(d.aggregateKey, d.splitBy)] !== null;
                        }), (d: any, i) => {
                            return d.dateTime.toString();
                        });
                    dots.enter()
                        .append('circle')
                        .attr('class', 'tsi-valueElement tsi-valueDot')
                        .attr('r', 3)
                        .merge(dots)
                        .style("visibility", (d: any) => {
                            return (self.chartComponentData.isSplitByVisible(aggKey, splitBy) && d.measures) ? "visible" : "hidden";
                        })
                        .transition()
                        .duration(function (d, i) {
                            // Only animate dots that were already shown last render.
                            return (self.previousIncludeDots.get(this) === true) ? durationFunction(d) : 0;
                        })
                        .ease(d3.easeExp)
                        .attr("fill", splitByColors[j])
                        .attr('cx', (d: any) => self.getXPosition(d, self.x))
                        .attr('cy', (d: any) => {
                            return d.measures ? aggY(d.measures[self.chartComponentData.getVisibleMeasure(d.aggregateKey, d.splitBy)]) : null;
                        })
                        .each(function () {
                            self.previousIncludeDots.set(this, includeDots);
                        })
                    dots.exit().remove();
                } else {
                    d3.select(this).selectAll(".tsi-valueDot").remove();
                }
                // Min/max envelope band (only when the data can supply min/max).
                let envelopeData = {};
                if ((self.chartComponentData.displayState[aggKey].includeEnvelope || self.chartOptions.includeEnvelope) && self.chartComponentData.isPossibleEnvelope(aggKey, splitBy)) {
                    envelopeData = self.chartComponentData.timeArrays[aggKey][splitBy].map((d: any) => ({...d, isEnvelope: true}));
                }
                let envelope = d3.select(this).selectAll(".tsi-valueEnvelope")
                    .data([envelopeData]);
                envelope.enter()
                    .append("path")
                    .attr("class", "tsi-valueElement tsi-valueEnvelope")
                    .merge(envelope)
                    .style("visibility", (d: any) => {
                        return (self.chartComponentData.isSplitByVisible(aggKey, splitBy)) ? "visible" : "hidden";
                    })
                    .transition()
                    .duration(durationFunction)
                    .ease(d3.easeExp)
                    .style("fill", splitByColors[j])
                    .attr("fill-opacity", .2)
                    .attr("d", aggEnvelope);
                if (self.chartOptions.isArea) {
                    self.createAreaPath(aggY);
                    var area = d3.select(this).selectAll(".tsi-valueArea")
                        .data([self.chartComponentData.timeArrays[aggKey][splitBy]]);
                    // logic for shiny gradient fill via url()
                    let svgId = Utils.guid();
                    let lg = self.defs.selectAll('linearGradient')
                        .data([self.chartComponentData.timeArrays[aggKey][splitBy]]);
                    var gradient = lg.enter()
                        .append('linearGradient');
                    gradient.merge(lg)
                        .attr('id', svgId).attr('x1', '0%').attr('x2', '0%').attr('y1', '0%').attr('y2', '100%');
                    gradient.append('stop').attr('offset', '0%').attr('style', () =>{return 'stop-color:' + splitByColors[j] + ';stop-opacity:.2'});
                    gradient.append('stop').attr('offset', '100%').attr('style', () =>{return 'stop-color:' + splitByColors[j] + ';stop-opacity:.03'});
                    lg.exit().remove();
                    area.enter()
                        .append("path")
                        .attr("class", "tsi-valueArea")
                        .merge(area)
                        .style("fill", 'url(#' + (svgId) + ')')
                        .style("visibility", (d: any) => {
                            return (self.chartComponentData.isSplitByVisible(aggKey, splitBy)) ? "visible" : "hidden";
                        })
                        .transition()
                        .duration(durationFunction)
                        .ease(d3.easeExp)
                        .attr("d", self.areaPath);
                    area.exit().remove();
                }
                gapPath.exit().remove();
                path.exit().remove();
                // Remember this element has rendered so future updates animate.
                previousAggregateData.set(this, splitBy);
            });
        splitByGroups.exit().remove();
    }
}
export default LinePlot;
import { Color, GradientColor } from "../util/color"
import { Modeling, Model, obj2Model, ClassType } from "../util/types";
import { uniqueId } from "../util/uniqueId";
import { loge } from "../util/log";
import { BridgeContext } from "../runtime/global";
import { LayoutConfig } from '../util/layoutconfig'
import { IAnimation } from "./animation";
import { FlexConfig } from "../util/flexbox";
/**
 * Property decorator: backs `propKey` with a hidden `__prop__<key>` slot and
 * notifies the instance's `onPropertyChanged` only when the assigned value
 * actually differs (strict inequality) from the previous one.
 */
export function Property(target: View, propKey: string) {
    const backingKey = `__prop__${propKey}`
    Object.defineProperty(target, propKey, {
        get: function () {
            return Reflect.get(this, backingKey, this)
        },
        set: function (newV) {
            const prev = Reflect.get(this, backingKey, this)
            Reflect.set(this, backingKey, newV, this)
            if (prev !== newV) {
                Reflect.apply(this.onPropertyChanged, this, [propKey, prev, newV])
            }
        },
    })
}
/**
 * Like {@link Property}, but fires `onPropertyChanged` on EVERY assignment,
 * even when the new value strictly equals the old one.
 */
export function InconsistProperty(target: Object, propKey: string) {
    const backingKey = `__prop__${propKey}`
    Object.defineProperty(target, propKey, {
        get: function () {
            return Reflect.get(this, backingKey, this)
        },
        set: function (newV) {
            const prev = Reflect.get(this, backingKey, this)
            Reflect.set(this, backingKey, newV, this)
            Reflect.apply(this.onPropertyChanged, this, [propKey, prev, newV])
        },
    })
}
// Key under which a view class stores its registered native view-type name.
const PROP_KEY_VIEW_TYPE = "__prop__ViewType";
/**
 * Class decorator: registers the native view-type name for a view class.
 * Keeps an already-registered name; otherwise inherits the parent class's name.
 */
export function ViewComponent(constructor: ClassType<any>) {
    const name = Reflect.get(constructor, PROP_KEY_VIEW_TYPE) || Object.getPrototypeOf(constructor).name
    Reflect.set(constructor, PROP_KEY_VIEW_TYPE, name)
}
/**
 * Wire format for a view sent across the native bridge: the view id, its
 * native type name, and the (dirty) props payload keyed by prop name.
 */
export type NativeViewModel = {
    id: string;
    type: string;
    props: {
        [index: string]: Model;
    };
}
/**
 * Mutable holder for a view reference. Reading `current` before a view has
 * been attached throws, so callers never silently observe an empty ref.
 */
export class Ref<T extends View> {
    private view?: T;
    set current(v: T) {
        this.view = v
    }
    get current() {
        const held = this.view
        if (!held) {
            throw new Error("Ref is empty")
        }
        return held
    }
}
/** Convenience factory for an empty {@link Ref}. */
export function createRef() {
    return new Ref()
}
/**
 * Base class for all renderable views. Every `@Property` field records its
 * changes into `__dirty_props__`; that dirty set is what gets serialized to
 * the native side via `toModel()` / `nativeViewModel`. Function-valued props
 * are replaced by generated callback ids and invoked back through
 * `responseCallback`.
 */
export abstract class View implements Modeling {
    // Prop changes accumulated since the last clean(); keys are prop names.
    private __dirty_props__!: { [index: string]: Model | undefined }
    @Property
    width: number = 0
    @Property
    height: number = 0
    @Property
    x: number = 0
    @Property
    y: number = 0
    @Property
    backgroundColor?: Color | GradientColor
    @Property
    corners?: number | { leftTop?: number; rightTop?: number; leftBottom?: number; rightBottom?: number }
    @Property
    border?: { width: number; color: Color; }
    @Property
    shadow?: { color: Color; opacity: number; radius: number; offsetX: number; offsetY: number }
    @Property
    alpha?: number
    @Property
    hidden?: boolean
    // Unique id used to address this view across the bridge.
    viewId = uniqueId('ViewId')
    @Property
    padding?: {
        left?: number,
        right?: number,
        top?: number,
        bottom?: number,
    }
    @Property
    layoutConfig?: LayoutConfig
    @Property
    onClick?: Function
    superview?: Superview
    // Generated-id -> Function registry for props that carry callbacks.
    callbacks!: Map<String, Function>
    // Registers a callback and returns the id that stands in for it on the bridge.
    private callback2Id(f: Function) {
        if (this.callbacks === undefined) {
            this.callbacks = new Map
        }
        const id = uniqueId('Function')
        this.callbacks.set(id, f)
        return id
    }
    // Resolves a callback id; falls back to a same-named member on the view.
    private id2Callback(id: string) {
        if (this.callbacks === undefined) {
            this.callbacks = new Map
        }
        let f = this.callbacks.get(id)
        if (f === undefined) {
            f = Reflect.get(this, id) as Function
        }
        return f
    }
    /** Anchor start*/
    // Convenience accessors derived from x/y/width/height; setters adjust x/y.
    get left() {
        return this.x
    }
    set left(v: number) {
        this.x = v
    }
    get right() {
        return this.x + this.width
    }
    set right(v: number) {
        this.x = v - this.width
    }
    get top() {
        return this.y
    }
    set top(v: number) {
        this.y = v
    }
    get bottom() {
        return this.y + this.height
    }
    set bottom(v: number) {
        this.y = v - this.height
    }
    get centerX() {
        return this.x + this.width / 2
    }
    get centerY() {
        return this.y + this.height / 2
    }
    set centerX(v: number) {
        this.x = v - this.width / 2
    }
    set centerY(v: number) {
        this.y = v - this.height / 2
    }
    /** Anchor end*/
    get dirtyProps() {
        return this.__dirty_props__
    }
    // NOTE(review): this initializer runs after the decorated field
    // initializers above (whose setters create __dirty_props__), so `props`
    // aliases the live dirty set. This relies on class fields being emitted
    // as assignments (useDefineForClassFields: false) — confirm tsconfig.
    nativeViewModel: NativeViewModel = {
        id: this.viewId,
        type: this.viewType(),
        props: this.__dirty_props__,
    }
    // Native type name registered by @ViewComponent, defaulting to the class name.
    viewType() {
        return Reflect.get(this.constructor, PROP_KEY_VIEW_TYPE) || this.constructor.name
    }
    // Records a changed prop into the dirty set, converting any Functions
    // (top-level or nested) to callback ids.
    onPropertyChanged(propKey: string, oldV: Model, newV: Model): void {
        if (newV instanceof Function) {
            newV = this.callback2Id(newV)
        } else {
            newV = obj2Model(newV, (v) => this.callback2Id(v))
        }
        if (this.__dirty_props__ === undefined) {
            this.__dirty_props__ = {}
        }
        this.__dirty_props__[propKey] = newV
    }
    // Clears the dirty set (keeps the same object so aliases stay valid).
    clean() {
        for (const key in this.__dirty_props__) {
            if (Reflect.has(this.__dirty_props__, key)) {
                Reflect.deleteProperty(this.__dirty_props__, key)
            }
        }
    }
    isDirty() {
        return Reflect.ownKeys(this.__dirty_props__).length !== 0
    }
    // Invoked from native: runs the callback registered under `id` with the
    // remaining arguments.
    responseCallback(id: string, ...args: any) {
        const f = this.id2Callback(id)
        if (f instanceof Function) {
            const argumentsList: any = []
            for (let i = 1; i < arguments.length; i++) {
                argumentsList.push(arguments[i])
            }
            return Reflect.apply(f, this, argumentsList)
        } else {
            loge(`Cannot find callback:${id} for ${JSON.stringify(this.toModel())}`)
        }
    }
    toModel() {
        return this.nativeViewModel
    }
    // Kotlin-style scoping helpers.
    let(block: (it: this) => void) {
        block(this)
    }
    also(block: (it: this) => void) {
        block(this)
        return this
    }
    // Bulk-assigns the given props (each assignment goes through the setters).
    apply(config: Partial<this>) {
        for (let key in config) {
            Reflect.set(this, key, Reflect.get(config, key, config), this)
        }
        return this
    }
    in(group: Group) {
        group.addChild(this)
        return this
    }
    // Returns a function that invokes native command `name` against this view,
    // addressed by the root-to-this chain of view ids.
    // NOTE: the chain is captured when the returned function first runs.
    nativeChannel(context: BridgeContext, name: string) {
        let thisView: View | undefined = this
        return function (args: any = undefined) {
            const viewIds = []
            while (thisView != undefined) {
                viewIds.push(thisView.viewId)
                thisView = thisView.superview
            }
            const params = {
                viewIds: viewIds.reverse(),
                name,
                args,
            }
            return context.callNative('shader', 'command', params) as Promise<any>
        }
    }
    // Async queries of the native view's actual geometry.
    getWidth(context: BridgeContext) {
        return this.nativeChannel(context, 'getWidth')() as Promise<number>
    }
    getHeight(context: BridgeContext) {
        return this.nativeChannel(context, 'getHeight')() as Promise<number>
    }
    getX(context: BridgeContext) {
        return this.nativeChannel(context, 'getX')() as Promise<number>
    }
    getY(context: BridgeContext) {
        return this.nativeChannel(context, 'getY')() as Promise<number>
    }
    getLocationOnScreen(context: BridgeContext) {
        return this.nativeChannel(context, "getLocationOnScreen")() as Promise<{ x: number, y: number }>
    }
    /**++++++++++transform++++++++++*/
    @Property
    translationX?: number
    @Property
    translationY?: number
    @Property
    scaleX?: number
    @Property
    scaleY?: number
    @Property
    pivotX?: number
    @Property
    pivotY?: number
    @Property
    rotation?: number
    /**
     * rotation*PI
     * In X
     */
    @Property
    rotationX?: number
    /**
     * rotation*PI
     * In Y
     */
    @Property
    rotationY?: number
    /**
     * Determines the distance between the z=0 plane and the user in order to give a 3D-positioned element some perspective.
     */
    @Property
    perspective?: number
    /**----------transform----------*/
    @Property
    flexConfig?: FlexConfig
    // Declarative aliases used by JSX/builder-style construction.
    set props(props: Partial<this>) {
        this.apply(props)
    }
    set parent(v: Group) {
        this.in(v)
    }
    set ref(ref: Ref<this>) {
        ref.current = this
    }
    // Runs an animation natively, then syncs the resulting prop values back
    // without re-marking them dirty.
    doAnimation(context: BridgeContext, animation: IAnimation) {
        return this.nativeChannel(context, "doAnimation")(animation.toModel()).then((args) => {
            for (let key in args) {
                Reflect.set(this, key, Reflect.get(args, key, args), this)
                Reflect.deleteProperty(this.__dirty_props__, key)
            }
        })
    }
    // Stops the animation and resets the transform props to their defaults.
    clearAnimation(context: BridgeContext, animation: IAnimation) {
        return this.nativeChannel(context, "clearAnimation")(animation.id).then(() => {
            this.__dirty_props__.translationX = this.translationX || 0
            this.__dirty_props__.translationY = this.translationY || 0
            this.__dirty_props__.scaleX = this.scaleX || 1
            this.__dirty_props__.scaleY = this.scaleY || 1
            this.__dirty_props__.rotation = this.rotation || 0
        })
    }
    // Cancels the animation and syncs the native view's current values back.
    cancelAnimation(context: BridgeContext, animation: IAnimation) {
        return this.nativeChannel(context, "cancelAnimation")(animation.id).then((args) => {
            for (let key in args) {
                Reflect.set(this, key, Reflect.get(args, key, args), this)
                Reflect.deleteProperty(this.__dirty_props__, key)
            }
        })
    }
}
/**
 * A view that owns other views. Dirtiness and cleaning recurse into subviews,
 * and `toModel()` serializes only the dirty subviews under a `subviews` prop.
 */
export abstract class Superview extends View {
    // Linear search of the direct subviews by view id.
    subviewById(id: string): View | undefined {
        for (let v of this.allSubviews()) {
            if (v.viewId === id) {
                return v
            }
        }
    }
    abstract allSubviews(): Iterable<View>
    // Dirty if this view or any subview is dirty.
    isDirty() {
        if (super.isDirty()) {
            return true
        } else {
            for (const v of this.allSubviews()) {
                if (v.isDirty()) {
                    return true
                }
            }
        }
        return false
    }
    clean() {
        for (let v of this.allSubviews()) {
            v.clean()
        }
        super.clean()
    }
    toModel() {
        const subviews = []
        for (let v of this.allSubviews()) {
            if (v != undefined) {
                if (v.superview && v.superview !== this) {
                    // The subview was previously attached to another view, so the
                    // native side has no state for it here: re-mark every prop
                    // (and its whole subtree) dirty so it serializes in full.
                    for (let key in v) {
                        if (key.startsWith("__prop__")) {
                            v.onPropertyChanged(key, undefined, Reflect.get(v, key))
                        }
                        // NOTE(review): the two instanceof blocks below run once
                        // per enumerated key of `v` — presumably intended to run
                        // once after the loop. The work is idempotent, so only
                        // wasted effort; confirm before moving them out.
                        if (v instanceof Superview) {
                            for (const subview of v.allSubviews()) {
                                subview.superview = {} as Superview
                            }
                        }
                        if (v instanceof Group) {
                            v.dirtyProps.children = v.children.map(e => e.viewId)
                        }
                    }
                }
                v.superview = this
                if (v.isDirty()) {
                    subviews.push(v.toModel())
                }
            }
        }
        this.dirtyProps.subviews = subviews
        return super.toModel()
    }
}
/** A flat list of views. */
export type ViewArray = View[]
/** Either a single view or a list of views, used when composing children. */
export type ViewFragment = View | ViewArray
export abstract class Group extends Superview {
readonly children: View[] = []
allSubviews() {
return this.children
}
addChild(view: View) {
this.children.push(view)
this.dirtyProps.children = this.children.map(e => e.viewId)
}
private addInnerElement(e: View | ViewFragment | ViewFragment[] | undefined | null) {
if (e instanceof Array) {
e.forEach(e => this.addInnerElement(e))
} else if (e instanceof View) {
this.addChild(e)
} else {
loge(`Not allowed to add ${typeof e}`)
}
}
set innerElement(e: View | ViewFragment | ViewFragment[] | undefined | null) {
this.addInnerElement(e)
}
} | the_stack |
import {
format,
isSameMonth,
isWeekend,
differenceInCalendarDays,
isBefore,
addDays,
startOfWeek,
endOfWeek,
getSeconds,
differenceInHours,
getMinutes,
getHours,
addHours,
isSameDay,
endOfDay,
startOfDay,
toDate,
Locale
} from 'date-fns';
import { EventObject, ParsedRangeEvent } from './foundation';
// Shallow-copies an event onto a new date; optionally overrides start/end
// (falsy overrides are ignored) and forces the allDay flag (default false).
const copyEvent = (event: EventObject, date: Date, start?: Date, end?: Date, allDay = false) => {
    const copied = { ...event };
    copied.date = date;
    if (start) {
        copied.start = start;
    }
    if (end) {
        copied.end = end;
    }
    copied.allDay = allDay;
    return copied;
};
const isDateInRange = (dirtyDate: Date, dirtyStart: Date, dirtyEnd: Date) => {
const date = toDate(dirtyDate);
const start = toDate(dirtyStart);
const end = toDate(dirtyEnd);
return date.getTime() < end.getTime() && date.getTime() >= start.getTime();
};
// Comparator for Date-or-string values; earlier dates sort first.
export const sortDate = (a: Date | string, b: Date | string) =>
    isBefore(new Date(a), new Date(b)) ? -1 : 1;
/** Whether the given date falls on a Saturday or Sunday. */
export const checkWeekend = (val: Date): boolean => isWeekend(val);
/** The current instant. */
export const getCurrDate = (): Date => new Date();
/** Rounds a number to three decimal places. */
export const round = (value: number): number => Math.round(value * 1e3) / 1e3;
// Fraction of the day (0..1) elapsed at the given time.
export const getPos = (value: Date | number) => {
    const secondsIntoDay = (getHours(value) * 60 + getMinutes(value)) * 60 + getSeconds(value);
    const secondsPerDay = 24 * 60 * 60;
    return secondsIntoDay / secondsPerDay;
};
export const isAllDayEvent = (event: EventObject) => 'allDay' in event && event.allDay;
/**
 * Normalizes an event so both `start` and `end` are present (mutates and
 * returns the same event object):
 * - missing start: start = end - 1h, clamped to the start of end's day;
 * - missing end:   end = start + 1h, clamped to the end of start's day;
 * - both missing:  returns `undefined`;
 * - both present:  returned unchanged. (Previously the final `else` branch
 *   unconditionally overwrote an already-set `end`; guarded with `!end`.)
 */
export const amendEvent = (event: EventObject) => {
    const { start, end } = event;
    if (!start && !end) {
        return undefined;
    } else if (!start) {
        event.start = isSameDay(end, addHours(end, -1)) ? addHours(end, -1) : startOfDay(end);
    } else if (!end) {
        event.end = isSameDay(start, addHours(start, 1)) ? addHours(start, 1) : endOfDay(start);
    }
    return event;
};
/**
 * Row height for an all-day lane: one more than the largest `topInd`
 * among the events, or 1 when there are no events.
 */
export const calcRowHeight = (events: ParsedRangeEvent[]) => {
    if (!events.length) {
        return 1;
    }
    return Math.max(...events.map(item => item.topInd)) + 1;
};
/** Per-day cell data produced by calcRangeData / calcWeekData. */
export interface DateObj {
    ind: number;          // 0-based offset from the range start
    date: Date;
    dayString: string;    // day-of-month label, e.g. "7"
    weekday: string;      // abbreviated weekday name, e.g. "Mon"
    isToday: boolean;
    isWeekend: boolean;
    isSameMonth: boolean; // month mode only: belongs to the displayed month
    month: string;        // month mode only: abbreviated month label
}
/**
 * Builds one DateObj per day for `rangeLen` days starting at `start`.
 * Month mode additionally records the month label and whether the day belongs
 * to the displayed month (`value`).
 */
export const calcRangeData = (value: Date, start: Date, rangeLen: number, mode: string, locale: Locale) => {
    const today = getCurrDate();
    const arr: Array<DateObj> = [];
    for (let ind = 0; ind < rangeLen; ind++) {
        const date = addDays(start, ind);
        const dateObj = {
            ind,
            date,
            dayString: format(date, 'd', { locale }),
            weekday: format(date, 'EEE', { locale }),
            isToday: isSameDay(date, today),
            isWeekend: checkWeekend(date),
        } as DateObj;
        if (mode === 'month') {
            dateObj.isSameMonth = isSameMonth(value, date);
            dateObj.month = format(date, 'LLL', { locale });
        }
        arr.push(dateObj);
    }
    return arr;
};
/**
 * Builds the seven DateObj entries for the week containing `value`.
 * @param {value} date
 * @param {string} mode
 * @param {string} locale
 * @returns {object[]} { date: Date, dayString: string, ind: number, isToday: boolean, isWeekend: boolean, weekday: string }
 */
export const calcWeekData = (value: Date, mode = 'week', locale: Locale) => {
    return calcRangeData(value, startOfWeek(value), 7, mode, locale);
};
/**
 * Expands an all-day event into one copy per calendar day it spans.
 * Events missing both endpoints fall back to `currDate`.
 * @returns {object[]} { allDay: boolean, date: Date, start: Date, end: Date, children: ReactNode }
 */
export const parseAllDayEvent = (event: EventObject, allDay = true, currDate: Date = undefined) => {
    const { start, end } = event;
    const res = [];
    if (start && end) {
        const spanDays = differenceInCalendarDays(end, start);
        for (let day = 0; day <= spanDays; day++) {
            res.push(copyEvent(event, addDays(start, day), null, null, allDay));
        }
    } else {
        const anchor = start || end || currDate;
        res.push(copyEvent(event, startOfDay(anchor), null, null, allDay));
    }
    return res;
};
/**
 * Splits an event into per-day render entries:
 * - all-day events delegate to parseAllDayEvent;
 * - same-day ranged events yield one entry;
 * - ranges under 24h that cross midnight yield two entries (tail of the first
 *   day, head of the second);
 * - longer ranges are expanded day-by-day;
 * - events missing one endpoint are normalized via amendEvent first.
 * @returns {object[]} { allDay: boolean, date: Date, start: Date, end: Date, children: ReactNode }
 */
export const parseEvent = (event: EventObject) => {
    const { start, end } = event;
    let res: EventObject[] = [];
    if (isAllDayEvent(event)) {
        return parseAllDayEvent(event);
    }
    if (start && end) {
        if (!isBefore(start, end)) {
            // Swap so event.start <= event.end.
            // NOTE(review): the local `start`/`end` keep their pre-swap values
            // below. The same-day and abs-difference checks are symmetric, but
            // the cross-midnight branch then treats the LATER date as the
            // "first" day — confirm whether re-reading event.start/event.end
            // after the swap was intended.
            [event.start, event.end] = [event.end, event.start];
        }
        if (isSameDay(start, end)) {
            res.push(copyEvent(event, startOfDay(start)));
        } else if (Math.abs(differenceInHours(start, end)) < 24) {
            res.push(copyEvent(event, startOfDay(start), null, endOfDay(start)));
            res.push(copyEvent(event, startOfDay(end), startOfDay(end)));
        } else {
            res = res.concat(parseAllDayEvent(event));
        }
    } else {
        const amend = amendEvent(event);
        res.push(copyEvent(amend, startOfDay(amend.start)));
    }
    return res;
};
/**
 * Groups events into a Map keyed by a date string.
 * @param arr events to group
 * @param key which event field supplies the grouping date
 * @param func optional transform applied to the date before keying
 * @param displayValue fallback date (start of its day) when `key` is absent
 * @returns Map from date string to the events bucketed under it
 */
export const convertEventsArrToMap = (
    arr: EventObject[],
    key: 'start' | 'date',
    func: (val: Date) => Date,
    displayValue?: Date
) => {
    const res = new Map();
    for (const item of arr) {
        const raw = key in item ? item[key] : startOfDay(displayValue);
        const mapKey = (func ? func(raw) : raw).toString();
        const bucket = res.get(mapKey);
        if (bucket) {
            bucket.push(item);
        } else {
            res.set(mapKey, [item]);
        }
    }
    return res;
};
/**
 * Keeps only event buckets visible within [start, end):
 * - days inside the range are kept as-is;
 * - days after the range are dropped;
 * - days before the range keep only events that still overlap it (no `end`,
 *   or an `end` not before `start`), re-bucketed under the range start.
 * Fix: the re-bucketed branch previously stored the *unfiltered* day list
 * when the range-start key did not exist yet; it now stores `filtered` in
 * both cases, matching the merge branch.
 */
export const filterEvents = (events: Map<string, EventObject[]>, start: Date, end: Date) => {
    const res = new Map<string, EventObject[]>();
    [...events.keys()].map(day => {
        const item = events.get(day);
        const date = new Date(day);
        if (isDateInRange(date, start, end)) {
            res.set(day, item);
        } else if (isBefore(end, date)) {
            // day is past the visible range — drop it
        } else {
            const filtered = item.filter(i => !i.end || !isBefore(i.end, start));
            const key = start.toString();
            if (res.has(key)) {
                res.set(key, [...res.get(key), ...filtered]);
            } else {
                res.set(key, filtered);
            }
        }
    });
    return res;
};
/**
 * Restricts `events` to the week starting at `weekStart`
 * (range end is exclusive, hence one day past the end of the week).
 */
export const filterWeeklyEvents = (events: Map<string, EventObject[]>, weekStart: Date) =>
    filterEvents(events, weekStart, addDays(endOfWeek(weekStart), 1));
/**
 * Lays out all-day events that start on `startDate` within the range
 * [rangeStart, rangeEnd): computes each event's horizontal position
 * (`leftPos`), span (`width`, both as fractions of the range) and stacking
 * row (`topInd`), writing every covered day cell into the 2-D `parsed` grid
 * (row = stacking level, column = day index).
 * NOTE: sorts the caller's `event` array in place and mutates `parsed`.
 * @returns the mutated `parsed` grid
 */
export const parseRangeAllDayEvent = (
    event: EventObject[],
    startDate: Date,
    rangeStart: Date,
    rangeEnd: Date,
    parsed: Array<Array<ParsedRangeEvent>>
) => {
    const dateRangeLen = differenceInCalendarDays(rangeEnd, rangeStart);
    event.sort((a, b) => sortDate(a.start, b.start)).forEach(item => {
        const itemInfo = { ...item };
        const { end } = item;
        let dateLength;
        // Column of the event's first day within the range.
        const j = differenceInCalendarDays(startDate, rangeStart);
        // Find the first stacking row whose cell at column j is free.
        let i = 0;
        while (Boolean(parsed[i]) && Boolean(parsed[i][j])) {
            i++;
        }
        if (!end) {
            dateLength = 0;
        } else {
            // Clamp the span to the range end when the event runs past it.
            dateLength = isDateInRange(end, rangeStart, rangeEnd) ?
                differenceInCalendarDays(end, startDate) :
                differenceInCalendarDays(rangeEnd, startDate);
        }
        itemInfo.leftPos = round(Number(j) / dateRangeLen);
        itemInfo.width = Math.min(1 - round(Number(j) / dateRangeLen), round((dateLength + 1) * 1 / dateRangeLen));
        itemInfo.topInd = i;
        // Mark every covered day cell; only the first cell gets the positioned copy.
        [...Array(dateLength + 1).keys()].forEach(dist => {
            if (!parsed[i]) {
                parsed[i] = [];
            }
            if (dist > 0) {
                parsed[i][j + dist] = item;
            } else {
                parsed[i][j + dist] = itemInfo;
            }
        });
    });
    return parsed;
};
/**
 * Weekly specialization of parseRangeAllDayEvent: the range runs from
 * `weekStart` to one day past the end of `startDate`'s week.
 */
export const parseWeeklyAllDayEvent = (
    event: EventObject[],
    startDate: Date,
    weekStart: Date,
    parsed: Array<Array<ParsedRangeEvent>>
) => {
    const rangeEnd = addDays(endOfWeek(startDate), 1);
    return parseRangeAllDayEvent(event, startDate, weekStart, rangeEnd, parsed);
};
/**
 * Transposes a rows-by-day grid of events into day-indexed columns, so each
 * entry holds the stacked events for a single day (sparse cells are skipped).
 */
export const collectDailyEvents = (events: ParsedRangeEvent[][]) => {
    const collections = {} as ParsedRangeEvent[][];
    events.forEach((row, rowInd) => {
        row.forEach((event, ind) => {
            if (!collections[ind]) {
                collections[ind] = [];
            }
            collections[ind][rowInd] = event;
        });
    });
    return collections;
};
export const renderDailyEvent = (event: EventObject) => {
// eslint-disable-next-line prefer-const
let { start, end, allDay, children } = event;
let startPos,
endPos;
if (isAllDayEvent(event)) {
startPos = 0;
endPos = 0;
} else if (!start || !end) {
const amend = amendEvent(event);
endPos = getPos(amend.end);
startPos = getPos(amend.start);
} else {
if (!isBefore(start, end)) {
[start, end] = [end, start];
}
startPos = getPos(start);
endPos = getPos(end);
}
const parsed = {
startPos: round(startPos),
endPos: round(endPos),
children,
allDay: Boolean(allDay),
};
return parsed;
}; | the_stack |
import { DIBitmap, PatternBitmap16 } from "./Bitmap";
import { Blob } from "./Blob";
import { GDIContext } from "./GDIContext";
import { Helper, WMFJSError } from "./Helper";
import { PointS, Rect } from "./Primitives";
import { Region } from "./Region";
import { Brush, ColorRef, Font, Palette, Pen } from "./Style";
/**
 * The parsed sequence of drawing records contained in a Windows Metafile (WMF).
 *
 * The constructor walks the binary record stream once, decoding each supported
 * record into a closure that performs the corresponding GDI operation; play()
 * later replays those closures, in file order, against a GDIContext.
 */
export class WMFRecords {
    // One replayable closure per successfully parsed record, in file order.
    private _records: ((gdi: GDIContext) => void)[];
    /**
     * Parses all records from `reader`, starting at byte offset `first`,
     * until a META_EOF record is encountered.
     *
     * Each record begins with a 32-bit size (counted in 16-bit words, so the
     * record occupies `size * 2` bytes) followed by a 16-bit function type.
     *
     * @param reader Blob positioned over the WMF record stream.
     * @param first  Byte offset of the first record within the blob.
     * @throws WMFJSError when a record size is invalid or the stream ends
     *         before META_EOF is seen.
     */
    constructor(reader: Blob, first: number) {
        this._records = [];
        let all = false;
        let curpos = first;
        main_loop: while (!all) {
            reader.seek(curpos);
            const size = reader.readUint32();
            if (size < 3) {
                // A record header alone is 3 words (2 for size + 1 for type),
                // so anything smaller cannot be a valid record.
                throw new WMFJSError("Invalid record size");
            }
            const type = reader.readUint16();
            switch (type) {
                case Helper.GDI.RecordType.META_EOF:
                    // End-of-file record: terminate parsing successfully.
                    all = true;
                    break main_loop;
                case Helper.GDI.RecordType.META_SETMAPMODE: {
                    const mapMode = reader.readUint16();
                    this._records.push((gdi) => {
                        gdi.setMapMode(mapMode);
                    });
                    break;
                }
                // Note: 16-bit record parameters are stored in reverse order,
                // hence y is read before x throughout the cases below.
                case Helper.GDI.RecordType.META_SETWINDOWORG: {
                    const y = reader.readInt16();
                    const x = reader.readInt16();
                    this._records.push((gdi) => {
                        gdi.setWindowOrg(x, y);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_SETWINDOWEXT: {
                    const y = reader.readInt16();
                    const x = reader.readInt16();
                    this._records.push((gdi) => {
                        gdi.setWindowExt(x, y);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_OFFSETWINDOWORG: {
                    const offY = reader.readInt16();
                    const offX = reader.readInt16();
                    this._records.push((gdi) => {
                        gdi.offsetWindowOrg(offX, offY);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_SETVIEWPORTORG: {
                    const y = reader.readInt16();
                    const x = reader.readInt16();
                    this._records.push((gdi) => {
                        gdi.setViewportOrg(x, y);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_SETVIEWPORTEXT: {
                    const y = reader.readInt16();
                    const x = reader.readInt16();
                    this._records.push((gdi) => {
                        gdi.setViewportExt(x, y);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_OFFSETVIEWPORTORG: {
                    const offY = reader.readInt16();
                    const offX = reader.readInt16();
                    this._records.push((gdi) => {
                        gdi.offsetViewportOrg(offX, offY);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_SAVEDC: {
                    this._records.push((gdi) => {
                        gdi.saveDC();
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_RESTOREDC: {
                    const saved = reader.readInt16();
                    this._records.push((gdi) => {
                        gdi.restoreDC(saved);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_SETSTRETCHBLTMODE: {
                    const stretchMode = reader.readUint16();
                    this._records.push((gdi) => {
                        gdi.setStretchBltMode(stretchMode);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_DIBSTRETCHBLT: {
                    // NOTE(review): haveSrcDib is computed but never used below
                    // — presumably intended to validate the record layout;
                    // confirm before removing.
                    const haveSrcDib = ((type >> 8) + 3 !== size);
                    const rasterOp = reader.readUint16() | (reader.readUint16() << 16);
                    const srcH = reader.readInt16();
                    const srcW = reader.readInt16();
                    const srcY = reader.readInt16();
                    const srcX = reader.readInt16();
                    const destH = reader.readInt16();
                    const destW = reader.readInt16();
                    const destY = reader.readInt16();
                    const destX = reader.readInt16();
                    // Remaining bytes of the record hold the DIB payload.
                    const datalength = size * 2 - (reader.pos - curpos);
                    const dib = new DIBitmap(reader, datalength);
                    this._records.push((gdi) => {
                        gdi.stretchDibBits(srcX, srcY, srcW, srcH, destX, destY, destW, destH, rasterOp, dib);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_STRETCHDIB: {
                    const rasterOp = reader.readUint16() | (reader.readUint16() << 16);
                    const colorUsage = reader.readInt16();
                    const srcH = reader.readInt16();
                    const srcW = reader.readInt16();
                    const srcY = reader.readInt16();
                    const srcX = reader.readInt16();
                    const destH = reader.readInt16();
                    const destW = reader.readInt16();
                    const destY = reader.readInt16();
                    const destX = reader.readInt16();
                    const datalength = size * 2 - (reader.pos - curpos);
                    const dib = new DIBitmap(reader, datalength);
                    this._records.push((gdi) => {
                        gdi.stretchDib(srcX, srcY, srcW, srcH, destX, destY, destW, destH, rasterOp, colorUsage, dib);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_ESCAPE: {
                    const func = reader.readUint16();
                    const count = reader.readUint16();
                    const offset = reader.pos;
                    const blob = new Blob(reader, offset);
                    this._records.push((gdi) => {
                        gdi.escape(func, blob, offset, count);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_SETTEXTALIGN: {
                    const textAlign = reader.readUint16();
                    this._records.push((gdi) => {
                        gdi.setTextAlign(textAlign);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_SETBKMODE: {
                    const bkMode = reader.readUint16();
                    this._records.push((gdi) => {
                        gdi.setBkMode(bkMode);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_SETTEXTCOLOR: {
                    const textColor = new ColorRef(reader);
                    this._records.push((gdi) => {
                        gdi.setTextColor(textColor);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_SETBKCOLOR: {
                    const bkColor = new ColorRef(reader);
                    this._records.push((gdi) => {
                        gdi.setBkColor(bkColor);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_CREATEBRUSHINDIRECT: {
                    const datalength = size * 2 - (reader.pos - curpos);
                    const brush = new Brush(reader, datalength, false);
                    this._records.push((gdi) => {
                        gdi.createBrush(brush);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_DIBCREATEPATTERNBRUSH: {
                    const datalength = size * 2 - (reader.pos - curpos);
                    const brush = new Brush(reader, datalength, true);
                    this._records.push((gdi) => {
                        gdi.createBrush(brush);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_CREATEPENINDIRECT: {
                    const pen = new Pen(reader);
                    this._records.push((gdi) => {
                        gdi.createPen(pen);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_CREATEFONTINDIRECT: {
                    const datalength = size * 2 - (reader.pos - curpos);
                    const font = new Font(reader, datalength);
                    this._records.push((gdi) => {
                        gdi.createFont(font);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_SELECTOBJECT: {
                    const idx = reader.readUint16();
                    this._records.push((gdi) => {
                        gdi.selectObject(idx, null);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_SELECTPALETTE: {
                    const idx = reader.readUint16();
                    this._records.push((gdi) => {
                        gdi.selectObject(idx, "palette");
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_SELECTCLIPREGION: {
                    const idx = reader.readUint16();
                    this._records.push((gdi) => {
                        gdi.selectObject(idx, "region");
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_DELETEOBJECT: {
                    const idx = reader.readUint16();
                    this._records.push((gdi) => {
                        gdi.deleteObject(idx);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_RECTANGLE: {
                    const rect = new Rect(reader);
                    this._records.push((gdi) => {
                        gdi.rectangle(rect, 0, 0);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_ROUNDRECT: {
                    // rw/rh: corner ellipse width/height for the rounded rect.
                    const rh = reader.readInt16();
                    const rw = reader.readInt16();
                    const rect = new Rect(reader);
                    this._records.push((gdi) => {
                        gdi.rectangle(rect, rw, rh);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_LINETO: {
                    const y = reader.readInt16();
                    const x = reader.readInt16();
                    this._records.push((gdi) => {
                        gdi.lineTo(x, y);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_MOVETO: {
                    const y = reader.readInt16();
                    const x = reader.readInt16();
                    this._records.push((gdi) => {
                        gdi.moveTo(x, y);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_TEXTOUT: {
                    const len = reader.readInt16();
                    if (len > 0) {
                        const text = reader.readString(len);
                        // The string is padded to an even byte count.
                        reader.skip(len % 2);
                        const y = reader.readInt16();
                        const x = reader.readInt16();
                        this._records.push((gdi) => {
                            gdi.textOut(x, y, text);
                        });
                    }
                    break;
                }
                case Helper.GDI.RecordType.META_EXTTEXTOUT: {
                    const y = reader.readInt16();
                    const x = reader.readInt16();
                    const len = reader.readInt16();
                    const fwOpts = reader.readUint16();
                    let hasRect = null;
                    let hasDx = null;
                    // The optional clipping/opaque Rect (8 bytes) and the
                    // optional per-character dx array (len * 2 bytes) are not
                    // flagged explicitly; their presence is inferred from the
                    // total record size (14 bytes of fixed fields + padded
                    // string + whichever optional parts fit).
                    if (size * 2 === 14 + len + len % 2) {
                        hasRect = false;
                        hasDx = false;
                    }
                    if (size * 2 === 14 + 8 + len + len % 2) {
                        hasRect = true;
                        hasDx = false;
                    }
                    if (size * 2 === 14 + len + len % 2 + len * 2) {
                        hasRect = false;
                        hasDx = true;
                    }
                    if (size * 2 === 14 + 8 + len + len % 2 + len * 2) {
                        hasRect = true;
                        hasDx = true;
                    }
                    const rect = hasRect ? new Rect(reader) : null;
                    if (len > 0) {
                        const text = reader.readString(len);
                        reader.skip(len % 2);
                        const dx: number[] = [];
                        if (hasDx) {
                            for (let i = 0; i < text.length; i++) {
                                dx.push(reader.readInt16());
                            }
                        }
                        this._records.push((gdi) => {
                            gdi.extTextOut(x, y, text, fwOpts, rect, dx);
                        });
                    }
                    break;
                }
                case Helper.GDI.RecordType.META_EXCLUDECLIPRECT: {
                    const rect = new Rect(reader);
                    this._records.push((gdi) => {
                        gdi.excludeClipRect(rect);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_INTERSECTCLIPRECT: {
                    const rect = new Rect(reader);
                    this._records.push((gdi) => {
                        gdi.intersectClipRect(rect);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_POLYGON: {
                    let cnt = reader.readInt16();
                    const points: PointS[] = [];
                    while (cnt > 0) {
                        points.push(new PointS(reader));
                        cnt--;
                    }
                    this._records.push((gdi) => {
                        gdi.polygon(points, true);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_SETPOLYFILLMODE: {
                    const polyfillmode = reader.readUint16();
                    this._records.push((gdi) => {
                        gdi.setPolyFillMode(polyfillmode);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_POLYPOLYGON: {
                    // All point counts precede all point data in this record.
                    const cnt = reader.readUint16();
                    const polygonsPtCnts = [];
                    for (let i = 0; i < cnt; i++) {
                        polygonsPtCnts.push(reader.readUint16());
                    }
                    const polygons: PointS[][] = [];
                    for (let i = 0; i < cnt; i++) {
                        const ptCnt = polygonsPtCnts[i];
                        const p = [];
                        for (let ip = 0; ip < ptCnt; ip++) {
                            p.push(new PointS(reader));
                        }
                        polygons.push(p);
                    }
                    this._records.push((gdi) => {
                        gdi.polyPolygon(polygons);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_POLYLINE: {
                    let cnt = reader.readInt16();
                    const points: PointS[] = [];
                    while (cnt > 0) {
                        points.push(new PointS(reader));
                        cnt--;
                    }
                    this._records.push((gdi) => {
                        gdi.polyline(points);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_ELLIPSE: {
                    const rect = new Rect(reader);
                    this._records.push((gdi) => {
                        gdi.ellipse(rect);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_CREATEPALETTE: {
                    const palette = new Palette(reader);
                    this._records.push((gdi) => {
                        gdi.createPalette(palette);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_CREATEREGION: {
                    const region = new Region(reader);
                    this._records.push((gdi) => {
                        gdi.createRegion(region);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_CREATEPATTERNBRUSH: {
                    const datalength = size * 2 - (reader.pos - curpos);
                    const patternBitmap = new PatternBitmap16(reader, datalength);
                    // NOTE(review): elsewhere Brush's third argument is a
                    // boolean; passing the pattern bitmap here looks intended
                    // for pattern brushes — confirm against Brush's constructor.
                    const brush = new Brush(reader, datalength, patternBitmap);
                    this._records.push((gdi) => {
                        gdi.createPatternBrush(brush);
                    });
                    break;
                }
                case Helper.GDI.RecordType.META_OFFSETCLIPRGN: {
                    const offY = reader.readInt16();
                    const offX = reader.readInt16();
                    this._records.push((gdi) => {
                        gdi.offsetClipRgn(offX, offY);
                    });
                    break;
                }
                // Known but currently unsupported record types fall through to
                // the default case, which merely logs them and skips the data.
                case Helper.GDI.RecordType.META_REALIZEPALETTE:
                case Helper.GDI.RecordType.META_SETPALENTRIES:
                case Helper.GDI.RecordType.META_SETROP2:
                case Helper.GDI.RecordType.META_SETRELABS:
                case Helper.GDI.RecordType.META_SETTEXTCHAREXTRA:
                case Helper.GDI.RecordType.META_RESIZEPALETTE:
                case Helper.GDI.RecordType.META_SETLAYOUT:
                case Helper.GDI.RecordType.META_FILLREGION:
                case Helper.GDI.RecordType.META_SETMAPPERFLAGS:
                case Helper.GDI.RecordType.META_SETTEXTJUSTIFICATION:
                case Helper.GDI.RecordType.META_SCALEWINDOWEXT:
                case Helper.GDI.RecordType.META_SCALEVIEWPORTEXT:
                case Helper.GDI.RecordType.META_FLOODFILL:
                case Helper.GDI.RecordType.META_FRAMEREGION:
                case Helper.GDI.RecordType.META_ANIMATEPALETTE:
                case Helper.GDI.RecordType.META_EXTFLOODFILL:
                case Helper.GDI.RecordType.META_SETPIXEL:
                case Helper.GDI.RecordType.META_PATBLT:
                case Helper.GDI.RecordType.META_PIE:
                case Helper.GDI.RecordType.META_STRETCHBLT:
                case Helper.GDI.RecordType.META_INVERTREGION:
                case Helper.GDI.RecordType.META_PAINTREGION:
                case Helper.GDI.RecordType.META_ARC:
                case Helper.GDI.RecordType.META_CHORD:
                case Helper.GDI.RecordType.META_BITBLT:
                case Helper.GDI.RecordType.META_SETDIBTODEV:
                case Helper.GDI.RecordType.META_DIBBITBLT:
                default: {
                    // Reverse-lookup the record name for diagnostics only.
                    let recordName = "UNKNOWN";
                    for (const name in Helper.GDI.RecordType) {
                        const recordTypes: any = Helper.GDI.RecordType;
                        if (recordTypes[name] === type) {
                            recordName = name;
                            break;
                        }
                    }
                    Helper.log("[WMF] " + recordName + " record (0x" + type.toString(16) + ") at offset 0x"
                        + curpos.toString(16) + " with " + (size * 2) + " bytes");
                    break;
                }
            }
            // Advance to the next record; `size` is in 16-bit words.
            curpos += size * 2;
        }
        // Only META_EOF sets `all`; reaching here otherwise means truncation.
        if (!all) {
            throw new WMFJSError("Could not read all records");
        }
    }
    /**
     * Replays every parsed record, in order, against the given GDI context.
     *
     * @param gdi The context that receives the drawing operations.
     */
    public play(gdi: GDIContext): void {
        const len = this._records.length;
        for (let i = 0; i < len; i++) {
            this._records[i](gdi);
        }
    }
}
import * as chai from 'chai';
import * as parse5 from 'parse5';
import {AssignedBundle, BundleManifest} from '../bundle-manifest';
import {Bundler} from '../bundler';
import {bundle, HtmlBundler} from '../html-bundler';
import {parse} from '../parse5-utils';
import {getFileUrl} from '../url-utils';
import {heredoc, inMemoryAnalyzer} from './test-utils';
// Show full diffs on assertion failures; alias chai's assert for brevity.
chai.config.showDiff = true;
const assert = chai.assert;
/**
 * Normalizes HTML for comparison: collapses whitespace that follows a `>`,
 * then puts every `>` at the end of its own line and trims the result.
 */
const stripSpace = (html: string): string => {
  const collapsed = html.replace(/>\s+/g, '>');
  return collapsed.replace(/>/g, '>\n').trim();
};
// Tests for HtmlBundler: end-to-end inlining of ES6 modules plus unit tests
// of the private URL-rewriting helpers (accessed via bracket notation).
suite('HtmlBundler', () => {
  // End-to-end: three inline module scripts sharing dependencies should all
  // be rewritten to import from a single shared bundle.
  test('inline es6 modules', async () => {
    const analyzer = inMemoryAnalyzer({
      'multiple-inline-modules.html': `
        <script type="module">
          import {A, B} from './abc.js';
          console.log(A,B);
        </script>

        <script type="module">
          import {B, C} from './abc.js';
          import {Y} from './xyz.js';
          console.log(B,C,Y);
        </script>

        <script type="module">
          import {D,F} from './def.js';
          console.log(D,F);
        </script>
      `,
      'abc.js': `
        import{upcase} from './upcase.js';
        export const A = upcase('a');
        export const B = upcase('b');
        export const C = upcase('c');
      `,
      'def.js': `
        import{X, Y, Z} from './xyz.js';
        const D = X + X;
        const E = Y + Y;
        const F = Z + Z;
        export { D, E, F };
      `,
      'omgz.js': `
        import {upcase} from './upcase.js';
        export const Z = upcase('omgz');
      `,
      'upcase.js': `
        export function upcase(str) {
          return str.toUpperCase();
        }
      `,
      'xyz.js': `
        import{upcase} from './upcase.js';
        export const X = upcase('x');
        export const Y = upcase('y');
        export const Z = upcase('z');
      `,
    });
    const bundler = new Bundler({analyzer});
    const multipleInlineBundlesUrl =
        analyzer.resolveUrl('multiple-inline-modules.html')!;
    const manifest = await bundler.generateManifest([multipleInlineBundlesUrl]);
    const multipleInlineBundlesBundleDocument =
        await bundle(bundler, manifest, multipleInlineBundlesUrl);
    assert.deepEqual(multipleInlineBundlesBundleDocument.content, heredoc`
      <script type="module">
      import { A, B } from './shared_bundle_1.js';
      console.log(A, B);
      </script>

      <script type="module">
      import { B, C, Y } from './shared_bundle_1.js';
      console.log(B, C, Y);
      </script>

      <script type="module">
      import { X, Y, Z } from './shared_bundle_1.js';

      const D = X + X;
      const E = Y + Y;
      const F = Z + Z;
      console.log(D, F);
      </script>
    `);
  });

  suite('unit tests of private rewriting methods', () => {
    const importDocUrl = getFileUrl('foo/bar/my-element/index.html');
    const mainDocUrl = getFileUrl('foo/bar/index.html');

    let bundler: Bundler;
    let htmlBundler: HtmlBundler;
    let manifest: BundleManifest;
    // Shadows the imported `bundle` function within this suite.
    let bundle: AssignedBundle;

    // Fresh bundler/manifest/bundle for the main document before every test.
    beforeEach(async () => {
      bundler = new Bundler();
      await bundler.analyzeContents(mainDocUrl, '', true);
      manifest = await bundler.generateManifest([mainDocUrl]);
      bundle = manifest.getBundleForFile(mainDocUrl)!;
      htmlBundler = new HtmlBundler(bundler, bundle, manifest);
    });

    // NOTE(review): mocha suite callbacks are synchronous; the `async` here
    // appears unnecessary — confirm it has no effect on test registration.
    suite('Path rewriting', async () => {
      test('Rewrite URLs', async () => {
        const css = `
          x-element {
            background-image: url(foo.jpg);
          }
          x-bar {
            background-image: url(data:xxxxx);
          }
          x-quuz {
            background-image: url(\'https://foo.bar/baz.jpg\');
          }
        `;

        const expected = `
          x-element {
            background-image: url("my-element/foo.jpg");
          }
          x-bar {
            background-image: url("data:xxxxx");
          }
          x-quuz {
            background-image: url("https://foo.bar/baz.jpg");
          }
        `;

        const actual = htmlBundler['_rewriteCssTextBaseUrl'](
            css, importDocUrl, mainDocUrl);
        assert.deepEqual(actual, expected);
      });

      suite('Resolve Paths', () => {
        test('excluding template elements', () => {
          const html = `
            <link rel="import" href="../polymer/polymer.html">
            <link rel="stylesheet" href="my-element.css">
            <dom-module id="my-element">
            <template>
            <img src="neato.gif">
            <style>:host { background-image: url(background.svg); }</style>
            <div style="background-image: url(background.svg)"></div>
            </template>
            <script>Polymer({is: "my-element"})</script>
            </dom-module>
            <template is="dom-bind">
            <style>.outside-dom-module { background-image: url(outside-dom-module.png); }</style>
            </template>
            <style>.outside-template { background-image: url(outside-template.png); }</style>`;

          const expected = `
            <link rel="import" href="polymer/polymer.html">
            <link rel="stylesheet" href="my-element/my-element.css">
            <dom-module id="my-element" assetpath="my-element/">
            <template>
            <img src="neato.gif">
            <style>:host { background-image: url(background.svg); }</style>
            <div style="background-image: url(background.svg)"></div>
            </template>
            <script>Polymer({is: "my-element"})</script>
            </dom-module>
            <template is="dom-bind">
            <style>.outside-dom-module { background-image: url(outside-dom-module.png); }</style>
            </template>
            <style>.outside-template { background-image: url("my-element/outside-template.png"); }</style>
          `;

          const ast = parse(html);
          bundler.rewriteUrlsInTemplates = false;
          htmlBundler['_rewriteAstBaseUrl'](ast, importDocUrl, mainDocUrl);

          const actual = parse5.serialize(ast);
          assert.deepEqual(
              stripSpace(actual), stripSpace(expected), 'relative');
        });

        test('inside template elements (rewriteUrlsInTemplates=true)', () => {
          const html = `
            <link rel="import" href="../polymer/polymer.html">
            <link rel="stylesheet" href="my-element.css">
            <dom-module id="my-element">
            <template>
            <style>:host { background-image: url(background.svg); }</style>
            <div style="background-image: url(background.svg)"></div>
            </template>
            <script>Polymer({is: "my-element"})</script>
            </dom-module>
            <template is="dom-bind">
            <style>.something { background-image: url(something.png); }</style>
            </template>
            <style>.outside-template { background-image: url(outside-template.png); }</style>
          `;

          const expected = `
            <link rel="import" href="polymer/polymer.html">
            <link rel="stylesheet" href="my-element/my-element.css">
            <dom-module id="my-element" assetpath="my-element/">
            <template>
            <style>:host { background-image: url("my-element/background.svg"); }</style>
            <div style="background-image: url("my-element/background.svg")"></div>
            </template>
            <script>Polymer({is: "my-element"})</script>
            </dom-module>
            <template is="dom-bind">
            <style>.something { background-image: url("my-element/something.png"); }</style>
            </template>
            <style>.outside-template { background-image: url("my-element/outside-template.png"); }</style>
          `;

          const ast = parse(html);
          bundler.rewriteUrlsInTemplates = true;
          htmlBundler['_rewriteAstBaseUrl'](ast, importDocUrl, mainDocUrl);

          const actual = parse5.serialize(ast);
          assert.deepEqual(
              stripSpace(actual), stripSpace(expected), 'relative');
        });
      });

      // Data-binding expressions must not be treated as rewritable URLs.
      test('Leave Templated URLs', () => {
        const base = `
          <a href="{{foo}}"></a>
          <img src="[[bar]]">
        `;

        const ast = parse(base);
        htmlBundler['_rewriteAstBaseUrl'](ast, importDocUrl, mainDocUrl);

        const actual = parse5.serialize(ast);
        assert.deepEqual(
            stripSpace(actual), stripSpace(base), 'templated urls');
      });
    });

    suite('Document <base> tag emulation', () => {
      test('Resolve Paths with <base href> having a trailing /', () => {
        const htmlBase = `
          <base href="components/my-element/">
          <link rel="import" href="../polymer/polymer.html">
          <link rel="stylesheet" href="my-element.css">
          <dom-module id="my-element">
          <template>
          <style>:host { background-image: url(background.svg); }</style>
          <img src="bloop.gif">
          </template>
          </dom-module>
          <script>Polymer({is: "my-element"})</script>`;

        const expectedBase = `
          <link rel="import" href="components/polymer/polymer.html">
          <link rel="stylesheet" href="components/my-element/my-element.css">
          <dom-module id="my-element" assetpath="components/my-element/">
          <template>
          <style>:host { background-image: url(background.svg); }</style>
          <img src="bloop.gif">
          </template>
          </dom-module>
          <script>Polymer({is: "my-element"})</script>`;

        const ast = parse(htmlBase);
        htmlBundler['_rewriteAstToEmulateBaseTag'](
            ast, getFileUrl('the/doc/url'));

        const actual = parse5.serialize(ast);
        assert.deepEqual(stripSpace(actual), stripSpace(expectedBase), 'base');
      });

      // Old vulcanize did the wrong thing with base href that had no trailing
      // slash, so this proves the behavior of bundler is correct in this case.
      test('Resolve Paths with <base href> with no trailing slash', () => {
        const htmlBase = `
          <base href="components/my-element">
          <link rel="import" href="../polymer/polymer.html">
          <link rel="stylesheet" href="my-element.css">
          <dom-module id="my-element">
          <template>
          <style>:host { background-image: url(background.svg); }</style>
          <img src="bloop.gif">
          </template>
          </dom-module>
          <script>Polymer({is: "my-element"})</script>
        `;

        const expectedBase = `
          <link rel="import" href="polymer/polymer.html">
          <link rel="stylesheet" href="components/my-element.css">
          <dom-module id="my-element" assetpath="components/">
          <template>
          <style>:host { background-image: url(background.svg); }</style>
          <img src="bloop.gif">
          </template>
          </dom-module>
          <script>Polymer({is: "my-element"})</script>
        `;

        const ast = parse(htmlBase);
        htmlBundler['_rewriteAstToEmulateBaseTag'](
            ast, getFileUrl('the/doc/url'));

        const actual = parse5.serialize(ast);
        assert.deepEqual(stripSpace(actual), stripSpace(expectedBase), 'base');
      });

      test('Apply <base target> to all links and forms without target', () => {
        const htmlBase = `
          <base target="_blank">
          <a href="foo.html">LINK</a>
          <a href="bar.html" target="leavemealone">OTHERLINK</a>
          <form action="doit"></form>
          <form action="doitagain" target="leavemealone"></form>
          <div>Just a div.  I don't need a target</div>
        `;

        const expectedBase = `
          <a href="foo.html" target="_blank">LINK</a>
          <a href="bar.html" target="leavemealone">OTHERLINK</a>
          <form action="doit" target="_blank"></form>
          <form action="doitagain" target="leavemealone"></form>
          <div>Just a div.  I don't need a target</div>
        `;

        const ast = parse(htmlBase);
        htmlBundler['_rewriteAstToEmulateBaseTag'](
            ast, getFileUrl('the/doc/url'));

        const actual = parse5.serialize(ast);
        assert.deepEqual(
            stripSpace(actual), stripSpace(expectedBase), 'base target');
      });
    });
  });
});
import { Container, Contracts, Enums, Providers, Services, Utils } from "@arkecosystem/core-kernel";
import { Interfaces } from "@arkecosystem/crypto";
import delay from "delay";
import prettyMs from "pretty-ms";
import { NetworkState } from "./network-state";
import { Peer } from "./peer";
import { PeerCommunicator } from "./peer-communicator";
import { checkDNS, checkNTP } from "./utils";
const defaultDownloadChunkSize = 400;
// todo: review the implementation
@Container.injectable()
export class NetworkMonitor implements Contracts.P2P.NetworkMonitor {
@Container.inject(Container.Identifiers.Application)
private readonly app!: Contracts.Kernel.Application;
@Container.inject(Container.Identifiers.PluginConfiguration)
@Container.tagged("plugin", "@arkecosystem/core-p2p")
private readonly configuration!: Providers.PluginConfiguration;
@Container.inject(Container.Identifiers.PeerCommunicator)
private readonly communicator!: PeerCommunicator;
@Container.inject(Container.Identifiers.PeerRepository)
private readonly repository!: Contracts.P2P.PeerRepository;
@Container.inject(Container.Identifiers.PeerChunkCache)
private readonly chunkCache!: Contracts.P2P.ChunkCache;
@Container.inject(Container.Identifiers.EventDispatcherService)
private readonly events!: Contracts.Kernel.EventDispatcher;
@Container.inject(Container.Identifiers.LogService)
private readonly logger!: Contracts.Kernel.Logger;
public config: any;
public nextUpdateNetworkStatusScheduled: boolean | undefined;
private coldStart: boolean = false;
private downloadChunkSize: number = defaultDownloadChunkSize;
private initializing = true;
    @Container.postConstruct()
    public initialize(): void {
        // Snapshot the full plugin configuration once after DI wiring so the
        // rest of the monitor can read plain values synchronously.
        this.config = this.configuration.all(); // >_<
    }
public async boot(): Promise<void> {
await this.checkDNSConnectivity(this.config.dns);
await this.checkNTPConnectivity(this.config.ntp);
await this.populateSeedPeers();
if (this.config.skipDiscovery) {
this.logger.warning("Skipped peer discovery because the relay is in skip-discovery mode.");
} else {
await this.updateNetworkStatus(true);
for (const [version, peers] of Object.entries(
// @ts-ignore
Utils.groupBy(this.repository.getPeers(), (peer) => peer.version),
)) {
this.logger.info(`Discovered ${Utils.pluralize("peer", peers.length, true)} with v${version}.`);
}
}
// Give time to cooldown rate limits after peer verifier finished.
await Utils.sleep(1000);
this.initializing = false;
}
public async updateNetworkStatus(initialRun?: boolean): Promise<void> {
if (process.env.NODE_ENV === "test") {
return;
}
if (this.config.networkStart) {
this.coldStart = true;
this.logger.warning("Entering cold start because the relay is in genesis-start mode.");
}
if (this.config.disableDiscovery) {
this.logger.warning("Skipped peer discovery because the relay is in non-discovery mode.");
return;
}
try {
if (await this.discoverPeers(initialRun)) {
await this.cleansePeers();
}
} catch (error) {
this.logger.error(`Network Status: ${error.message}`);
}
let nextRunDelaySeconds = 600;
if (!this.hasMinimumPeers()) {
await this.populateSeedPeers();
nextRunDelaySeconds = 60;
this.logger.info(`Couldn't find enough peers. Falling back to seed peers.`);
}
this.scheduleUpdateNetworkStatus(nextRunDelaySeconds);
}
    /**
     * Pings peers (optionally a random subset of size `peerCount`) and lets
     * failed pings remove the unresponsive ones via dispatched peer events.
     *
     * @param fast      Use a short 1.5s ping timeout instead of the configured verifyTimeout.
     * @param forcePing Ping even peers that would otherwise be skipped.
     * @param peerCount Optional cap; a random sample of this size is checked.
     */
    public async cleansePeers({
        fast = false,
        forcePing = false,
        peerCount,
    }: { fast?: boolean; forcePing?: boolean; peerCount?: number } = {}): Promise<void> {
        let peers = this.repository.getPeers();
        let max = peers.length;
        let unresponsivePeers = 0;
        const pingDelay = fast ? 1500 : this.config.verifyTimeout;
        if (peerCount) {
            peers = Utils.shuffle(peers).slice(0, peerCount);
            max = Math.min(peers.length, peerCount);
        }
        this.logger.info(`Checking ${Utils.pluralize("peer", max, true)}`);
        // Failed peers grouped by error, for the aggregated debug log below.
        const peerErrors = {};
        // we use Promise.race to cut loose in case some communicator.ping() does not resolve within the delay
        // in that case we want to keep on with our program execution while ping promises can finish in the background
        await new Promise((resolve) => {
            let isResolved = false;
            // Simulates Promise.race, but doesn't cause "multipleResolvers" process error
            const resolvesFirst = () => {
                if (!isResolved) {
                    isResolved = true;
                    resolve();
                }
            };
            Promise.all(
                peers.map(async (peer) => {
                    try {
                        await this.communicator.ping(peer, pingDelay, forcePing);
                    } catch (error) {
                        unresponsivePeers++;
                        peerErrors[error] = peerErrors[error] || [];
                        peerErrors[error].push(peer);
                        await this.events.dispatch(Enums.PeerEvent.Disconnect, { peer });
                        this.events.dispatch(Enums.PeerEvent.Removed, peer);
                    }
                }),
            ).then(resolvesFirst);
            delay(pingDelay).finally(resolvesFirst);
        });
        for (const key of Object.keys(peerErrors)) {
            const peerCount = peerErrors[key].length;
            this.logger.debug(`Removed ${Utils.pluralize("peer", peerCount, true)} because of "${key}"`);
        }
        // During initial boot, also report overall responsiveness and the
        // median network height for operator visibility.
        if (this.initializing) {
            this.logger.info(
                `${max - unresponsivePeers} of ${Utils.pluralize("peer", max, true)} on the network are responsive`,
            );
            this.logger.info(`Median Network Height: ${this.getNetworkHeight().toLocaleString()}`);
        }
    }
    /**
     * Asks up to 8 random known peers for their peer lists and, when useful,
     * submits the newly learned peers for validation/acceptance.
     *
     * @param pingAll Force validation of all discovered peers.
     * @returns true when the discovered peers were submitted for validation.
     */
    public async discoverPeers(pingAll?: boolean): Promise<boolean> {
        // Cap how many peers we accept from any single remote peer list.
        const maxPeersPerPeer = 50;
        const ownPeers: Contracts.P2P.Peer[] = this.repository.getPeers();
        const theirPeers: Contracts.P2P.Peer[] = Object.values(
            (
                await Promise.all(
                    Utils.shuffle(this.repository.getPeers())
                        .slice(0, 8)
                        .map(async (peer: Contracts.P2P.Peer) => {
                            try {
                                const hisPeers = await this.communicator.getPeers(peer);
                                return hisPeers || [];
                            } catch (error) {
                                this.logger.debug(`Failed to get peers from ${peer.ip}: ${error.message}`);
                                return [];
                            }
                        }),
                )
            )
                // Deduplicate by IP: entries are merged into one object keyed
                // by IP, so later occurrences overwrite earlier ones.
                .map((peers) =>
                    Utils.shuffle(peers)
                        .slice(0, maxPeersPerPeer)
                        .reduce(
                            // @ts-ignore - rework this so TS stops throwing errors
                            (acc: object, curr: Contracts.P2P.PeerBroadcast) => ({
                                ...acc,
                                ...{ [curr.ip]: new Peer(curr.ip, curr.port) },
                            }),
                            {},
                        ),
                )
                .reduce((acc: object, curr: { [ip: string]: Contracts.P2P.Peer }) => ({ ...acc, ...curr }), {}),
        );
        // Only validate when forced, when short on peers, or when the network
        // appears to know substantially more peers than we do.
        if (pingAll || !this.hasMinimumPeers() || ownPeers.length < theirPeers.length * 0.75) {
            await Promise.all(
                theirPeers.map((p) =>
                    this.app
                        .get<Services.Triggers.Triggers>(Container.Identifiers.TriggerService)
                        .call("validateAndAcceptPeer", { peer: p, options: { lessVerbose: true } }),
                ),
            );
            // NOTE(review): not awaited — port probing appears intended to run
            // in the background; confirm against pingPeerPorts.
            this.pingPeerPorts(pingAll);
            return true;
        }
        this.pingPeerPorts();
        return false;
    }
    // Whether the relay is currently in genesis-start ("cold start") mode.
    public isColdStart(): boolean {
        return this.coldStart;
    }
    // Leaves cold-start mode once the network has caught up.
    public completeColdStart(): void {
        this.coldStart = false;
    }
public getNetworkHeight(): number {
const medians = this.repository
.getPeers()
.filter((peer) => peer.state.height)
.map((peer) => peer.state.height)
.sort((a, b) => {
Utils.assert.defined<string>(a);
Utils.assert.defined<string>(b);
return a - b;
});
return medians[Math.floor(medians.length / 2)] || 0;
}
public async getNetworkState(): Promise<Contracts.P2P.NetworkState> {
await this.cleansePeers({ fast: true, forcePing: true });
return await NetworkState.analyze(this, this.repository);
}
public async refreshPeersAfterFork(): Promise<void> {
this.logger.info(`Refreshing ${Utils.pluralize("peer", this.repository.getPeers().length, true)} after fork.`);
await this.cleansePeers({ forcePing: true });
}
public async checkNetworkHealth(): Promise<Contracts.P2P.NetworkStatus> {
await this.discoverPeers(true);
await this.cleansePeers({ forcePing: true });
const lastBlock: Interfaces.IBlock = this.app
.get<Contracts.State.StateStore>(Container.Identifiers.StateStore)
.getLastBlock();
const verificationResults: Contracts.P2P.PeerVerificationResult[] = this.repository
.getPeers()
.filter((peer) => peer.verificationResult)
.map((peer) => peer.verificationResult!);
if (verificationResults.length === 0) {
this.logger.info("No verified peers available.");
return { forked: false };
}
const forkVerificationResults: Contracts.P2P.PeerVerificationResult[] = verificationResults.filter(
(verificationResult: Contracts.P2P.PeerVerificationResult) => verificationResult.forked,
);
const forkHeights: number[] = forkVerificationResults
.map((verificationResult: Contracts.P2P.PeerVerificationResult) => verificationResult.highestCommonHeight)
.filter((forkHeight, i, arr) => arr.indexOf(forkHeight) === i) // unique
.sort()
.reverse();
for (const forkHeight of forkHeights) {
const forkPeerCount = forkVerificationResults.filter((vr) => vr.highestCommonHeight === forkHeight).length;
const ourPeerCount = verificationResults.filter((vr) => vr.highestCommonHeight > forkHeight).length + 1;
if (forkPeerCount > ourPeerCount) {
const blocksToRollback = lastBlock.data.height - forkHeight;
if (blocksToRollback > 5000) {
this.logger.info(
`Rolling back 5000/${blocksToRollback} blocks to fork at height ${forkHeight} (${ourPeerCount} vs ${forkPeerCount}).`,
);
return { forked: true, blocksToRollback: 5000 };
} else {
this.logger.info(
`Rolling back ${blocksToRollback} blocks to fork at height ${forkHeight} (${ourPeerCount} vs ${forkPeerCount}).`,
);
return { forked: true, blocksToRollback };
}
} else {
this.logger.debug(`Ignoring fork at height ${forkHeight} (${ourPeerCount} vs ${forkPeerCount}).`);
}
}
return { forked: false };
}
public async downloadBlocksFromHeight(
fromBlockHeight: number,
maxParallelDownloads = 10,
): Promise<Interfaces.IBlockData[]> {
const peersAll: Contracts.P2P.Peer[] = this.repository.getPeers();
if (peersAll.length === 0) {
this.logger.error(`Could not download blocks: we have 0 peers`);
return [];
}
const peersNotForked: Contracts.P2P.Peer[] = Utils.shuffle(peersAll.filter((peer) => !peer.isForked()));
if (peersNotForked.length === 0) {
this.logger.error(
`Could not download blocks: We have ${peersAll.length} peer(s) but all ` +
`of them are on a different chain than us`,
);
return [];
}
const networkHeight: number = this.getNetworkHeight();
let chunksMissingToSync: number;
if (!networkHeight || networkHeight <= fromBlockHeight) {
chunksMissingToSync = 1;
} else {
chunksMissingToSync = Math.ceil((networkHeight - fromBlockHeight) / this.downloadChunkSize);
}
const chunksToDownload: number = Math.min(chunksMissingToSync, peersNotForked.length, maxParallelDownloads);
// We must return an uninterrupted sequence of blocks, starting from `fromBlockHeight`,
// with sequential heights, without gaps.
const downloadJobs = [];
const downloadResults: any = [];
let someJobFailed: boolean = false;
let chunksHumanReadable: string = "";
for (let i = 0; i < chunksToDownload; i++) {
const height: number = fromBlockHeight + this.downloadChunkSize * i;
const isLastChunk: boolean = i === chunksToDownload - 1;
const blocksRange: string = `[${(height + 1).toLocaleString()}, ${(isLastChunk
? ".."
: height + this.downloadChunkSize
).toLocaleString()}]`;
//@ts-ignore
downloadJobs.push(async () => {
if (this.chunkCache.has(blocksRange)) {
downloadResults[i] = this.chunkCache.get(blocksRange);
// Remove it from the cache so that it does not get served many times
// from the cache. In case of network reorganization or downloading
// flawed chunks we want to re-download from another peer.
this.chunkCache.remove(blocksRange);
return;
}
let blocks!: Interfaces.IBlockData[];
let peer: Contracts.P2P.Peer;
let peerPrint!: string;
// As a first peer to try, pick such a peer that different jobs use different peers.
// If that peer fails then pick randomly from the remaining peers that have not
// been first-attempt for any job.
const peersToTry = [peersNotForked[i], ...Utils.shuffle(peersNotForked.slice(chunksToDownload))];
if (peersToTry.length === 1) {
// special case where we don't have "backup peers" (that have not been first-attempt for any job)
// so add peers that have been first-attempt as backup peers
peersToTry.push(...peersNotForked.filter((p) => p.ip !== peersNotForked[i].ip));
}
for (peer of peersToTry) {
peerPrint = `${peer.ip}:${peer.port}`;
try {
blocks = await this.communicator.getPeerBlocks(peer, {
fromBlockHeight: height,
blockLimit: this.downloadChunkSize,
});
if (blocks.length > 0 || isLastChunk) {
// when `isLastChunk` it can be normal that the peer does not send any block (when none were forged)
this.logger.debug(
`Downloaded blocks ${blocksRange} (${blocks.length}) ` + `from ${peerPrint}`,
);
downloadResults[i] = blocks;
return;
} else {
throw new Error("Peer did not return any block");
}
} catch (error) {
this.logger.info(
`Failed to download blocks ${blocksRange} from ${peerPrint}: ${error.message}`,
);
}
if (someJobFailed) {
this.logger.info(
`Giving up on trying to download blocks ${blocksRange}: ` + `another download job failed`,
);
}
}
someJobFailed = true;
throw new Error(
`Could not download blocks ${blocksRange} from any of ${peersToTry.length} ` +
`peer(s). Last attempt returned ${blocks.length} block(s) from peer ${peerPrint}.`,
);
});
if (chunksHumanReadable.length > 0) {
chunksHumanReadable += ", ";
}
chunksHumanReadable += blocksRange;
}
this.logger.debug(`Downloading blocks in chunks: ${chunksHumanReadable}`);
let firstFailureMessage!: string;
try {
// Convert the array of AsyncFunction to an array of Promise by calling the functions.
// @ts-ignore
await Promise.all(downloadJobs.map((f) => f()));
} catch (error) {
firstFailureMessage = error.message;
}
let downloadedBlocks: Interfaces.IBlockData[] = [];
let i;
for (i = 0; i < chunksToDownload; i++) {
if (downloadResults[i] === undefined) {
this.logger.error(firstFailureMessage);
break;
}
downloadedBlocks = [...downloadedBlocks, ...downloadResults[i]];
}
// Save any downloaded chunks that are higher than a failed chunk for later reuse.
for (i++; i < chunksToDownload; i++) {
if (downloadResults[i]) {
const height: number = fromBlockHeight + this.downloadChunkSize * i;
const blocksRange: string = `[${(height + 1).toLocaleString()}, ${(
height + this.downloadChunkSize
).toLocaleString()}]`;
this.chunkCache.set(blocksRange, downloadResults[i]);
}
}
// if we did not manage to download any block, reduce chunk size for next time
this.downloadChunkSize =
downloadedBlocks.length === 0 ? Math.ceil(this.downloadChunkSize / 10) : defaultDownloadChunkSize;
return downloadedBlocks;
}
/**
 * Broadcasts a block to a subset of our peers.
 *
 * If the block matches the current "block ping" (i.e. we received it from the
 * network rather than from our own forger), a broadcast quota is derived from
 * how many times we have already been pinged with it (count vs. maxHop): the
 * further the block has already travelled, the fewer peers we relay it to.
 */
public async broadcastBlock(block: Interfaces.IBlock): Promise<void> {
    const blockchain = this.app.get<Contracts.Blockchain.Blockchain>(Container.Identifiers.BlockchainService);
    let blockPing = blockchain.getBlockPing();
    let peers: Contracts.P2P.Peer[] = this.repository.getPeers();
    if (blockPing && blockPing.block.id === block.data.id && !blockPing.fromForger) {
        // wait a bit before broadcasting if a bit early
        const diff = blockPing.last - blockPing.first;
        const maxHop = 4;
        let broadcastQuota: number = (maxHop - blockPing.count) / maxHop;
        if (diff < 500 && broadcastQuota > 0) {
            // Back off until ~500ms have elapsed since the first ping, then
            // re-read the ping state, which may have changed while sleeping.
            await Utils.sleep(500 - diff);
            // NOTE(review): non-null assertion — presumably getBlockPing() cannot
            // return undefined once a ping existed above; confirm.
            blockPing = blockchain.getBlockPing()!;
            // a newer block already arrived in the meantime — skip broadcasting
            if (blockPing.block.height !== block.data.height) {
                return;
            }
            broadcastQuota = (maxHop - blockPing.count) / maxHop;
        }
        peers = broadcastQuota <= 0 ? [] : Utils.shuffle(peers).slice(0, Math.ceil(broadcastQuota * peers.length));
        // select a portion of our peers according to quota calculated before
    }
    this.logger.info(
        `Broadcasting block ${block.data.height.toLocaleString()} to ${Utils.pluralize(
            "peer",
            peers.length,
            true,
        )}`,
    );
    await Promise.all(peers.map((peer) => this.communicator.postBlock(peer, block)));
}
/**
 * Re-checks the advertised ports of known peers.
 *
 * @param pingAll When true, every peer is checked; otherwise a random half of
 *                the peer list is sampled to limit network load.
 */
private async pingPeerPorts(pingAll?: boolean): Promise<void> {
    let peers = this.repository.getPeers();
    if (!pingAll) {
        // Sample a random half of the peers to keep the periodic check cheap.
        peers = Utils.shuffle(peers).slice(0, Math.floor(peers.length / 2));
    }
    this.logger.debug(`Checking ports of ${Utils.pluralize("peer", peers.length, true)}.`);
    // Fix: the aggregated promise was previously not awaited, so the method
    // resolved before any ping completed and rejections went unhandled.
    await Promise.all(peers.map((peer) => this.communicator.pingPorts(peer)));
}
/**
 * Verifies outbound DNS connectivity via checkDNS, logging either the host
 * that answered or the failure reason. Never throws.
 */
private async checkDNSConnectivity(options): Promise<void> {
    try {
        const verifyingHost = await checkDNS(this.app, options);
        this.logger.info(`Your network connectivity has been verified by ${verifyingHost}`);
    } catch (error) {
        this.logger.error(error.message);
    }
}
/**
 * Verifies NTP connectivity via checkNTP and logs the local clock offset
 * (signed, human-readable). Never throws.
 */
private async checkNTPConnectivity(options): Promise<void> {
    try {
        const response = await checkNTP(this.app, options);
        this.logger.info(`Your NTP connectivity has been verified by ${response.host}`);
        const offset = response.time.t;
        const sign = offset < 0 ? "-" : "";
        this.logger.info(`Local clock is off by ${sign}${prettyMs(Math.abs(offset))} from NTP`);
    } catch (error) {
        this.logger.error(error.message);
    }
}
/**
 * Schedules a single deferred call to updateNetworkStatus().
 *
 * The boolean latch guarantees at most one pending update at a time: calls
 * made while one is already scheduled are no-ops.
 *
 * @param nextUpdateInSeconds Delay before the update runs, in seconds.
 */
private async scheduleUpdateNetworkStatus(nextUpdateInSeconds): Promise<void> {
    if (this.nextUpdateNetworkStatusScheduled) {
        return;
    }
    this.nextUpdateNetworkStatusScheduled = true;
    await Utils.sleep(nextUpdateInSeconds * 1000);
    this.nextUpdateNetworkStatusScheduled = false;
    // NOTE(review): deliberately fire-and-forget — the returned promise is not
    // awaited, so any rejection from updateNetworkStatus() is unhandled here;
    // confirm this is intended.
    this.updateNetworkStatus();
}
/**
 * Checks whether enough peers are known to consider the network reachable.
 *
 * @returns true when the configured minimum network reach is met, or when the
 *          check is disabled via `ignoreMinimumNetworkReach` (seed mode).
 */
private hasMinimumPeers(): boolean {
    if (this.config.ignoreMinimumNetworkReach) {
        this.logger.warning("Ignored the minimum network reach because the relay is in seed mode.");
        return true;
    }
    // getPeers() returns an array (see broadcastBlock's typed usage), so read
    // its length directly instead of the previous Object.keys(...).length detour.
    return this.repository.getPeers().length >= this.config.minimumNetworkReach;
}
/**
 * Seeds the peer repository from the configured peer list plus optional
 * remote/local peer sources.
 *
 * Peers loaded from sources are merged into the static list, deduplicated by
 * IP; failures while fetching sources are ignored (best effort). If no seed
 * peers remain the application is terminated. Each seed peer is then pushed
 * through the "validateAndAcceptPeer" trigger.
 *
 * @returns A promise over the trigger results for every seed peer.
 */
private async populateSeedPeers(): Promise<any> {
    const peerList: Contracts.P2P.PeerData[] = this.app.config("peers").list;
    try {
        const peersFromUrl = await this.loadPeersFromUrlList();
        for (const peer of peersFromUrl) {
            // Merge by IP so statically configured peers are not duplicated.
            if (!peerList.find((p) => p.ip === peer.ip)) {
                peerList.push({
                    ip: peer.ip,
                    port: peer.port,
                });
            }
        }
    } catch {} // best effort: a broken peer source must not prevent startup
    if (!peerList || !peerList.length) {
        this.app.terminate("No seed peers defined in peers.json");
    }
    const peers: Contracts.P2P.Peer[] = peerList.map((peer) => {
        const peerInstance = new Peer(peer.ip, peer.port);
        peerInstance.version = this.app.version();
        return peerInstance;
    });
    return Promise.all(
        // `peers` is already an array — map it directly; the previous
        // Object.values(peers) round-trip (and the @ts-ignore it forced)
        // was redundant.
        peers.map((peer: Contracts.P2P.Peer) => {
            // Forget first so a stale repository entry never blocks re-acceptance.
            this.repository.forgetPeer(peer);
            return this.app
                .get<Services.Triggers.Triggers>(Container.Identifiers.TriggerService)
                .call("validateAndAcceptPeer", { peer, options: { seed: true, lessVerbose: true } });
        }),
    );
}
/**
 * Loads peers from the configured `peers.sources` list.
 *
 * A source starting with "/" is treated as a local JSON file (loaded via
 * require), anything else as an HTTP(S) URL returning JSON.
 *
 * NOTE(review): the `return` inside the loop means only the FIRST source is
 * ever consulted — later sources are never used as fallbacks, and an error
 * from the first source propagates to the caller. Confirm this is intended.
 *
 * @returns The peer entries from the first source, or [] when none configured.
 */
private async loadPeersFromUrlList(): Promise<Array<{ ip: string; port: number }>> {
    const urls: string[] = this.app.config("peers").sources || [];
    for (const url of urls) {
        // Local File...
        if (url.startsWith("/")) {
            return require(url);
        }
        // URL...
        this.logger.debug(`GET ${url}`);
        const { data } = await Utils.http.get(url);
        // Some servers return the body pre-parsed as an object, others as a string.
        return typeof data === "object" ? data : JSON.parse(data);
    }
    return [];
}
} | the_stack |
import { SourceFile } from "typescript";
import { ComponentAnalyzer } from "./component-analyzer/component-analyzer.js";
import { LitCssDocumentAnalyzer } from "./document-analyzer/css/lit-css-document-analyzer.js";
import { LitHtmlDocumentAnalyzer } from "./document-analyzer/html/lit-html-document-analyzer.js";
import { renameLocationsForTagName } from "./document-analyzer/html/rename-locations/rename-locations-for-tag-name.js";
import { LitAnalyzerContext } from "./lit-analyzer-context.js";
import { CssDocument } from "./parse/document/text-document/css-document/css-document.js";
import { HtmlDocument } from "./parse/document/text-document/html-document/html-document.js";
import { TextDocument } from "./parse/document/text-document/text-document.js";
import { setTypescriptModule } from "./ts-module.js";
import { LitClosingTagInfo } from "./types/lit-closing-tag-info.js";
import { LitCodeFix } from "./types/lit-code-fix.js";
import { LitCompletion } from "./types/lit-completion.js";
import { LitCompletionDetails } from "./types/lit-completion-details.js";
import { LitDefinition } from "./types/lit-definition.js";
import { LitDiagnostic } from "./types/lit-diagnostic.js";
import { LitFormatEdit } from "./types/lit-format-edit.js";
import { LitOutliningSpan } from "./types/lit-outlining-span.js";
import { LitQuickInfo } from "./types/lit-quick-info.js";
import { LitRenameInfo } from "./types/lit-rename-info.js";
import { LitRenameLocation } from "./types/lit-rename-location.js";
import { DocumentOffset, Range, SourceFilePosition } from "./types/range.js";
import { arrayFlat } from "./util/array-util.js";
import { getNodeAtPosition, nodeIntersects } from "./util/ast-util.js";
import { iterableFirst } from "./util/iterable-util.js";
import { makeSourceFileRange, sfRangeToDocumentRange } from "./util/range-util.js";
/**
 * Facade exposing all lit-analyzer language-service features for a Typescript
 * SourceFile: outlining spans, definitions, quick info, completions, rename,
 * closing tags, diagnostics, code fixes and format edits.
 *
 * Every public method follows the same shape: bind the analyzer context to
 * the current file via `setContextBase`, locate the embedded text document
 * (html or css tagged template) at the requested position, refresh component
 * metadata via `updateComponents`, then delegate to the analyzer matching the
 * document type (LitCssDocumentAnalyzer / LitHtmlDocumentAnalyzer), or to
 * ComponentAnalyzer for component-level checks.
 */
export class LitAnalyzer {
	private litHtmlDocumentAnalyzer = new LitHtmlDocumentAnalyzer();
	private litCssDocumentAnalyzer = new LitCssDocumentAnalyzer();
	private componentAnalyzer = new ComponentAnalyzer();
	constructor(private context: LitAnalyzerContext) {
		// Set the Typescript module
		// I plan on removing this function, so only "context.ts" is used.
		setTypescriptModule(context.ts);
	}
	/** Returns folding/outlining spans for every html document embedded in the file (css has none). */
	getOutliningSpansInFile(file: SourceFile): LitOutliningSpan[] {
		this.context.setContextBase({ file });
		const documents = this.getDocumentsInFile(file);
		this.context.updateComponents(file);
		return arrayFlat(
			documents.map(document => {
				if (document instanceof CssDocument) {
					return [];
				} else if (document instanceof HtmlDocument) {
					return this.litHtmlDocumentAnalyzer.getOutliningSpans(document);
				}
				return [];
			})
		);
	}
	/** Returns the go-to-definition target at the given source-file position, if the position falls inside an embedded document. */
	getDefinitionAtPosition(file: SourceFile, position: SourceFilePosition): LitDefinition | undefined {
		this.context.setContextBase({ file });
		const { document, offset } = this.getDocumentAndOffsetAtPosition(file, position);
		if (document == null) return undefined;
		this.context.updateComponents(file);
		if (document instanceof CssDocument) {
			return this.litCssDocumentAnalyzer.getDefinitionAtOffset(document, offset, this.context);
		} else if (document instanceof HtmlDocument) {
			return this.litHtmlDocumentAnalyzer.getDefinitionAtOffset(document, offset, this.context);
		}
		return;
	}
	/** Returns hover (quick info) contents at the given position, if inside an embedded document. */
	getQuickInfoAtPosition(file: SourceFile, position: SourceFilePosition): LitQuickInfo | undefined {
		this.context.setContextBase({ file });
		const { document, offset } = this.getDocumentAndOffsetAtPosition(file, position);
		if (document == null) return undefined;
		this.context.updateComponents(file);
		if (document instanceof CssDocument) {
			return this.litCssDocumentAnalyzer.getQuickInfoAtOffset(document, offset, this.context);
		} else if (document instanceof HtmlDocument) {
			return this.litHtmlDocumentAnalyzer.getQuickInfoAtOffset(document, offset, this.context);
		}
		return;
	}
	/**
	 * Returns rename info at a position.
	 *
	 * Inside an html document this is delegated to the html analyzer (css does
	 * not support rename). Outside any embedded document, a string literal that
	 * matches a known custom-element tag name (e.g. the argument of a
	 * customElements.define-style registration) is also renameable.
	 */
	getRenameInfoAtPosition(file: SourceFile, position: SourceFilePosition): LitRenameInfo | undefined {
		this.context.setContextBase({ file });
		const { document, offset } = this.getDocumentAndOffsetAtPosition(file, position);
		if (document != null) {
			if (document instanceof CssDocument) {
				return undefined;
			} else if (document instanceof HtmlDocument) {
				return this.litHtmlDocumentAnalyzer.getRenameInfoAtOffset(document, offset, this.context);
			}
		} else {
			const nodeUnderCursor = getNodeAtPosition(file, position);
			if (nodeUnderCursor == null) return undefined;
			if (this.context.ts.isStringLiteralLike(nodeUnderCursor)) {
				const tagName = nodeUnderCursor.text;
				const definition = this.context.definitionStore.getDefinitionForTagName(tagName);
				if (definition != null && nodeIntersects(nodeUnderCursor, iterableFirst(definition.tagNameNodes)!)) {
					return {
						fullDisplayName: tagName,
						displayName: tagName,
						// +1 / -1 trims the surrounding quote characters from the literal's span
						range: makeSourceFileRange({ start: nodeUnderCursor.getStart() + 1, end: nodeUnderCursor.getEnd() - 1 }),
						kind: "label",
						target: definition
					};
				}
			}
		}
		return;
	}
	/** Returns every location that must change when the symbol at the position is renamed. */
	getRenameLocationsAtPosition(file: SourceFile, position: SourceFilePosition): LitRenameLocation[] {
		this.context.setContextBase({ file });
		const renameInfo = this.getRenameInfoAtPosition(file, position);
		if (renameInfo == null) return [];
		// "document" distinguishes in-document rename info from tag-name rename info
		if ("document" in renameInfo) {
			const document = renameInfo.document;
			const offset = document.virtualDocument.sfPositionToDocumentOffset(position);
			if (document instanceof CssDocument) {
				return [];
			} else {
				return this.litHtmlDocumentAnalyzer.getRenameLocationsAtOffset(document, offset, this.context);
			}
		} else {
			return renameLocationsForTagName(renameInfo.target.tagName, this.context);
		}
	}
	/** Returns auto-closing-tag info at a position inside an embedded html document. */
	getClosingTagAtPosition(file: SourceFile, position: SourceFilePosition): LitClosingTagInfo | undefined {
		this.context.setContextBase({ file });
		const { document, offset } = this.getDocumentAndOffsetAtPosition(file, position);
		if (document == null) return undefined;
		this.context.updateComponents(file);
		if (document instanceof HtmlDocument) {
			return this.litHtmlDocumentAnalyzer.getClosingTagAtOffset(document, offset);
		}
		return;
	}
	/** Returns detail/documentation for a specific completion item previously offered at this position. */
	getCompletionDetailsAtPosition(file: SourceFile, position: SourceFilePosition, name: string): LitCompletionDetails | undefined {
		this.context.setContextBase({ file });
		const { document, offset } = this.getDocumentAndOffsetAtPosition(file, position);
		if (document == null) return undefined;
		if (document instanceof CssDocument) {
			return this.litCssDocumentAnalyzer.getCompletionDetailsAtOffset(document, offset, name, this.context);
		} else if (document instanceof HtmlDocument) {
			return this.litHtmlDocumentAnalyzer.getCompletionDetailsAtOffset(document, offset, name, this.context);
		}
		return;
	}
	/** Returns completion items at a position inside an embedded css/html document. */
	getCompletionsAtPosition(file: SourceFile, position: SourceFilePosition): LitCompletion[] | undefined {
		this.context.setContextBase({ file });
		const { document, offset } = this.getDocumentAndOffsetAtPosition(file, position);
		if (document == null) return undefined;
		this.context.updateComponents(file);
		if (document instanceof CssDocument) {
			return this.litCssDocumentAnalyzer.getCompletionsAtOffset(document, offset, this.context);
		} else if (document instanceof HtmlDocument) {
			return this.litHtmlDocumentAnalyzer.getCompletionsAtOffset(document, offset, this.context);
		}
		return;
	}
	/**
	 * Collects all diagnostics for the file: component definitions declared in
	 * it, component declarations contained in it, and every embedded document.
	 * Work is bounded by a 7s timeout; cancellation aborts between items.
	 */
	getDiagnosticsInFile(file: SourceFile): LitDiagnostic[] {
		this.context.setContextBase({ file, timeout: 7000, throwOnCancellation: true });
		this.context.updateComponents(file);
		this.context.updateDependencies(file);
		const documents = this.getDocumentsInFile(file);
		const diagnostics: LitDiagnostic[] = [];
		// Get diagnostics for components definitions in this file
		const definitions = this.context.definitionStore.getDefinitionsWithDeclarationInFile(file);
		for (const definition of definitions) {
			if (this.context.isCancellationRequested) {
				break;
			}
			diagnostics.push(...this.componentAnalyzer.getDiagnostics(definition, this.context));
		}
		// Get diagnostics for components in this file
		const declarations = this.context.definitionStore.getComponentDeclarationsInFile(file);
		for (const declaration of declarations) {
			if (this.context.isCancellationRequested) {
				break;
			}
			diagnostics.push(...this.componentAnalyzer.getDiagnostics(declaration, this.context));
		}
		// Get diagnostics for documents in this file
		for (const document of documents) {
			if (this.context.isCancellationRequested) {
				break;
			}
			if (document instanceof CssDocument) {
				diagnostics.push(...this.litCssDocumentAnalyzer.getDiagnostics(document, this.context));
			} else if (document instanceof HtmlDocument) {
				diagnostics.push(...this.litHtmlDocumentAnalyzer.getDiagnostics(document, this.context));
			}
		}
		return diagnostics;
	}
	/**
	 * Returns code fixes for a source-file range: html-document fixes when the
	 * range starts inside an html document, otherwise the first non-empty set
	 * of component-level fixes found in this file.
	 */
	getCodeFixesAtPositionRange(file: SourceFile, sourceFileRange: Range): LitCodeFix[] {
		this.context.setContextBase({ file });
		const { document } = this.getDocumentAndOffsetAtPosition(file, sourceFileRange.start);
		this.context.updateComponents(file);
		this.context.updateDependencies(file);
		// Return fixes for intersecting document
		if (document instanceof HtmlDocument) {
			return this.litHtmlDocumentAnalyzer.getCodeFixesAtOffsetRange(document, sfRangeToDocumentRange(document, sourceFileRange), this.context);
		}
		// Else, return fixes for components in this file
		else {
			const definitions = this.context.definitionStore.getDefinitionsWithDeclarationInFile(file);
			for (const definition of definitions) {
				const result = this.componentAnalyzer.getCodeFixesAtOffsetRange(definition, makeSourceFileRange(sourceFileRange), this.context);
				if (result.length > 0) {
					return result;
				}
			}
			const components = this.context.definitionStore.getComponentDeclarationsInFile(file);
			for (const component of components) {
				const result = this.componentAnalyzer.getCodeFixesAtOffsetRange(component, makeSourceFileRange(sourceFileRange), this.context);
				if (result.length > 0) {
					return result;
				}
			}
		}
		return [];
	}
	// NOTE(review): `ts.FormatCodeSettings` appears to rely on an ambient `ts`
	// namespace — only `SourceFile` is imported from "typescript" above; confirm.
	/** Returns format edits for every embedded html document (css is not formatted). */
	getFormatEditsInFile(file: SourceFile, settings: ts.FormatCodeSettings): LitFormatEdit[] {
		this.context.setContextBase({ file });
		const documents = this.getDocumentsInFile(file);
		return arrayFlat(
			documents.map(document => {
				if (document instanceof CssDocument) {
					return [];
				} else if (document instanceof HtmlDocument) {
					return this.litHtmlDocumentAnalyzer.getFormatEdits(document, settings);
				}
				return [];
			})
		);
	}
	/**
	 * Resolves the embedded document at a source-file position together with
	 * the position translated to a document-local offset.
	 * When no document covers the position, `offset` is the sentinel -1.
	 */
	private getDocumentAndOffsetAtPosition(
		sourceFile: SourceFile,
		position: SourceFilePosition
	): { document: TextDocument | undefined; offset: DocumentOffset } {
		const document = this.context.documentStore.getDocumentAtPosition(sourceFile, position, this.context.config);
		return {
			document,
			offset: document != null ? document.virtualDocument.sfPositionToDocumentOffset(position) : -1
		};
	}
	/** Returns all embedded text documents (html/css tagged templates) in the file. */
	private getDocumentsInFile(sourceFile: SourceFile): TextDocument[] {
		return this.context.documentStore.getDocumentsInFile(sourceFile, this.context.config);
	}
}
import * as vscode from "vscode";
import * as path from "path";
import * as mkdirp from "mkdirp";
import { logger } from "vscode-debugadapter";
import { DebugProtocol } from "vscode-debugprotocol";
import { ProjectVersionHelper } from "../common/projectVersionHelper";
import { TelemetryHelper } from "../common/telemetryHelper";
import { MultipleLifetimesAppWorker } from "./appWorker";
import { RnCDPMessageHandler } from "../cdp-proxy/CDPMessageHandlers/rnCDPMessageHandler";
import {
DebugSessionBase,
DebugSessionStatus,
IAttachRequestArgs,
ILaunchRequestArgs,
} from "./debugSessionBase";
import { JsDebugConfigAdapter } from "./jsDebugConfigAdapter";
import { ErrorHelper } from "../common/error/errorHelper";
import { InternalErrorCode } from "../common/error/internalErrorCode";
import * as nls from "vscode-nls";
// Configure vscode-nls (standalone external bundle format) before the first
// loadMessageBundle() call, then grab the localize function for this file.
nls.config({
    messageFormat: nls.MessageFormat.bundle,
    bundleFormat: nls.BundleFormat.standalone,
})();
const localize = nls.loadMessageBundle();
/**
 * Debug session for "classic" React Native debugging.
 *
 * Launches or attaches to an RN application, runs the app's JS inside a
 * background app worker, routes Chrome DevTools Protocol traffic through the
 * extension's CDP proxy, and spawns a child js-debug (pwa-node) session that
 * performs the actual debugging. The `debugSessionStatus` state machine
 * (FirstConnection → *Pending → ConnectionAllowed/Done/Failed) serializes
 * (re)connections of that child session across app-worker reloads.
 */
export class RNDebugSession extends DebugSessionBase {
    private readonly terminateCommand: string;
    private appWorker: MultipleLifetimesAppWorker | null;
    private nodeSession: vscode.DebugSession | null;
    private onDidStartDebugSessionHandler: vscode.Disposable;
    private onDidTerminateDebugSessionHandler: vscode.Disposable;
    constructor(session: vscode.DebugSession) {
        super(session);
        // constants definition
        this.terminateCommand = "terminate"; // the "terminate" command is sent from the client to the debug adapter in order to give the debuggee a chance for terminating itself
        // variables definition
        this.appWorker = null;
        // Track child js-debug sessions so we can adopt/restart them as needed.
        this.onDidStartDebugSessionHandler = vscode.debug.onDidStartDebugSession(
            this.handleStartDebugSession.bind(this),
        );
        this.onDidTerminateDebugSessionHandler = vscode.debug.onDidTerminateDebugSession(
            this.handleTerminateDebugSession.bind(this),
        );
    }
    /**
     * DAP "launch": starts the application via the app launcher, then (unless
     * `enableDebug` is false) falls through to attachRequest to hook up the
     * debugger. Launch failures are wrapped as ApplicationLaunchFailed.
     */
    protected async launchRequest(
        response: DebugProtocol.LaunchResponse,
        launchArgs: ILaunchRequestArgs,
        // eslint-disable-next-line @typescript-eslint/no-unused-vars
        request?: DebugProtocol.Request,
    ): Promise<void> {
        try {
            try {
                await this.initializeSettings(launchArgs);
                logger.log("Launching the application");
                logger.verbose(`Launching the application: ${JSON.stringify(launchArgs, null, 2)}`);
                await this.appLauncher.launch(launchArgs);
                if (!launchArgs.enableDebug) {
                    this.sendResponse(response);
                    // if debugging is not enabled skip attach request
                    return;
                }
            } catch (error) {
                throw ErrorHelper.getInternalError(
                    InternalErrorCode.ApplicationLaunchFailed,
                    error.message || error,
                );
            }
            // if debugging is enabled start attach request
            await this.attachRequest(response, launchArgs);
        } catch (error) {
            this.showError(error, response);
        }
    }
    /**
     * DAP "attach": restarts the CDP proxy, starts the packager and the app
     * worker, and — once the worker reports "connected" — establishes the child
     * js-debug session. The outer promise resolves when that first child
     * session is up; any failure surfaces as CouldNotAttachToDebugger.
     *
     * NOTE(review): this uses the async-promise-executor pattern; it is safe
     * here only because the executor body is fully wrapped in try/catch that
     * routes errors to reject() — keep that invariant if editing.
     */
    protected async attachRequest(
        response: DebugProtocol.AttachResponse,
        attachArgs: IAttachRequestArgs,
        // eslint-disable-next-line @typescript-eslint/no-unused-vars
        request?: DebugProtocol.Request,
    ): Promise<void> {
        let extProps = {
            platform: {
                value: attachArgs.platform,
                isPii: false,
            },
        };
        this.previousAttachArgs = attachArgs;
        return new Promise<void>(async (resolve, reject) => {
            try {
                await this.initializeSettings(attachArgs);
                logger.log("Attaching to the application");
                logger.verbose(
                    `Attaching to the application: ${JSON.stringify(attachArgs, null, 2)}`,
                );
                const versions = await ProjectVersionHelper.getReactNativeVersions(
                    this.projectRootPath,
                    ProjectVersionHelper.generateAdditionalPackagesToCheckByPlatform(attachArgs),
                );
                extProps = TelemetryHelper.addPlatformPropertiesToTelemetryProperties(
                    attachArgs,
                    versions,
                    extProps,
                );
                // eslint-disable-next-line @typescript-eslint/no-unused-vars
                await TelemetryHelper.generate("attach", extProps, async generator => {
                    attachArgs.port =
                        attachArgs.port || this.appLauncher.getPackagerPort(attachArgs.cwd);
                    // Recreate the CDP proxy from a clean state for this attach.
                    const cdpProxy = this.appLauncher.getRnCdpProxy();
                    await cdpProxy.stopServer();
                    await cdpProxy.initializeServer(
                        new RnCDPMessageHandler(),
                        this.cdpProxyLogLevel,
                        this.cancellationTokenSource.token,
                    );
                    await this.appLauncher.getPackager().start();
                    logger.log(
                        localize("StartingDebuggerAppWorker", "Starting debugger app worker."),
                    );
                    const sourcesStoragePath = path.join(this.projectRootPath, ".vscode", ".react");
                    // Create folder if not exist to avoid problems if
                    // RN project root is not a ${workspaceFolder}
                    mkdirp.sync(sourcesStoragePath);
                    // If launch is invoked first time, appWorker is undefined, so create it here
                    this.appWorker = new MultipleLifetimesAppWorker(
                        attachArgs,
                        sourcesStoragePath,
                        this.projectRootPath,
                        this.cancellationTokenSource.token,
                        undefined,
                    );
                    this.appLauncher.setAppWorker(this.appWorker);
                    // Fires on every (re)load of the app's JS runtime.
                    this.appWorker.on("connected", (port: number) => {
                        if (this.cancellationTokenSource.token.isCancellationRequested) {
                            return this.appWorker?.stop();
                        }
                        logger.log(
                            localize(
                                "DebuggerWorkerLoadedRuntimeOnPort",
                                "Debugger worker loaded runtime on port {0}",
                                port,
                            ),
                        );
                        cdpProxy.setApplicationTargetPort(port);
                        // A reconnection is already in flight — ignore this event.
                        if (this.debugSessionStatus === DebugSessionStatus.ConnectionPending) {
                            return;
                        }
                        if (this.debugSessionStatus === DebugSessionStatus.FirstConnection) {
                            this.debugSessionStatus = DebugSessionStatus.FirstConnectionPending;
                            this.establishDebugSession(attachArgs, resolve);
                        } else if (
                            this.debugSessionStatus === DebugSessionStatus.ConnectionAllowed
                        ) {
                            // Runtime reloaded: terminate the old child session;
                            // its termination handler re-establishes a new one.
                            if (this.nodeSession) {
                                this.debugSessionStatus = DebugSessionStatus.ConnectionPending;
                                this.nodeSession.customRequest(this.terminateCommand);
                            }
                        }
                    });
                    if (this.cancellationTokenSource.token.isCancellationRequested) {
                        return this.appWorker.stop();
                    }
                    return await this.appWorker.start();
                });
            } catch (error) {
                reject(error);
            }
        }).catch(err =>
            this.showError(
                ErrorHelper.getInternalError(
                    InternalErrorCode.CouldNotAttachToDebugger,
                    err.message || err,
                ),
                response,
            ),
        );
    }
    /**
     * DAP "disconnect": stops the app worker, unhooks the session listeners and
     * defers the rest to the base class.
     * NOTE(review): appWorker.stop() is not awaited here — presumably its
     * result is intentionally ignored; confirm.
     */
    protected async disconnectRequest(
        response: DebugProtocol.DisconnectResponse,
        args: DebugProtocol.DisconnectArguments,
        request?: DebugProtocol.Request,
    ): Promise<void> {
        // The client is about to disconnect so first we need to stop app worker
        if (this.appWorker) {
            this.appWorker.stop();
        }
        this.onDidStartDebugSessionHandler.dispose();
        this.onDidTerminateDebugSessionHandler.dispose();
        return super.disconnectRequest(response, args, request);
    }
    /**
     * Starts the child js-debug session attached to the CDP proxy, updating
     * the status state machine with the outcome. `resolve` (when provided)
     * completes the pending attachRequest promise on first success.
     *
     * NOTE(review): the throws inside the .then callbacks reject a promise
     * chain nobody awaits, i.e. they surface as unhandled rejections; confirm
     * whether that is intended or should be routed through showError.
     */
    protected establishDebugSession(
        attachArgs: IAttachRequestArgs,
        resolve?: (value?: void | PromiseLike<void> | undefined) => void,
    ): void {
        const attachConfiguration = JsDebugConfigAdapter.createDebuggingConfigForPureRN(
            attachArgs,
            this.appLauncher.getCdpProxyPort(),
            this.session.id,
        );
        vscode.debug
            .startDebugging(this.appLauncher.getWorkspaceFolder(), attachConfiguration, {
                parentSession: this.session,
                consoleMode: vscode.DebugConsoleMode.MergeWithParent,
            })
            .then(
                (childDebugSessionStarted: boolean) => {
                    if (childDebugSessionStarted) {
                        this.debugSessionStatus = DebugSessionStatus.ConnectionDone;
                        this.setConnectionAllowedIfPossible();
                        if (resolve) {
                            this.debugSessionStatus = DebugSessionStatus.ConnectionAllowed;
                            resolve();
                        }
                    } else {
                        this.debugSessionStatus = DebugSessionStatus.ConnectionFailed;
                        this.setConnectionAllowedIfPossible();
                        this.resetFirstConnectionStatus();
                        throw new Error("Cannot start child debug session");
                    }
                },
                err => {
                    this.debugSessionStatus = DebugSessionStatus.ConnectionFailed;
                    this.setConnectionAllowedIfPossible();
                    this.resetFirstConnectionStatus();
                    throw err;
                },
            );
    }
    /** Adopts a newly started pwa-node session as our child when its config carries our session id. */
    private handleStartDebugSession(debugSession: vscode.DebugSession): void {
        if (
            debugSession.configuration.rnDebugSessionId === this.session.id &&
            debugSession.type === this.pwaNodeSessionName
        ) {
            this.nodeSession = debugSession;
        }
    }
    /**
     * When our child session terminates: re-establish it if a reconnection was
     * pending (runtime reload), otherwise stop this whole RN debug session.
     */
    private handleTerminateDebugSession(debugSession: vscode.DebugSession): void {
        if (
            debugSession.configuration.rnDebugSessionId === this.session.id &&
            debugSession.type === this.pwaNodeSessionName
        ) {
            if (this.debugSessionStatus === DebugSessionStatus.ConnectionPending) {
                this.establishDebugSession(this.previousAttachArgs);
            } else {
                vscode.commands.executeCommand(this.stopCommand, this.session);
            }
        }
    }
    /** Moves a finished connection attempt (done or failed) back to ConnectionAllowed so reconnections may proceed. */
    private setConnectionAllowedIfPossible(): void {
        if (
            this.debugSessionStatus === DebugSessionStatus.ConnectionDone ||
            this.debugSessionStatus === DebugSessionStatus.ConnectionFailed
        ) {
            this.debugSessionStatus = DebugSessionStatus.ConnectionAllowed;
        }
    }
    /** Rolls a failed first connection attempt back to FirstConnection so it can be retried. */
    private resetFirstConnectionStatus(): void {
        if (this.debugSessionStatus === DebugSessionStatus.FirstConnectionPending) {
            this.debugSessionStatus = DebugSessionStatus.FirstConnection;
        }
    }
}
import {
makeApp as makeElectronApp,
makeBrowserWindow,
makeClientForPlugin,
makeProcess
} from '@bugsnag/electron-test-helpers'
import plugin from '../'
// One hour in milliseconds — used to back-date the mocked process creation time in duration tests.
const ONE_HOUR_IN_MS = 60 * 60 * 1000
// expected data for 'session.app'
const makeExpectedSessionApp = (customisations = {}) => {
  const base = {
    releaseStage: 'production',
    type: undefined,
    version: '1.2.3'
  }
  return { ...base, ...customisations }
}
// expected data for 'event.app': the session app fields plus event-only extras
const makeExpectedEventApp = (customisations = {}) => ({
  ...makeExpectedSessionApp(),
  inForeground: false,
  isLaunching: true,
  duration: expect.any(Number),
  ...customisations
})
// expected data synced to NativeClient: the event app without 'duration'
const makeExpectedNativeClientApp = (customisations = {}) => {
  const { duration, ...withoutDuration } = makeExpectedEventApp()
  return { ...withoutDuration, ...customisations }
}
// expected data for 'event.metadata.app'
const makeExpectedMetadataApp = (customisations = {}) => {
  return Object.assign({ name: 'my cool app :^)' }, customisations)
}
describe('plugin: electron app info', () => {
afterEach(() => { jest.useRealTimers() })
it('reports basic app info', async () => {
const { sendEvent, sendSession } = makeClient()
const event = await sendEvent()
expect(event.app).toEqual(makeExpectedEventApp())
expect(event.getMetadata('app')).toEqual(makeExpectedMetadataApp())
const session = await sendSession()
expect(session.app).toEqual(makeExpectedSessionApp())
})
it('reports app.type for macOS', async () => {
const process = makeProcess({ platform: 'darwin' })
const { sendEvent, sendSession } = makeClient({ process })
const event = await sendEvent()
expect(event.app).toEqual(makeExpectedEventApp({ type: 'macOS' }))
expect(event.getMetadata('app')).toEqual(makeExpectedMetadataApp())
const session = await sendSession()
expect(session.app).toEqual(makeExpectedSessionApp({ type: 'macOS' }))
})
it('reports app.type for Windows', async () => {
const process = makeProcess({ platform: 'win32' })
const { sendEvent, sendSession } = makeClient({ process })
const event = await sendEvent()
expect(event.app).toEqual(makeExpectedEventApp({ type: 'Windows' }))
expect(event.getMetadata('app')).toEqual(makeExpectedMetadataApp())
const session = await sendSession()
expect(session.app).toEqual(makeExpectedSessionApp({ type: 'Windows' }))
})
it('reports app.type for Linux', async () => {
const process = makeProcess({ platform: 'linux' })
const { sendEvent, sendSession } = makeClient({ process })
const event = await sendEvent()
expect(event.app).toEqual(makeExpectedEventApp({ type: 'Linux' }))
expect(event.getMetadata('app')).toEqual(makeExpectedMetadataApp())
const session = await sendSession()
expect(session.app).toEqual(makeExpectedSessionApp({ type: 'Linux' }))
})
it('reports app.version and metadata.app.CFBundleVersion for macOS', async () => {
const process = makeProcess({ platform: 'darwin' })
const electronApp = makeElectronApp({ version: '5.4.6' })
const { sendEvent, sendSession } = makeClient({
electronApp,
process,
NativeApp: {
getPackageVersion: () => '5.4.6',
getBundleVersion: () => '8.7.9'
}
})
const expected = { type: 'macOS', version: '5.4.6' }
const event = await sendEvent()
expect(event.app).toEqual(makeExpectedEventApp(expected))
expect(event.getMetadata('app')).toEqual(makeExpectedMetadataApp({ CFBundleVersion: '8.7.9' }))
const session = await sendSession()
expect(session.app).toEqual(makeExpectedSessionApp(expected))
})
it('reports app.version for Windows', async () => {
const process = makeProcess({ platform: 'win32' })
const electronApp = makeElectronApp({ version: '1.0.0' })
const { sendEvent, sendSession } = makeClient({
electronApp,
process,
NativeApp: {
getPackageVersion: () => '1.3.4',
getBundleVersion: () => null
}
})
const expected = { type: 'Windows', version: '1.3.4' }
const event = await sendEvent()
expect(event.app).toEqual(makeExpectedEventApp(expected))
expect(event.getMetadata('app')).toEqual(makeExpectedMetadataApp())
const session = await sendSession()
expect(session.app).toEqual(makeExpectedSessionApp(expected))
})
it('reports app.version for Linux', async () => {
const process = makeProcess({ platform: 'linux' })
const electronApp = makeElectronApp({ version: '9.8.7' })
const { sendEvent, sendSession } = makeClient({ electronApp, process })
const expected = { type: 'Linux', version: '9.8.7' }
const event = await sendEvent()
expect(event.app).toEqual(makeExpectedEventApp(expected))
expect(event.getMetadata('app')).toEqual(makeExpectedMetadataApp())
const session = await sendSession()
expect(session.app).toEqual(makeExpectedSessionApp(expected))
})
it('reports if the app was installed from the macOS App Store', async () => {
const process = makeProcess({ mas: true })
const { sendEvent, sendSession } = makeClient({ process })
const event = await sendEvent()
expect(event.app).toEqual(makeExpectedEventApp())
expect(event.getMetadata('app')).toEqual(makeExpectedMetadataApp({ installedFromStore: 'mac' }))
const session = await sendSession()
expect(session.app).toEqual(makeExpectedSessionApp())
})
  // installedFromStore should be 'windows' when the process reports a
  // Windows Store install; event metadata carries it, session app does not.
  it('reports if the app was installed from the Windows Store', async () => {
    const process = makeProcess({ windowsStore: true })
    const { sendEvent, sendSession } = makeClient({ process })
    const event = await sendEvent()
    expect(event.app).toEqual(makeExpectedEventApp())
    expect(event.getMetadata('app')).toEqual(makeExpectedMetadataApp({ installedFromStore: 'windows' }))
    const session = await sendSession()
    expect(session.app).toEqual(makeExpectedSessionApp())
  })
  // inForeground should flip true on window focus and back to false on blur.
  it('tracks focus and blur events for inForeground', async () => {
    const BrowserWindow = makeBrowserWindow()
    const electronApp = makeElectronApp({ BrowserWindow })
    const { sendEvent } = makeClient({ BrowserWindow, electronApp })
    // no window yet -> background
    const event = await sendEvent()
    expect(event.app).toEqual(makeExpectedEventApp({ inForeground: false, durationInForeground: undefined }))
    electronApp._createWindow()
    electronApp._emitFocusEvent()
    const event2 = await sendEvent()
    expect(event2.app).toEqual(makeExpectedEventApp({ inForeground: true, durationInForeground: expect.any(Number) }))
    electronApp._emitBlurEvent()
    const event3 = await sendEvent()
    expect(event3.app).toEqual(makeExpectedEventApp({ inForeground: false, durationInForeground: undefined }))
  })
  // Moving focus between two windows (blur one, focus the other) must not be
  // mistaken for the app being backgrounded.
  it('tracks multiple browser windows correctly for inForeground', async () => {
    const BrowserWindow = makeBrowserWindow()
    const electronApp = makeElectronApp({ BrowserWindow })
    // create 2 windows before loading the plugin
    electronApp._createWindow()
    electronApp._createWindow()
    const { sendEvent } = makeClient({ BrowserWindow, electronApp })
    const event = await sendEvent()
    expect(event.app).toEqual(makeExpectedEventApp({ inForeground: true, durationInForeground: expect.any(Number) }))
    // blur the current window and focus the other window
    electronApp._emitBlurEvent()
    electronApp._emitFocusEvent()
    const event2 = await sendEvent()
    expect(event2.app).toEqual(makeExpectedEventApp({ inForeground: true, durationInForeground: expect.any(Number) }))
    electronApp._emitBlurEvent()
    const event3 = await sendEvent()
    expect(event3.app).toEqual(makeExpectedEventApp({ inForeground: false, durationInForeground: undefined }))
  })
  // Closing every window should put the app into the background state.
  it('handles "inForeground" when all windows are closed', async () => {
    const BrowserWindow = makeBrowserWindow()
    const electronApp = makeElectronApp({ BrowserWindow })
    electronApp._createWindow()
    electronApp._createWindow()
    electronApp._createWindow()
    const { sendEvent } = makeClient({ BrowserWindow, electronApp })
    const event = await sendEvent()
    expect(event.app).toEqual(makeExpectedEventApp({ inForeground: true, durationInForeground: expect.any(Number) }))
    // close all of the windows
    BrowserWindow.getAllWindows().forEach(window => { electronApp._closeWindow(window) })
    const event2 = await sendEvent()
    expect(event2.app).toEqual(makeExpectedEventApp({ inForeground: false, durationInForeground: undefined }))
  })
  // duration is measured from process creation time; durationInForeground from
  // when the app last came to the foreground. Fake timers pin both clocks.
  it('reports the app.duration and app.durationInForeground', async () => {
    jest.useFakeTimers('modern')
    const now = Date.now()
    jest.setSystemTime(now)
    const creationTime = now - ONE_HOUR_IN_MS
    const process = makeProcess({ creationTime })
    const BrowserWindow = makeBrowserWindow()
    const electronApp = makeElectronApp({ BrowserWindow })
    electronApp._createWindow()
    const { sendEvent } = makeClient({ process, BrowserWindow, electronApp })
    const event = await sendEvent()
    expect(event.app).toEqual(makeExpectedEventApp({
      inForeground: true,
      duration: now - creationTime,
      durationInForeground: now - creationTime
    }))
    const sleepDurationMs = 100
    jest.advanceTimersByTime(sleepDurationMs)
    const event2 = await sendEvent()
    expect(event2.app).toEqual(makeExpectedEventApp({
      inForeground: true,
      duration: now - creationTime + sleepDurationMs,
      durationInForeground: now - creationTime + sleepDurationMs
    }))
  })
  // When getCreationTime yields null, durations fall back to time since the
  // plugin loaded (0 at first, then the advanced timer amount).
  it('reports the app.duration and app.durationInForeground when process.getCreationTime returns null', async () => {
    jest.useFakeTimers('modern')
    const now = Date.now()
    jest.setSystemTime(now)
    const process = makeProcess({ creationTime: null })
    const BrowserWindow = makeBrowserWindow()
    const electronApp = makeElectronApp({ BrowserWindow })
    electronApp._createWindow()
    const { sendEvent } = makeClient({ process, BrowserWindow, electronApp })
    const event = await sendEvent()
    expect(event.app).toEqual(makeExpectedEventApp({
      inForeground: true,
      duration: 0,
      durationInForeground: 0
    }))
    const sleepDurationMs = 100
    jest.advanceTimersByTime(sleepDurationMs)
    const event2 = await sendEvent()
    expect(event2.app).toEqual(makeExpectedEventApp({
      inForeground: true,
      duration: sleepDurationMs,
      durationInForeground: sleepDurationMs
    }))
  })
  // durationInForeground must reset each time the app returns to the
  // foreground, rather than accumulating across background periods.
  it('reports the app.durationInForeground after backgrounding', async () => {
    jest.useFakeTimers('modern')
    const now = Date.now()
    jest.setSystemTime(now)
    const creationTime = now - ONE_HOUR_IN_MS
    const process = makeProcess({ creationTime })
    const BrowserWindow = makeBrowserWindow()
    const electronApp = makeElectronApp({ BrowserWindow })
    const { sendEvent } = makeClient({ process, BrowserWindow, electronApp })
    // no window -> backgrounded, so no foreground duration
    const event = await sendEvent()
    expect(event.app).toEqual(makeExpectedEventApp({
      inForeground: false,
      durationInForeground: undefined
    }))
    electronApp._createWindow()
    const sleepDurationMs = 500
    jest.advanceTimersByTime(sleepDurationMs)
    const event2 = await sendEvent()
    expect(event2.app).toEqual(makeExpectedEventApp({
      inForeground: true,
      durationInForeground: sleepDurationMs
    }))
    electronApp._emitBlurEvent()
    jest.advanceTimersByTime(sleepDurationMs)
    const event3 = await sendEvent()
    expect(event3.app).toEqual(makeExpectedEventApp({
      inForeground: false,
      durationInForeground: undefined
    }))
    // re-focusing starts a fresh foreground timer
    electronApp._emitFocusEvent()
    jest.advanceTimersByTime(sleepDurationMs * 2)
    const event4 = await sendEvent()
    expect(event4.app).toEqual(makeExpectedEventApp({
      inForeground: true,
      durationInForeground: sleepDurationMs * 2
    }))
  })
  // Same backgrounding behaviour as above, with no process creation time.
  it('reports the app.durationInForeground after backgrounding when process.getCreationTime returns null', async () => {
    jest.useFakeTimers('modern')
    const now = Date.now()
    jest.setSystemTime(now)
    const process = makeProcess({ creationTime: null })
    const BrowserWindow = makeBrowserWindow()
    const electronApp = makeElectronApp({ BrowserWindow })
    const { sendEvent } = makeClient({ process, BrowserWindow, electronApp })
    const event = await sendEvent()
    expect(event.app).toEqual(makeExpectedEventApp({
      inForeground: false,
      durationInForeground: undefined
    }))
    electronApp._createWindow()
    const sleepDurationMs = 500
    jest.advanceTimersByTime(sleepDurationMs)
    const event2 = await sendEvent()
    expect(event2.app).toEqual(makeExpectedEventApp({
      inForeground: true,
      durationInForeground: sleepDurationMs
    }))
    electronApp._emitBlurEvent()
    jest.advanceTimersByTime(sleepDurationMs)
    const event3 = await sendEvent()
    expect(event3.app).toEqual(makeExpectedEventApp({
      inForeground: false,
      durationInForeground: undefined
    }))
    electronApp._emitFocusEvent()
    jest.advanceTimersByTime(sleepDurationMs * 2)
    const event4 = await sendEvent()
    expect(event4.app).toEqual(makeExpectedEventApp({
      inForeground: true,
      durationInForeground: sleepDurationMs * 2
    }))
  })
  // Switching focus between windows keeps one continuous foreground timer;
  // only a real background period resets it.
  it('reports durationInForeground correctly across multiple browser windows', async () => {
    jest.useFakeTimers('modern')
    const now = Date.now()
    jest.setSystemTime(now)
    const process = makeProcess({ creationTime: null })
    const BrowserWindow = makeBrowserWindow()
    const electronApp = makeElectronApp({ BrowserWindow })
    electronApp._createWindow()
    electronApp._createWindow()
    const { sendEvent } = makeClient({ process, BrowserWindow, electronApp })
    const event = await sendEvent()
    expect(event.app).toEqual(makeExpectedEventApp({
      inForeground: true,
      durationInForeground: 0
    }))
    // blur the current window and focus the other one
    electronApp._emitBlurEvent()
    electronApp._emitFocusEvent()
    const sleepDurationMs = 500
    jest.advanceTimersByTime(sleepDurationMs)
    const event2 = await sendEvent()
    expect(event2.app).toEqual(makeExpectedEventApp({
      inForeground: true,
      durationInForeground: sleepDurationMs
    }))
    // blur the second window so we're in the background now
    electronApp._emitBlurEvent()
    jest.advanceTimersByTime(sleepDurationMs)
    const event3 = await sendEvent()
    expect(event3.app).toEqual(makeExpectedEventApp({
      inForeground: false,
      durationInForeground: undefined
    }))
    // re-focus a window so we're back in the foreground
    electronApp._emitFocusEvent()
    jest.advanceTimersByTime(sleepDurationMs)
    const event4 = await sendEvent()
    expect(event4.app).toEqual(makeExpectedEventApp({
      inForeground: true,
      durationInForeground: sleepDurationMs
    }))
    // switch focus to the second window, proving that we don't think we've been
    // backgrounded
    electronApp._emitFocusEvent()
    jest.advanceTimersByTime(sleepDurationMs)
    const event5 = await sendEvent()
    expect(event5.app).toEqual(makeExpectedEventApp({
      inForeground: true,
      durationInForeground: sleepDurationMs * 2
    }))
  })
  // App data (minus the per-event duration fields) should be pushed to the
  // NativeClient exactly once on plugin load.
  it('syncs basic data (excluding duration/durationInForeground) to NativeClient', async () => {
    const NativeClient = makeNativeClient()
    const { sendEvent, sendSession } = makeClient({ NativeClient })
    const event = await sendEvent()
    expect(event.app).toEqual(makeExpectedEventApp())
    expect(event.getMetadata('app')).toEqual(makeExpectedMetadataApp())
    const session = await sendSession()
    expect(session.app).toEqual(makeExpectedSessionApp())
    expect(NativeClient.setApp).toHaveBeenCalledTimes(1)
    expect(NativeClient.setApp).toHaveBeenCalledWith(makeExpectedNativeClientApp())
  })
  // Each foreground/background transition triggers a fresh setApp sync.
  it('syncs inForeground to NativeClient after focus/blur events', () => {
    const BrowserWindow = makeBrowserWindow()
    const electronApp = makeElectronApp({ BrowserWindow })
    const NativeClient = makeNativeClient()
    makeClient({ NativeClient, BrowserWindow, electronApp })
    expect(NativeClient.setApp).toHaveBeenCalledTimes(1)
    expect(NativeClient.setApp).toHaveBeenCalledWith(makeExpectedNativeClientApp({ inForeground: false }))
    electronApp._createWindow()
    expect(NativeClient.setApp).toHaveBeenCalledTimes(2)
    expect(NativeClient.setApp).toHaveBeenNthCalledWith(2, makeExpectedNativeClientApp({ inForeground: true }))
    electronApp._emitBlurEvent()
    expect(NativeClient.setApp).toHaveBeenCalledTimes(3)
    expect(NativeClient.setApp).toHaveBeenNthCalledWith(3, makeExpectedNativeClientApp({ inForeground: false }))
  })
  // setApp throwing must be logged (not propagated) and must not break
  // event/session delivery or subsequent focus/blur syncs.
  it('handles exceptions from the NativeClient', async () => {
    const BrowserWindow = makeBrowserWindow()
    const electronApp = makeElectronApp({ BrowserWindow })
    const NativeClient = makeNativeClient()
    NativeClient.setApp.mockImplementation(() => { throw new Error('uh oh') })
    electronApp._createWindow()
    const { client, sendEvent, sendSession } = makeClient({ BrowserWindow, electronApp, NativeClient })
    expect(NativeClient.setApp).toHaveBeenCalledTimes(1)
    expect(NativeClient.setApp).toHaveBeenCalledWith(makeExpectedNativeClientApp({ inForeground: true }))
    expect(client._logger.error).toHaveBeenCalledTimes(1)
    expect(client._logger.error).toHaveBeenCalledWith(new Error('uh oh'))
    const event = await sendEvent()
    expect(event.app).toEqual(makeExpectedEventApp({ inForeground: true, durationInForeground: expect.any(Number) }))
    expect(event.getMetadata('app')).toEqual(makeExpectedMetadataApp())
    const session = await sendSession()
    expect(session.app).toEqual(makeExpectedSessionApp())
    expect(NativeClient.setApp).toHaveBeenCalledTimes(1)
    expect(client._logger.error).toHaveBeenCalledTimes(1)
    // ensure NativeClient.setApp calls from blur/focus events are also handled
    electronApp._emitBlurEvent()
    expect(NativeClient.setApp).toHaveBeenCalledTimes(2)
    expect(NativeClient.setApp).toHaveBeenNthCalledWith(2, makeExpectedNativeClientApp({ inForeground: false }))
    expect(client._logger.error).toHaveBeenCalledTimes(2)
    expect(client._logger.error).toHaveBeenNthCalledWith(2, new Error('uh oh'))
    electronApp._emitFocusEvent()
    expect(NativeClient.setApp).toHaveBeenCalledTimes(3)
    expect(NativeClient.setApp).toHaveBeenNthCalledWith(3, makeExpectedNativeClientApp({ inForeground: true }))
    expect(client._logger.error).toHaveBeenCalledTimes(3)
    expect(client._logger.error).toHaveBeenNthCalledWith(3, new Error('uh oh'))
  })
it('can manually mark the app as not launching', async () => {
const { client, sendEvent, sendSession } = makeClient()
const event = await sendEvent()
expect(event.app).toEqual(makeExpectedEventApp({ isLaunching: true }))
expect(event.getMetadata('app')).toEqual(makeExpectedMetadataApp())
const session = await sendSession()
expect(session.app).toEqual(makeExpectedSessionApp())
const pluginInstance = client.getPlugin('electronApp')
pluginInstance.markLaunchComplete()
const event2 = await sendEvent()
expect(event2.app).toEqual(makeExpectedEventApp({ isLaunching: false }))
expect(event2.getMetadata('app')).toEqual(makeExpectedMetadataApp())
const session2 = await sendSession()
expect(session2.app).toEqual(makeExpectedSessionApp())
})
it('does not sync multiple "markLaunchComplete" calls to native', async () => {
const NativeClient = makeNativeClient()
const { client } = makeClient({ NativeClient })
expect(NativeClient.setApp).toHaveBeenCalledTimes(1)
expect(NativeClient.setApp).toHaveBeenCalledWith(makeExpectedNativeClientApp())
const pluginInstance = client.getPlugin('electronApp')
pluginInstance.markLaunchComplete()
expect(NativeClient.setApp).toHaveBeenCalledTimes(2)
expect(NativeClient.setApp).toHaveBeenNthCalledWith(2, makeExpectedNativeClientApp({ isLaunching: false }))
// as the app is already not launching, calling "markLaunchComplete" again
// should do nothing
pluginInstance.markLaunchComplete()
pluginInstance.markLaunchComplete()
pluginInstance.markLaunchComplete()
expect(NativeClient.setApp).toHaveBeenCalledTimes(2)
})
it('automatically marks the app as not launching after the default "launchDurationMillis" elapses', async () => {
jest.useFakeTimers('modern')
const now = Date.now()
jest.setSystemTime(now)
const process = makeProcess({ creationTime: null })
const config = { launchDurationMillis: undefined }
const { sendEvent } = makeClient({ process, config })
const event = await sendEvent()
expect(event.app).toEqual(makeExpectedEventApp({ isLaunching: true }))
expect(event.getMetadata('app')).toEqual(makeExpectedMetadataApp())
// advancing 1 second shouldn't affect isLaunching
jest.advanceTimersByTime(1000)
const event2 = await sendEvent()
expect(event2.app).toEqual(makeExpectedEventApp({ isLaunching: true }))
expect(event2.getMetadata('app')).toEqual(makeExpectedMetadataApp())
// advance the remaining 4 seconds to cover the 5 second default 'launcDurationMillis'
jest.advanceTimersByTime(4000)
const event3 = await sendEvent()
expect(event3.app).toEqual(makeExpectedEventApp({ isLaunching: false }))
expect(event3.getMetadata('app')).toEqual(makeExpectedMetadataApp())
})
it('automatically marks the app as not launching after the configured "launchDurationMillis" elapses', async () => {
jest.useFakeTimers('modern')
const now = Date.now()
jest.setSystemTime(now)
const process = makeProcess({ creationTime: null })
const config = { launchDurationMillis: 250 }
const { sendEvent } = makeClient({ process, config })
const event = await sendEvent()
expect(event.app).toEqual(makeExpectedEventApp({ isLaunching: true }))
expect(event.getMetadata('app')).toEqual(makeExpectedMetadataApp())
jest.advanceTimersByTime(250)
const event2 = await sendEvent()
expect(event2.app).toEqual(makeExpectedEventApp({ isLaunching: false }))
expect(event2.getMetadata('app')).toEqual(makeExpectedMetadataApp())
})
it('does not sync "markLaunchComplete" calls after "launchDurationMillis" elapses', async () => {
jest.useFakeTimers('modern')
const now = Date.now()
jest.setSystemTime(now)
const NativeClient = makeNativeClient()
const process = makeProcess({ creationTime: null })
const config = { launchDurationMillis: 250 }
const { client, sendEvent } = makeClient({ NativeClient, process, config })
expect(NativeClient.setApp).toHaveBeenCalledTimes(1)
expect(NativeClient.setApp).toHaveBeenCalledWith(makeExpectedNativeClientApp())
const event = await sendEvent()
expect(event.app).toEqual(makeExpectedEventApp({ isLaunching: true }))
expect(event.getMetadata('app')).toEqual(makeExpectedMetadataApp())
// advance past launchDurationMillis
jest.advanceTimersByTime(250)
const event2 = await sendEvent()
expect(event2.app).toEqual(makeExpectedEventApp({ isLaunching: false }))
expect(event2.getMetadata('app')).toEqual(makeExpectedMetadataApp())
expect(NativeClient.setApp).toHaveBeenCalledTimes(2)
expect(NativeClient.setApp).toHaveBeenNthCalledWith(2, makeExpectedNativeClientApp({ isLaunching: false }))
// calling markLaunchComplete should do nothing as we're no longer launching
const pluginInstance = client.getPlugin('electronApp')
pluginInstance.markLaunchComplete()
pluginInstance.markLaunchComplete()
pluginInstance.markLaunchComplete()
expect(NativeClient.setApp).toHaveBeenCalledTimes(2)
})
it('does not sync "launchDurationMillis" elapsing after "markLaunchComplete" has been caled', async () => {
jest.useFakeTimers('modern')
const now = Date.now()
jest.setSystemTime(now)
const NativeClient = makeNativeClient()
const process = makeProcess({ creationTime: null })
const config = { launchDurationMillis: 250 }
const { client, sendEvent } = makeClient({ NativeClient, process, config })
expect(NativeClient.setApp).toHaveBeenCalledTimes(1)
expect(NativeClient.setApp).toHaveBeenCalledWith(makeExpectedNativeClientApp())
const event = await sendEvent()
expect(event.app).toEqual(makeExpectedEventApp({ isLaunching: true }))
expect(event.getMetadata('app')).toEqual(makeExpectedMetadataApp())
const pluginInstance = client.getPlugin('electronApp')
pluginInstance.markLaunchComplete()
const event2 = await sendEvent()
expect(event2.app).toEqual(makeExpectedEventApp({ isLaunching: false }))
expect(event2.getMetadata('app')).toEqual(makeExpectedMetadataApp())
expect(NativeClient.setApp).toHaveBeenCalledTimes(2)
expect(NativeClient.setApp).toHaveBeenNthCalledWith(2, makeExpectedNativeClientApp({ isLaunching: false }))
// advance past launchDurationMillis, this should do nothing as we've
// manually called "markLaunchComplete"
jest.advanceTimersByTime(250)
expect(NativeClient.setApp).toHaveBeenCalledTimes(2)
jest.advanceTimersByTime(250)
expect(NativeClient.setApp).toHaveBeenCalledTimes(2)
})
it('validates "launchDurationMillis" must be >= 0', async () => {
const config = { launchDurationMillis: -1234567890 }
const { client } = makeClient({ config })
expect((client._config as (typeof client._config & { launchDurationMillis: string })).launchDurationMillis).toBe(5000)
expect(client._logger.warn).toHaveBeenCalledWith(new Error(
'Invalid configuration\n - launchDurationMillis should be an integer ≥0, got -1234567890'
))
})
})
// Optional overrides for the collaborators injected into the plugin under test;
// anything omitted is replaced by a default stub in makeClient().
interface MakeClientOptions {
  BrowserWindow?: any
  electronApp?: any
  NativeClient?: any
  NativeApp?: any
  process?: any
  config?: { launchDurationMillis: number|undefined }
}
// Build a client whose electron-app plugin is wired to the given (or default
// stub) collaborators. Defaults that depend on each other are declared in
// dependency order (electronApp consumes BrowserWindow).
function makeClient ({
  BrowserWindow = makeBrowserWindow(),
  electronApp = makeElectronApp({ BrowserWindow }),
  NativeClient = makeNativeClient(),
  process = makeProcess(),
  config = { launchDurationMillis: 0 },
  NativeApp = makeNativeApp()
}: MakeClientOptions = {}): ReturnType<typeof makeClientForPlugin> {
  const appPlugin = plugin(NativeClient, process, electronApp, BrowserWindow, NativeApp)
  return makeClientForPlugin({ config, plugin: appPlugin })
}
// NativeClient stub: exposes a jest spy so tests can assert on setApp syncs.
function makeNativeClient () {
  const setApp = jest.fn()
  return { setApp }
}
function makeNativeApp () {
return { getPackageVersion: () => null, getBundleVersion: () => null }
} | the_stack |
import { ApplicationContext } from "../../main-core/application-context/mandarineApplicationContext.ts";
import { ORMCoreDecoratorProxy } from "../../orm-core/core/proxys/ormCoreDecoratorProxy.ts";
import { mockDecorator, Orange, Test, DenoAsserts } from "../mod.ts";
import { Types } from "../../orm-core/sql/types.ts";
import { MandarineRepository } from "../../orm-core/repository/mandarineRepository.ts";
import { RepositoryComponent } from "../../main-core/components/repository-component/repositoryComponent.ts";
import { lexicalProcessor } from "../../orm-core/core/lexicalProcessor.ts";
import { PostgreSQLDialect } from "../../orm-core/dialect/postgreSQLDialect.ts";
import type { Mandarine } from "../../main-core/Mandarine.ns.ts";
import { MysqlDialect } from "../../orm-core/dialect/mysqlDialect.ts";
// Test fixture entity. The decorators here are mocks; the real column/table
// registration is driven programmatically through ORMCoreDecoratorProxy in
// the tests below.
@mockDecorator()
class MyTable {
    // Primary key column (registered with a SEQUENCE generation strategy in createEntity).
    @mockDecorator()
    //@ts-ignore
    private id: number;
    // VARCHAR column.
    @mockDecorator()
    //@ts-ignore
    private name: string;
    // BOOLEAN column.
    @mockDecorator()
    //@ts-ignore
    private isAdult: boolean;
}
// Integration tests for Mandarine's ORM core: entity registration via the
// decorator proxy, repository component wiring, and the lexical query
// processor for the PostgreSQL and MySQL dialects.
export class ORMTests {

    constructor() {
        // Clear the component registry before every test so registrations from
        // one test cannot leak into the next.
        Orange.setOptions(this, {
            hooks: {
                beforeEach: () => ApplicationContext.getInstance().getComponentsRegistry().clearComponentRegistry()
            }
        })
    }

    @Test({
        name: "Create entity & columns",
        description: "Create the representation of a table with columns"
    })
    public createEntity() {
        // Register the columns, primary key and table metadata exactly as the
        // real decorators would, then assert the entity registry's view.
        ORMCoreDecoratorProxy.registerColumnDecorator(MyTable.prototype, <any><unknown> undefined, "isAdult");
        ORMCoreDecoratorProxy.registerColumnDecorator(MyTable.prototype, <any><unknown> undefined, "name");
        ORMCoreDecoratorProxy.registerColumnDecorator(MyTable.prototype, <any><unknown> undefined, "id");
        ORMCoreDecoratorProxy.registerIdDecorator(MyTable.prototype, "id");
        ORMCoreDecoratorProxy.registerGeneratedValueDecorator(MyTable.prototype, {
            strategy: "SEQUENCE"
        }, "id");
        ORMCoreDecoratorProxy.registerTableDecorator(MyTable, {
            name: "MyTable",
            schema: "public"
        });
        // Table names are lower-cased by the registry ("MyTable" -> "mytable").
        let entity = ApplicationContext.getInstance().getEntityManager().entityRegistry.getEntity("public", "mytable");
        DenoAsserts.assertEquals(entity?.tableName, "mytable");
        DenoAsserts.assertEquals(entity?.schema, "public");
        // Columns come back in registration order with default length/scale/precision.
        DenoAsserts.assertEquals(entity?.columns,[{
            name: "isAdult",
            length: 255,
            scale: 2,
            precision: 8,
            nullable: true,
            unique: false,
            fieldName: "isAdult",
            type: Types.BOOLEAN,
            options: {}
        },
        {
            name: "name",
            length: 255,
            scale: 2,
            precision: 8,
            nullable: true,
            unique: false,
            fieldName: "name",
            type: Types.VARCHAR,
            options: {}
        },
        {
            name: "id",
            length: 255,
            scale: 2,
            precision: 8,
            nullable: false,
            unique: true,
            fieldName: "id",
            type: Types.BIGINT,
            options: {
                primaryKey: true,
                generatedValue: {
                    strategy: "SEQUENCE"
                }
            },
            incrementStrategy: true
        }]);
        // The PK column is also surfaced as a unique constraint...
        DenoAsserts.assertEquals(entity?.uniqueConstraints, [{
            name: "id",
            length: 255,
            scale: 2,
            precision: 8,
            nullable: false,
            unique: true,
            fieldName: "id",
            type: Types.BIGINT,
            options: {
                primaryKey: true,
                generatedValue: {
                    strategy: "SEQUENCE"
                }
            },
            incrementStrategy: true
        }]);
        // ...and as the entity's primary key.
        DenoAsserts.assertEquals(entity?.primaryKey, {
            name: "id",
            length: 255,
            scale: 2,
            precision: 8,
            nullable: false,
            unique: true,
            fieldName: "id",
            type: Types.BIGINT,
            options: {
                primaryKey: true,
                generatedValue: {
                    strategy: "SEQUENCE"
                }
            },
            incrementStrategy: true
        });
    }

    @Test({
        name: "Create repository component",
        description: "Create a mandarine-powered repository"
    })
    public createRepository() {
        // createEntity() returns void; it is invoked here purely for its
        // registration side effects, so 'myTable' is always undefined.
        let myTable = this.createEntity();

        @mockDecorator()
        class MyRepository extends MandarineRepository<any> {
            constructor() {
                super(MyTable);
            }
        }

        // Register the repository and resolve it through the component registry,
        // then check that the proxy exposes the standard CRUD handlers.
        ORMCoreDecoratorProxy.registerComponentRepositoryDecorator(MyRepository);
        ApplicationContext.getInstance().getComponentsRegistry().connectRepositoriesToProxy();
        ApplicationContext.getInstance().getComponentsRegistry().resolveDependencies();
        let repository = ApplicationContext.getInstance().getComponentsRegistry().get("MyRepository");
        DenoAsserts.assert(repository?.componentInstance instanceof RepositoryComponent)
        let handler = repository.componentInstance.getClassHandler();
        DenoAsserts.assert(typeof handler.save === 'function');
        DenoAsserts.assert(typeof handler.findAll === 'function');
        DenoAsserts.assert(typeof handler.deleteAll === 'function');
        DenoAsserts.assert(typeof handler.countAll === 'function');
    }

    @Test({
        name: "[Postgres] Lexical processor",
        description: "Should create SQL queries"
    })
    public useLexicalProcessor() {
        // Minimal fakes: the processor only needs the supported keyword list,
        // the table metadata, and the column names.
        // @ts-ignore
        const fakeRepositoryProxy: Mandarine.ORM.RepositoryProxy = {
            SUPPORTED_KEYWORDS: ["and", "or", "isnotnull", "isnull", "isempty", "isnotempty", "startingwith", "endswith", "like", "greaterthan", "lessthan"]
        };
        const fakeTableMetadata = {
            name: "users",
            schema: "public"
        };
        // @ts-ignore
        const fakeEntity: Mandarine.ORM.Entity.Table = {
            columns: [
                {
                    name: "id"
                },
                {
                    name: "firstname"
                },
                {
                    name: "lastname"
                },
                {
                    name: "age"
                },
                {
                    name: "posts"
                },
                {
                    name: "country"
                },
                {
                    name: "CarModEl"
                }
            ]
        }
        // Postgres dialect: quoted identifiers and $n positional placeholders.
        const dialect = new PostgreSQLDialect();
        const countByCountry = lexicalProcessor(fakeRepositoryProxy, "countByCountry", "countBy", fakeTableMetadata, fakeEntity, dialect);
        DenoAsserts.assertEquals(countByCountry, `SELECT COUNT(*) FROM public.users WHERE "country" = $1`);
        const findByCountry = lexicalProcessor(fakeRepositoryProxy, "findByCountry", "findBy", fakeTableMetadata, fakeEntity, dialect);
        DenoAsserts.assertEquals(findByCountry, `SELECT * FROM public.users WHERE "country" = $1`);
        const findByFirstnameAndCountry = lexicalProcessor(fakeRepositoryProxy, "findByFirstnameAndCountry", "findBy", fakeTableMetadata, fakeEntity, dialect);
        DenoAsserts.assertEquals(findByFirstnameAndCountry, `SELECT * FROM public.users WHERE "firstname" = $1 AND "country" = $2`);
        const findByCountryIsNotNull = lexicalProcessor(fakeRepositoryProxy, "findByCountryIsNotNull", "findBy", fakeTableMetadata, fakeEntity, dialect);
        DenoAsserts.assertEquals(findByCountryIsNotNull, `SELECT * FROM public.users WHERE "country" IS NOT NULL`);
        const findByCountryIsNull = lexicalProcessor(fakeRepositoryProxy, "findByCountryIsNull", "findBy", fakeTableMetadata, fakeEntity, dialect);
        DenoAsserts.assertEquals(findByCountryIsNull, `SELECT * FROM public.users WHERE "country" IS NULL`);
        const findByFirstnameIsNotEmptyAndCountryIsEmpty = lexicalProcessor(fakeRepositoryProxy, "findByFirstnameIsNotEmptyAndCountryIsEmpty", "findBy", fakeTableMetadata, fakeEntity, dialect);
        DenoAsserts.assertEquals(findByFirstnameIsNotEmptyAndCountryIsEmpty, `SELECT * FROM public.users WHERE "firstname" <> '' AND "country" = ''`);
        const findByLastnameStartingWith = lexicalProcessor(fakeRepositoryProxy, "findByLastnameStartingWith", "findBy", fakeTableMetadata, fakeEntity, dialect);
        DenoAsserts.assertEquals(findByLastnameStartingWith, `SELECT * FROM public.users WHERE "lastname" LIKE '' || $1 || '%'`);
        const findByLastnameAndCountryLikeAndFirstnameEndsWith = lexicalProcessor(fakeRepositoryProxy, "findByLastnameAndCountryLikeAndFirstnameEndsWith", "findBy", fakeTableMetadata, fakeEntity, dialect);
        DenoAsserts.assertEquals(findByLastnameAndCountryLikeAndFirstnameEndsWith, `SELECT * FROM public.users WHERE "lastname" = $1 AND "country" LIKE '%' || $2 || '%' AND "firstname" LIKE '%' || $3 || ''`);
        const findByFirstnameAndCountryAndLastnameEndsWithAndFirstnameIsNotNullOrLastnameIsNullAndCountryLike = lexicalProcessor(fakeRepositoryProxy, "findByFirstnameAndCountryAndLastnameEndsWithAndFirstnameIsNotNullOrLastnameIsNullAndCountryLike", "findBy", fakeTableMetadata, fakeEntity, dialect);
        DenoAsserts.assertEquals(findByFirstnameAndCountryAndLastnameEndsWithAndFirstnameIsNotNullOrLastnameIsNullAndCountryLike, `SELECT * FROM public.users WHERE "firstname" = $1 AND "country" = $2 AND "lastname" LIKE '%' || $3 || '' AND "firstname" IS NOT NULL OR "lastname" IS NULL AND "country" LIKE '%' || $4 || '%'`);
        const findByFirstnameAndAgeGreaterThanAndPostsLessThanOrPostsGreaterThan = lexicalProcessor(fakeRepositoryProxy, "findByFirstnameAndAgeGreaterThanAndPostsLessThanOrPostsGreaterThan", "findBy", fakeTableMetadata, fakeEntity, dialect);
        DenoAsserts.assertEquals(findByFirstnameAndAgeGreaterThanAndPostsLessThanOrPostsGreaterThan, `SELECT * FROM public.users WHERE "firstname" = $1 AND "age" > $2 AND "posts" < $3 OR "posts" > $4`);
        // Column-name casing in the query is normalized back to the entity's
        // declared casing ("CarModEl").
        const findByCarmodel = lexicalProcessor(fakeRepositoryProxy, "findByCarModel", "findBy", fakeTableMetadata, fakeEntity, dialect);
        DenoAsserts.assertEquals(findByCarmodel, `SELECT * FROM public.users WHERE "CarModEl" = $1`);
    }

    @Test({
        name: "[Mysql] Lexical processor",
        description: "Should create SQL queries"
    })
    public useLexicalProcessorMysql() {
        // Same scenarios as the Postgres test; only the dialect differs.
        // @ts-ignore
        const fakeRepositoryProxy: Mandarine.ORM.RepositoryProxy = {
            SUPPORTED_KEYWORDS: ["and", "or", "isnotnull", "isnull", "isempty", "isnotempty", "startingwith", "endswith", "like", "greaterthan", "lessthan"]
        };
        const fakeTableMetadata = {
            name: "users",
            schema: "public"
        };
        // @ts-ignore
        const fakeEntity: Mandarine.ORM.Entity.Table = {
            columns: [
                {
                    name: "id"
                },
                {
                    name: "firstname"
                },
                {
                    name: "lastname"
                },
                {
                    name: "age"
                },
                {
                    name: "posts"
                },
                {
                    name: "country"
                },
                {
                    name: "CarModEl"
                }
            ]
        }
        // MySQL dialect: unquoted identifiers and '?' placeholders.
        const dialect = new MysqlDialect();
        const countByCountry = lexicalProcessor(fakeRepositoryProxy, "countByCountry", "countBy", fakeTableMetadata, fakeEntity, dialect);
        DenoAsserts.assertEquals(countByCountry, `SELECT COUNT(*) FROM public.users WHERE country = ?`);
        const findByCountry = lexicalProcessor(fakeRepositoryProxy, "findByCountry", "findBy", fakeTableMetadata, fakeEntity, dialect);
        DenoAsserts.assertEquals(findByCountry, `SELECT * FROM public.users WHERE country = ?`);
        const findByFirstnameAndCountry = lexicalProcessor(fakeRepositoryProxy, "findByFirstnameAndCountry", "findBy", fakeTableMetadata, fakeEntity, dialect);
        DenoAsserts.assertEquals(findByFirstnameAndCountry, `SELECT * FROM public.users WHERE firstname = ? AND country = ?`);
        const findByCountryIsNotNull = lexicalProcessor(fakeRepositoryProxy, "findByCountryIsNotNull", "findBy", fakeTableMetadata, fakeEntity, dialect);
        DenoAsserts.assertEquals(findByCountryIsNotNull, `SELECT * FROM public.users WHERE country IS NOT NULL`);
        const findByCountryIsNull = lexicalProcessor(fakeRepositoryProxy, "findByCountryIsNull", "findBy", fakeTableMetadata, fakeEntity, dialect);
        DenoAsserts.assertEquals(findByCountryIsNull, `SELECT * FROM public.users WHERE country IS NULL`);
        const findByFirstnameIsNotEmptyAndCountryIsEmpty = lexicalProcessor(fakeRepositoryProxy, "findByFirstnameIsNotEmptyAndCountryIsEmpty", "findBy", fakeTableMetadata, fakeEntity, dialect);
        DenoAsserts.assertEquals(findByFirstnameIsNotEmptyAndCountryIsEmpty, `SELECT * FROM public.users WHERE firstname <> '' AND country = ''`);
        const findByLastnameStartingWith = lexicalProcessor(fakeRepositoryProxy, "findByLastnameStartingWith", "findBy", fakeTableMetadata, fakeEntity, dialect);
        DenoAsserts.assertEquals(findByLastnameStartingWith, `SELECT * FROM public.users WHERE lastname LIKE '' || ? || '%'`);
        const findByLastnameAndCountryLikeAndFirstnameEndsWith = lexicalProcessor(fakeRepositoryProxy, "findByLastnameAndCountryLikeAndFirstnameEndsWith", "findBy", fakeTableMetadata, fakeEntity, dialect);
        DenoAsserts.assertEquals(findByLastnameAndCountryLikeAndFirstnameEndsWith, `SELECT * FROM public.users WHERE lastname = ? AND country LIKE '%' || ? || '%' AND firstname LIKE '%' || ? || ''`);
        const findByFirstnameAndCountryAndLastnameEndsWithAndFirstnameIsNotNullOrLastnameIsNullAndCountryLike = lexicalProcessor(fakeRepositoryProxy, "findByFirstnameAndCountryAndLastnameEndsWithAndFirstnameIsNotNullOrLastnameIsNullAndCountryLike", "findBy", fakeTableMetadata, fakeEntity, dialect);
        DenoAsserts.assertEquals(findByFirstnameAndCountryAndLastnameEndsWithAndFirstnameIsNotNullOrLastnameIsNullAndCountryLike, `SELECT * FROM public.users WHERE firstname = ? AND country = ? AND lastname LIKE '%' || ? || '' AND firstname IS NOT NULL OR lastname IS NULL AND country LIKE '%' || ? || '%'`);
        const findByFirstnameAndAgeGreaterThanAndPostsLessThanOrPostsGreaterThan = lexicalProcessor(fakeRepositoryProxy, "findByFirstnameAndAgeGreaterThanAndPostsLessThanOrPostsGreaterThan", "findBy", fakeTableMetadata, fakeEntity, dialect);
        DenoAsserts.assertEquals(findByFirstnameAndAgeGreaterThanAndPostsLessThanOrPostsGreaterThan, `SELECT * FROM public.users WHERE firstname = ? AND age > ? AND posts < ? OR posts > ?`);
        const findByCarmodel = lexicalProcessor(fakeRepositoryProxy, "findByCarModel", "findBy", fakeTableMetadata, fakeEntity, dialect);
        DenoAsserts.assertEquals(findByCarmodel, `SELECT * FROM public.users WHERE CarModEl = ?`);
    }
}
import * as nls from 'vscode-nls';
let localize = nls.loadMessageBundle();
import * as os from 'os';
import * as fs from 'fs';
import * as path from 'path';
import {ChromeDebugAdapter as CoreDebugAdapter, logger, utils as coreUtils, ISourceMapPathOverrides, ChromeDebugSession, telemetry, ITelemetryPropertyCollector, IOnPausedResult, Version } from 'vscode-chrome-debug-core';
import { spawn, ChildProcess, fork, execSync } from 'child_process';
import { Crdp } from 'vscode-chrome-debug-core';
import { DebugProtocol } from 'vscode-debugprotocol';
import { ILaunchRequestArgs, IAttachRequestArgs, ICommonRequestArgs, ISetExpressionArgs, VSDebugProtocolCapabilities, ISetExpressionResponseBody } from './chromeDebugInterfaces';
import * as utils from './utils';
import * as errors from './errors';
import { FinishedStartingUpEventArguments } from 'vscode-chrome-debug-core/lib/src/executionTimingsReporter';
import { ChromeProvidedPortConnection } from './chromeProvidedPortConnection';
// Keep in sync with sourceMapPathOverrides package.json default
// Default mappings from bundler source-map URLs (webpack, meteor) to on-disk
// paths; '${webRoot}' is substituted with the launch config's webRoot.
const DefaultWebSourceMapPathOverrides: ISourceMapPathOverrides = {
    'webpack:///./~/*': '${webRoot}/node_modules/*',
    'webpack:///./*': '${webRoot}/*',
    'webpack:///*': '*',
    'webpack:///src/*': '${webRoot}/*',
    'meteor://💻app/*': '${webRoot}/*'
};
// Initialize arguments extended with a host capability flag: whether the host
// can launch Chrome unelevated on the adapter's behalf.
interface IExtendedInitializeRequestArguments extends DebugProtocol.InitializeRequestArguments {
    supportsLaunchUnelevatedProcessRequest?: boolean;
}
export class ChromeDebugAdapter extends CoreDebugAdapter {
    // Overlay message shown in the page while execution is paused.
    private _pagePauseMessage = 'Paused in Visual Studio Code';
    // Handle to the Chrome process spawned by launch().
    private _chromeProc: ChildProcess;
    // Debounces overlay show/hide around rapid pause/resume cycles.
    private _overlayHelper: utils.DebounceHelper;
    private _chromePID: number;
    // URL the user originally asked for (may differ from what is navigated to).
    private _userRequestedUrl: string;
    // Set from the initialize request's supportsLaunchUnelevatedProcessRequest flag.
    private _doesHostSupportLaunchUnelevatedProcessRequest: boolean;
    protected _chromeConnection: ChromeProvidedPortConnection;
public initialize(args: IExtendedInitializeRequestArguments): VSDebugProtocolCapabilities {
this._overlayHelper = new utils.DebounceHelper(/*timeoutMs=*/200);
const capabilities: VSDebugProtocolCapabilities = super.initialize(args);
capabilities.supportsRestartRequest = true;
capabilities.supportsSetExpression = true;
capabilities.supportsLogPoints = true;
if (args.locale) {
localize = nls.config({ locale: args.locale, bundleFormat: nls.BundleFormat.standalone })();
}
this._doesHostSupportLaunchUnelevatedProcessRequest = args.supportsLaunchUnelevatedProcessRequest || false;
return capabilities;
}
public launch(args: ILaunchRequestArgs, telemetryPropertyCollector: ITelemetryPropertyCollector, seq?: number): Promise<void> {
if ((args.breakOnLoad || typeof args.breakOnLoad === 'undefined') && !args.breakOnLoadStrategy) {
args.breakOnLoadStrategy = 'instrument';
}
return super.launch(args, telemetryPropertyCollector).then(async () => {
let runtimeExecutable: string;
if (args.shouldLaunchChromeUnelevated !== undefined) {
telemetryPropertyCollector.addTelemetryProperty('shouldLaunchChromeUnelevated', args.shouldLaunchChromeUnelevated.toString());
}
if (this._doesHostSupportLaunchUnelevatedProcessRequest) {
telemetryPropertyCollector.addTelemetryProperty('doesHostSupportLaunchUnelevated', 'true');
}
if (args.runtimeExecutable) {
const re = findExecutable(args.runtimeExecutable);
if (!re) {
return errors.getNotExistErrorResponse('runtimeExecutable', args.runtimeExecutable);
}
runtimeExecutable = re;
}
runtimeExecutable = runtimeExecutable || utils.getBrowserPath();
if (!runtimeExecutable) {
return coreUtils.errP(localize('attribute.chrome.missing', "Can't find Chrome - install it or set the \"runtimeExecutable\" field in the launch config."));
}
// Start with remote debugging enabled
// allow port = 0
let port = (args.port !== undefined) ? args.port : 9222;
const chromeArgs: string[] = [];
const chromeEnv: coreUtils.IStringDictionary<string> = args.env || null;
const chromeWorkingDir: string = args.cwd || null;
if (!args.noDebug) {
chromeArgs.push('--remote-debugging-port=' + port);
}
// Also start with extra stuff disabled
chromeArgs.push(...['--no-first-run', '--no-default-browser-check']);
if (args.runtimeArgs) {
telemetryPropertyCollector.addTelemetryProperty('numberOfChromeCmdLineSwitchesBeingUsed', String(args.runtimeArgs.length));
chromeArgs.push(...args.runtimeArgs);
}
// Set a default userDataDir, if the user opted in explicitly with 'true' or if args.userDataDir is not set (only when runtimeExecutable is not set).
// Can't set it automatically with runtimeExecutable because it may not be desired with Electron, other runtimes, random scripts.
if (
args.userDataDir === true ||
(typeof args.userDataDir === 'undefined' && !args.runtimeExecutable)
) {
args.userDataDir = path.join(os.tmpdir(), `vscode-chrome-debug-userdatadir_${port}`);
}
if (args.userDataDir) {
chromeArgs.push('--user-data-dir=' + args.userDataDir);
this._chromeConnection.setUserDataDir(args.userDataDir);
}
if (args._clientOverlayPausedMessage) {
this._pagePauseMessage = args._clientOverlayPausedMessage;
}
let launchUrl: string;
if (args.file) {
launchUrl = coreUtils.pathToFileURL(args.file);
} else if (args.url) {
launchUrl = args.url;
}
if (launchUrl && !args.noDebug) {
// We store the launch file/url provided and temporarily launch and attach to about:blank page. Once we receive configurationDone() event, we redirect the page to this file/url
// This is done to facilitate hitting breakpoints on load
this._userRequestedUrl = launchUrl;
launchUrl = 'about:blank';
}
if (launchUrl) {
chromeArgs.push(launchUrl);
}
this._chromeProc = await this.spawnChrome(runtimeExecutable, chromeArgs, chromeEnv, chromeWorkingDir, !!args.runtimeExecutable,
args.shouldLaunchChromeUnelevated);
if (this._chromeProc) {
this._chromeProc.on('error', (err) => {
const errMsg = 'Chrome error: ' + err;
logger.error(errMsg);
this.terminateSession(errMsg);
});
}
return args.noDebug ? undefined :
this.doAttach(port, launchUrl || args.urlFilter, args.address, args.timeout, undefined, args.extraCRDPChannelPort);
});
}
public attach(args: IAttachRequestArgs): Promise<void> {
if (args.urlFilter) {
args.url = args.urlFilter;
}
return super.attach(args);
}
protected hookConnectionEvents(): void {
super.hookConnectionEvents();
this.chrome.Page.on('frameNavigated', params => this.onFrameNavigated(params));
}
protected onFrameNavigated(params: Crdp.Page.FrameNavigatedEvent): void {
if (this._userRequestedUrl) {
const url = params.frame.url;
const requestedUrlNoAnchor = this._userRequestedUrl.split('#')[0]; // Frame navigated url doesn't include the anchor
if (url === requestedUrlNoAnchor || decodeURI(url) === requestedUrlNoAnchor) { // 'http://localhost:1234/test%20page' will use the not decoded version, 'http://localhost:1234/test page' will use the decoded version
// Chrome started to navigate to the user's requested url
this.events.emit(ChromeDebugSession.FinishedStartingUpEventName, { requestedContentWasDetected: true } as FinishedStartingUpEventArguments);
} else if (url === 'chrome-error://chromewebdata/') {
// Chrome couldn't retrieve the web-page in the requested url
this.events.emit(ChromeDebugSession.FinishedStartingUpEventName, { requestedContentWasDetected: false, reasonForNotDetected: 'UnreachableURL'} as FinishedStartingUpEventArguments);
} else if (url.startsWith('chrome-error://')) {
// Uknown chrome error
this.events.emit(ChromeDebugSession.FinishedStartingUpEventName, { requestedContentWasDetected: false, reasonForNotDetected: 'UnknownChromeError'} as FinishedStartingUpEventArguments);
}
}
}
public async configurationDone(): Promise<void> {
if (this._userRequestedUrl) {
// This means all the setBreakpoints requests have been completed. So we can navigate to the original file/url.
this.chrome.Page.navigate({ url: this._userRequestedUrl }).then(() => {
/* __GDPR__FRAGMENT__
"StepNames" : {
"RequestedNavigateToUserPage" : { "classification": "SystemMetaData", "purpose": "FeatureInsight" }
}
*/
this.events.emitMilestoneReached('RequestedNavigateToUserPage');
});
}
await super.configurationDone();
}
public commonArgs(args: ICommonRequestArgs): void {
if (args.webRoot && (!args.pathMapping || !args.pathMapping['/'])) {
args.pathMapping = args.pathMapping || {};
args.pathMapping['/'] = args.webRoot;
}
args.sourceMaps = typeof args.sourceMaps === 'undefined' || args.sourceMaps;
args.sourceMapPathOverrides = getSourceMapPathOverrides(args.webRoot, args.sourceMapPathOverrides);
args.skipFileRegExps = ['^chrome-extension:.*'];
if (args.targetTypes === undefined) {
args.targetFilter = utils.defaultTargetFilter;
} else {
args.targetFilter = utils.getTargetFilter(args.targetTypes);
}
args.smartStep = typeof args.smartStep === 'undefined' ? !this._isVSClient : args.smartStep;
super.commonArgs(args);
}
protected doAttach(port: number, targetUrl?: string, address?: string, timeout?: number, websocketUrl?: string, extraCRDPChannelPort?: number): Promise<void> {
return super.doAttach(port, targetUrl, address, timeout, websocketUrl, extraCRDPChannelPort).then(async () => {
// Don't return this promise, a failure shouldn't fail attach
this.globalEvaluate({ expression: 'navigator.userAgent', silent: true })
.then(
evalResponse => logger.log('Target userAgent: ' + evalResponse.result.value),
err => logger.log('Getting userAgent failed: ' + err.message))
.then(() => {
const configDisableNetworkCache = (<ICommonRequestArgs>this._launchAttachArgs).disableNetworkCache;
const cacheDisabled = typeof configDisableNetworkCache === 'boolean' ?
configDisableNetworkCache :
true;
this.chrome.Network.setCacheDisabled({ cacheDisabled }).catch(() => {
// Ignore failure
});
});
const versionInformationPromise = this.chrome.Browser.getVersion().then(
response => {
const properties = {
'Versions.Target.CRDPVersion': response.protocolVersion,
'Versions.Target.Revision': response.revision,
'Versions.Target.UserAgent': response.userAgent,
'Versions.Target.V8': response.jsVersion
};
const parts = (response.product || '').split('/');
if (parts.length === 2) { // Currently response.product looks like "Chrome/65.0.3325.162" so we split the project and the actual version number
properties['Versions.Target.Project'] = parts[0];
properties['Versions.Target.Version'] = parts[1];
} else { // If for any reason that changes, we submit the entire product as-is
properties['Versions.Target.Product'] = response.product;
}
return properties;
},
err => {
logger.log('Getting userAgent failed: ' + err.message);
const properties = { 'Versions.Target.NoUserAgentReason': 'Error while retriving target user agent' } as telemetry.IExecutionResultTelemetryProperties;
coreUtils.fillErrorDetails(properties, err);
return properties;
});
// Send the versions information as it's own event so we can easily backfill other events in the user session if needed
/* __GDPR__FRAGMENT__
"VersionInformation" : {
"Versions.Target.CRDPVersion" : { "classification": "SystemMetaData", "purpose": "FeatureInsight" },
"Versions.Target.Revision" : { "classification": "SystemMetaData", "purpose": "FeatureInsight" },
"Versions.Target.UserAgent" : { "classification": "SystemMetaData", "purpose": "FeatureInsight" },
"Versions.Target.V8" : { "classification": "SystemMetaData", "purpose": "FeatureInsight" },
"Versions.Target.V<NUMBER>" : { "classification": "SystemMetaData", "purpose": "FeatureInsight" },
"Versions.Target.Project" : { "classification": "SystemMetaData", "purpose": "FeatureInsight" },
"Versions.Target.Version" : { "classification": "SystemMetaData", "purpose": "FeatureInsight" },
"Versions.Target.Product" : { "classification": "SystemMetaData", "purpose": "FeatureInsight" },
"Versions.Target.NoUserAgentReason" : { "classification": "SystemMetaData", "purpose": "FeatureInsight" },
"${include}": [ "${IExecutionResultTelemetryProperties}" ]
}
*/
/* __GDPR__
"target-version" : {
"${include}": [ "${DebugCommonProperties}" ]
}
*/
versionInformationPromise.then(versionInformation => telemetry.telemetry.reportEvent('target-version', versionInformation));
try {
if (this._breakOnLoadHelper) {
// This is what -core is doing. We only actually care to see if this fails, to see if we need to apply the workaround
const browserVersion = (await this._chromeConnection.version).browser;
if (!browserVersion.isAtLeastVersion(0, 1)) { // If this is true it means it's unknown version
logger.log(`/json/version failed, attempting workaround to get the version`);
// If the original way failed, we try to use versionInformationPromise to get this information
const versionInformation = await versionInformationPromise;
const alternativeBrowserVersion = Version.parse(versionInformation['Versions.Target.Version']);
this._breakOnLoadHelper.setBrowserVersion(alternativeBrowserVersion);
}
}
} catch (exception) {
// If something fails we report telemetry and we ignore it
telemetry.telemetry.reportEvent('break-on-load-target-version-workaround-failed', exception);
}
/* __GDPR__FRAGMENT__
"DebugCommonProperties" : {
"${include}": [ "${VersionInformation}" ]
}
*/
telemetry.telemetry.addCustomGlobalProperty(versionInformationPromise);
});
}
protected runConnection(): Promise<void>[] {
return [
...super.runConnection(),
this.chrome.Page.enable(),
this.chrome.Network.enable({})
];
}
protected async onPaused(notification: Crdp.Debugger.PausedEvent, expectingStopReason = this._expectingStopReason): Promise<IOnPausedResult> {
const result = (await super.onPaused(notification, expectingStopReason));
if (result.didPause) {
this._overlayHelper.doAndCancel(() => {
return this._domains.has('Overlay') ?
this.chrome.Overlay.setPausedInDebuggerMessage({ message: this._pagePauseMessage }).catch(() => { }) :
(<any>this.chrome).Page.configureOverlay({ message: this._pagePauseMessage }).catch(() => { });
});
}
return result;
}
protected threadName(): string {
return 'Chrome';
}
protected onResumed(): void {
this._overlayHelper.wait(() => {
return this._domains.has('Overlay') ?
this.chrome.Overlay.setPausedInDebuggerMessage({ }).catch(() => { }) :
(<any>this.chrome).Page.configureOverlay({ }).catch(() => { });
});
super.onResumed();
}
public async disconnect(args: DebugProtocol.DisconnectArguments): Promise<void> {
const hadTerminated = this._hasTerminated;
// Disconnect before killing Chrome, because running "taskkill" when it's paused sometimes doesn't kill it
super.disconnect(args);
if ( (this._chromeProc || this._chromePID) && !hadTerminated) {
// Only kill Chrome if the 'disconnect' originated from vscode. If we previously terminated
// due to Chrome shutting down, or devtools taking over, don't kill Chrome.
if (coreUtils.getPlatform() === coreUtils.Platform.Windows && this._chromePID) {
this.killChromeOnWindows(this._chromePID);
} else if (this._chromeProc) {
logger.log('Killing Chrome process');
this._chromeProc.kill('SIGINT');
}
}
this._chromeProc = null;
}
private async killChromeOnWindows(chromePID: number): Promise<void> {
let taskkillCmd = `taskkill /PID ${chromePID}`;
logger.log(`Killing Chrome process by pid: ${taskkillCmd}`);
try {
execSync(taskkillCmd);
} catch (e) {
// The command will fail if process was not found. This can be safely ignored.
}
for (let i = 0 ; i < 10; i++) {
// Check to see if the process is still running, with CSV output format
let tasklistCmd = `tasklist /FI "PID eq ${chromePID}" /FO CSV`;
logger.log(`Looking up process by pid: ${tasklistCmd}`);
let tasklistOutput = execSync(tasklistCmd).toString();
// If the process is found, tasklist will output CSV with one of the values being the PID. Exit code will be 0.
// If the process is not found, tasklist will give a generic "not found" message instead. Exit code will also be 0.
// If we see an entry in the CSV for the PID, then we can assume the process was found.
if (!tasklistOutput.includes(`"${chromePID}"`)) {
logger.log(`Chrome process with pid ${chromePID} is not running`);
return;
}
// Give the process some time to close gracefully
logger.log(`Chrome process with pid ${chromePID} is still alive, waiting...`);
await new Promise<void>((resolve) => {
setTimeout(resolve, 200);
});
}
// At this point we can assume the process won't close on its own, so force kill it
let taskkillForceCmd = `taskkill /F /PID ${chromePID}`;
logger.log(`Killing Chrome process timed out. Killing again using force: ${taskkillForceCmd}`);
try {
execSync(taskkillForceCmd);
} catch (e) {}
}
/**
* Opt-in event called when the 'reload' button in the debug widget is pressed
*/
public restart(): Promise<void> {
return this.chrome ?
this.chrome.Page.reload({ ignoreCache: true }) :
Promise.resolve();
}
private async spawnChrome(chromePath: string, chromeArgs: string[], env: coreUtils.IStringDictionary<string>,
cwd: string, usingRuntimeExecutable: boolean, shouldLaunchUnelevated: boolean): Promise<ChildProcess> {
/* __GDPR__FRAGMENT__
"StepNames" : {
"LaunchTarget.LaunchExe" : { "classification": "SystemMetaData", "purpose": "FeatureInsight" }
}
*/
this.events.emitStepStarted('LaunchTarget.LaunchExe');
const platform = coreUtils.getPlatform();
if (platform === coreUtils.Platform.Windows && shouldLaunchUnelevated) {
let chromePid: number;
if (this._doesHostSupportLaunchUnelevatedProcessRequest) {
chromePid = await this.spawnChromeUnelevatedWithClient(chromePath, chromeArgs);
} else {
chromePid = await this.spawnChromeUnelevatedWithWindowsScriptHost(chromePath, chromeArgs);
}
this._chromePID = chromePid;
// Cannot get the real Chrome process, so return null.
return null;
} else if (platform === coreUtils.Platform.Windows && !usingRuntimeExecutable) {
const options = {
execArgv: [],
silent: true
};
if (env) {
options['env'] = this.getFullEnv(env);
}
if (cwd) {
options['cwd'] = cwd;
}
const chromeProc = fork(getChromeSpawnHelperPath(), [chromePath, ...chromeArgs], options);
chromeProc.unref();
chromeProc.on('message', data => {
const pidStr = data.toString();
logger.log('got chrome PID: ' + pidStr);
this._chromePID = parseInt(pidStr, 10);
});
chromeProc.on('error', (err) => {
const errMsg = 'chromeSpawnHelper error: ' + err;
logger.error(errMsg);
});
chromeProc.stderr.on('data', data => {
logger.error('[chromeSpawnHelper] ' + data.toString());
});
chromeProc.stdout.on('data', data => {
logger.log('[chromeSpawnHelper] ' + data.toString());
});
return chromeProc;
} else {
logger.log(`spawn('${chromePath}', ${JSON.stringify(chromeArgs) })`);
const options = {
detached: true,
stdio: ['ignore' as const],
};
if (env) {
options['env'] = this.getFullEnv(env);
}
if (cwd) {
options['cwd'] = cwd;
}
const chromeProc = spawn(chromePath, chromeArgs, options);
chromeProc.unref();
this._chromePID = chromeProc.pid;
return chromeProc;
}
}
private async spawnChromeUnelevatedWithWindowsScriptHost(chromePath: string, chromeArgs: string[]): Promise<number> {
const semaphoreFile = path.join(os.tmpdir(), 'launchedUnelevatedChromeProcess.id');
if (fs.existsSync(semaphoreFile)) { // remove the previous semaphoreFile if it exists.
fs.unlinkSync(semaphoreFile);
}
const chromeProc = fork(getChromeSpawnHelperPath(),
[`${process.env.windir}\\System32\\cscript.exe`, path.join(__dirname, 'launchUnelevated.js'),
semaphoreFile, chromePath, ...chromeArgs], {});
chromeProc.unref();
await new Promise<void>((resolve, reject) => {
chromeProc.on('message', resolve);
});
const pidStr = await findNewlyLaunchedChromeProcess(semaphoreFile);
if (pidStr) {
logger.log(`Parsed output file and got Chrome PID ${pidStr}`);
return parseInt(pidStr, 10);
}
return null;
}
private getFullEnv(customEnv: coreUtils.IStringDictionary<string>): coreUtils.IStringDictionary<string> {
const env = {
...process.env,
...customEnv
};
Object.keys(env).filter(k => env[k] === null).forEach(key => delete env[key]);
return env;
}
private async spawnChromeUnelevatedWithClient(chromePath: string, chromeArgs: string[]): Promise<number> {
return new Promise<number>((resolve, reject) => {
this._session.sendRequest('launchUnelevated', {
'process': chromePath,
'args': chromeArgs
}, 10000, (response) => {
if (!response.success) {
reject(new Error(response.message));
} else {
resolve(response.body.processId);
}
});
});
}
public async setExpression(args: ISetExpressionArgs): Promise<ISetExpressionResponseBody> {
const reconstructedExpression = `${args.expression} = ${args.value}`;
const evaluateEventArgs: DebugProtocol.EvaluateArguments = {
expression: reconstructedExpression,
frameId: args.frameId,
format: args.format,
context: 'repl'
};
const evaluateResult = await this.evaluate(evaluateEventArgs);
return {
value: evaluateResult.result
};
// Beware that after the expression is changed, the variables on the current stackFrame will not
// be updated, which means the return value of the Runtime.getProperties request will not contain
// this change until the breakpoint is released(step over or continue).
//
// See also: https://bugs.chromium.org/p/chromium/issues/detail?id=820535
}
}
/**
 * Resolves ${webRoot} in the effective sourceMapPathOverrides: the user's own
 * overrides when provided (warning if webRoot is needed but unset), otherwise
 * the built-in defaults (silently).
 */
function getSourceMapPathOverrides(webRoot: string, sourceMapPathOverrides?: ISourceMapPathOverrides): ISourceMapPathOverrides {
    if (sourceMapPathOverrides) {
        return resolveWebRootPattern(webRoot, sourceMapPathOverrides, /*warnOnMissing=*/true);
    }
    return resolveWebRootPattern(webRoot, DefaultWebSourceMapPathOverrides, /*warnOnMissing=*/false);
}
/**
 * Returns a copy of sourceMapPathOverrides with the ${webRoot} pattern resolved in all entries
 * (both the left-hand pattern and the right-hand replacement).
 *
 * dynamically required by test
 */
export function resolveWebRootPattern(webRoot: string, sourceMapPathOverrides: ISourceMapPathOverrides, warnOnMissing: boolean): ISourceMapPathOverrides {
    const resolvedOverrides: ISourceMapPathOverrides = {};
    Object.keys(sourceMapPathOverrides).forEach(leftPattern => {
        const resolvedLeft = replaceWebRootInSourceMapPathOverridesEntry(webRoot, leftPattern, warnOnMissing);
        const resolvedRight = replaceWebRootInSourceMapPathOverridesEntry(webRoot, sourceMapPathOverrides[leftPattern], warnOnMissing);
        resolvedOverrides[resolvedLeft] = resolvedRight;
    });
    return resolvedOverrides;
}
/**
 * Substitutes a leading ${webRoot} token in one override entry.
 * A ${webRoot} anywhere other than position 0 is invalid and the entry is
 * returned unchanged (with a warning); a missing webRoot also leaves the
 * entry unchanged, warning only when warnOnMissing is set.
 */
function replaceWebRootInSourceMapPathOverridesEntry(webRoot: string, entry: string, warnOnMissing: boolean): string {
    const tokenPosition = entry.indexOf('${webRoot}');
    if (tokenPosition > 0) {
        logger.log('Warning: in a sourceMapPathOverrides entry, ${webRoot} is only valid at the beginning of the path');
        return entry;
    }
    if (tokenPosition === 0) {
        if (webRoot) {
            return entry.replace('${webRoot}', webRoot);
        }
        if (warnOnMissing) {
            logger.log('Warning: sourceMapPathOverrides entry contains ${webRoot}, but webRoot is not set');
        }
    }
    return entry;
}
/** Absolute path of the helper script used to spawn Chrome out-of-process on Windows. */
function getChromeSpawnHelperPath(): string {
    const helperScript = 'chromeSpawnHelper.js';
    return path.join(__dirname, helperScript);
}
/**
 * Verifies that a user-specified executable exists on disk, Windows-aware:
 * on win32, when the name carries no extension, each extension listed in
 * PATHEXT is tried first; the bare name is checked last on all platforms.
 * @returns the resolved path, or undefined when nothing exists.
 */
function findExecutable(program: string): string | undefined {
    if (process.platform === 'win32' && !path.extname(program)) {
        const pathExt = process.env['PATHEXT'];
        const extensions = pathExt ? pathExt.split(';') : [];
        for (const extension of extensions) {
            const candidate = program + extension;
            if (fs.existsSync(candidate)) {
                return candidate;
            }
        }
    }
    return fs.existsSync(program) ? program : undefined;
}
/**
 * Polls the semaphore file written by the launchUnelevated.js script until it
 * contains a 'ProcessId = <pid>;' line, for up to ~5 seconds (25 tries x 200ms).
 * @param semaphoreFile path of the temp file the launcher writes the PID into
 * @returns the PID as a string, or null (after reporting error telemetry) when
 *          it could not be determined in time
 * @throws if the semaphore file unexpectedly contains more than one PID line
 */
async function findNewlyLaunchedChromeProcess(semaphoreFile: string): Promise<string> {
    // Matches lines like "ProcessId = 1234;" (case-insensitive).
    const regexPattern = /processid\s+=\s+(\d+)\s*;/i;
    let lastAccessFileContent: string;
    for (let i = 0 ; i < 25; i++) {
        if (fs.existsSync(semaphoreFile)) {
            // The semaphore file is read as UTF-16LE — the encoding the
            // Windows Script Host launcher writes it in.
            lastAccessFileContent = fs.readFileSync(semaphoreFile, {
                encoding: 'utf16le'
            }).toString();
            const lines = lastAccessFileContent.split('\n');
            const matchedLines = (lines || []).filter(line => line.match(regexPattern));
            if (matchedLines.length > 1) {
                throw new Error(`Unexpected semaphore file format ${lines}`);
            }
            if (matchedLines.length === 1) {
                const match = matchedLines[0].match(regexPattern);
                return match[1];
            }
            // else == 0, wait for 200 ms delay and try again.
        }
        await new Promise<void>((resolve) => {
            setTimeout(resolve, 200);
        });
    }
    // Timed out: report the last file content seen so the failure can be diagnosed.
    const error = new Error(`Cannot acquire Chrome process id`);
    let telemetryProperties: any = {
        semaphoreFileContent: lastAccessFileContent
    };
    coreUtils.fillErrorDetails(telemetryProperties, error);
    /* __GDPR__
       "error" : {
          "semaphoreFileContent" : { "classification": "SystemMetaData", "purpose": "FeatureInsight" },
          "${include}": [
              "${IExecutionResultTelemetryProperties}",
              "${DebugCommonProperties}"
            ]
       }
     */
    telemetry.telemetry.reportEvent('error', telemetryProperties);
    return null;
}
import { BigNumber, constants, providers, utils, Wallet } from "ethers";
import Sinon, { restore, reset, createStubInstance, SinonStubbedInstance, SinonStub, stub } from "sinon";
import {
getRandomAddress,
getRandomBytes32,
mkAddress,
RequestContext,
expect,
Logger,
requestContextMock,
} from "@connext/nxtp-utils";
import { cachedPriceMap, ChainReader } from "../src/chainreader";
import { RpcProviderAggregator } from "../src/rpcProviderAggregator";
import { ChainNotSupported, ConfigurationError, ProviderNotConfigured, RpcError } from "../src/shared";
import * as contractFns from "../src/shared/contracts";
import {
TEST_SENDER_CHAIN_ID,
TEST_TX,
TEST_READ_TX,
TEST_TX_RECEIPT,
makeChaiReadable,
TEST_RECEIVER_CHAIN_ID,
} from "./utils";
// Silent by default; set LOG_LEVEL to surface chainreader logs while debugging tests.
const logger = new Logger({
    level: process.env.LOG_LEVEL ?? "silent",
    name: "ChainReaderTest",
});

// Shared test fixtures, re-created/reset in each describe's beforeEach/afterEach.
let signer: SinonStubbedInstance<Wallet>;
let chainReader: ChainReader;
let provider: SinonStubbedInstance<RpcProviderAggregator>;
// Request context passed through chainreader calls; id/origin are populated per test.
let context: RequestContext = {
    id: "",
    origin: "",
};
/// In these tests, we are testing the outer shell of chainreader - the interface, not the core functionality.
/// For core functionality tests, see dispatch.spec.ts and provider.spec.ts.
describe("ChainReader", () => {
beforeEach(() => {
provider = createStubInstance(RpcProviderAggregator);
signer = createStubInstance(Wallet);
signer.connect.resolves(true);
const chains = {
[TEST_SENDER_CHAIN_ID.toString()]: {
providers: [{ url: "https://-------------" }],
confirmations: 1,
gasStations: [],
},
};
chainReader = new ChainReader(logger, { chains }, signer);
Sinon.stub(chainReader as any, "getProvider").callsFake((chainId: number) => {
// NOTE: We check to make sure we are only getting the one chainId we expect
// to get in these unit tests.
expect(chainId).to.be.eq(TEST_SENDER_CHAIN_ID);
return provider;
});
context.id = getRandomBytes32();
context.origin = "ChainReaderTest";
});
afterEach(() => {
restore();
reset();
});
describe("#readTx", () => {
it("happy: returns exactly what it reads", async () => {
const fakeData = getRandomBytes32();
provider.readContract.resolves(fakeData);
const data = await chainReader.readTx(TEST_READ_TX);
expect(data).to.deep.eq(fakeData);
expect(provider.readContract.callCount).to.equal(1);
expect(provider.readContract.args[0][0]).to.deep.eq(TEST_READ_TX);
});
it("should throw if provider fails", async () => {
provider.readContract.rejects(new RpcError("fail"));
await expect(chainReader.readTx(TEST_READ_TX)).to.be.rejectedWith("fail");
});
});
describe("#getBalance", () => {
it("happy", async () => {
const testBalance = utils.parseUnits("42", "ether");
const testAddress = getRandomAddress();
provider.getBalance.resolves(testBalance);
const balance = await chainReader.getBalance(TEST_SENDER_CHAIN_ID, testAddress);
expect(balance.eq(testBalance)).to.be.true;
expect(provider.getBalance.callCount).to.equal(1);
expect(provider.getBalance.getCall(0).args[0]).to.deep.eq(testAddress);
});
it("should throw if provider fails", async () => {
provider.getBalance.rejects(new RpcError("fail"));
await expect(chainReader.getBalance(TEST_SENDER_CHAIN_ID, mkAddress("0xaaa"))).to.be.rejectedWith("fail");
});
});
describe("#getGasPrice", () => {
it("happy", async () => {
const testGasPrice = utils.parseUnits("5", "gwei");
provider.getGasPrice.resolves(testGasPrice);
const gasPrice = await chainReader.getGasPrice(TEST_SENDER_CHAIN_ID, requestContextMock);
expect(gasPrice.eq(testGasPrice)).to.be.true;
expect(provider.getGasPrice.callCount).to.equal(1);
});
it("should throw if provider fails", async () => {
provider.getGasPrice.rejects(new RpcError("fail"));
await expect(chainReader.getGasPrice(TEST_SENDER_CHAIN_ID, requestContextMock)).to.be.rejectedWith("fail");
});
});
describe("#getDecimalsForAsset", () => {
it("happy", async () => {
const decimals = 18;
const assetId = mkAddress("0xaaa");
provider.getDecimalsForAsset.resolves(decimals);
const retrieved = await chainReader.getDecimalsForAsset(TEST_SENDER_CHAIN_ID, assetId);
expect(retrieved).to.be.eq(decimals);
expect(provider.getDecimalsForAsset.callCount).to.equal(1);
expect(provider.getDecimalsForAsset.getCall(0).args[0]).to.deep.eq(assetId);
});
it("should throw if provider fails", async () => {
provider.getDecimalsForAsset.rejects(new RpcError("fail"));
await expect(chainReader.getDecimalsForAsset(TEST_SENDER_CHAIN_ID, mkAddress("0xaaa"))).to.be.rejectedWith(
"fail",
);
});
});
describe("#getBlockTime", () => {
it("happy", async () => {
const time = Math.floor(Date.now() / 1000);
provider.getBlockTime.resolves(time);
const blockTime = await chainReader.getBlockTime(TEST_SENDER_CHAIN_ID);
expect(blockTime).to.be.eq(time);
expect(provider.getBlockTime.callCount).to.equal(1);
});
it("should throw if provider fails", async () => {
provider.getBlockTime.rejects(new RpcError("fail"));
await expect(chainReader.getBlockTime(TEST_SENDER_CHAIN_ID)).to.be.rejectedWith("fail");
});
});
describe("#getBlockNumber", () => {
it("happy", async () => {
const testBlockNumber = 42;
provider.getBlockNumber.resolves(testBlockNumber);
const blockNumber = await chainReader.getBlockNumber(TEST_SENDER_CHAIN_ID);
expect(blockNumber).to.be.eq(testBlockNumber);
expect(provider.getBlockNumber.callCount).to.equal(1);
});
it("should throw if provider fails", async () => {
provider.getBlockNumber.rejects(new RpcError("fail"));
await expect(chainReader.getBlockNumber(TEST_SENDER_CHAIN_ID)).to.be.rejectedWith("fail");
});
});
describe("#getTransactionReceipt", () => {
it("happy", async () => {
provider.getTransactionReceipt.resolves(TEST_TX_RECEIPT);
const receipt = await chainReader.getTransactionReceipt(TEST_SENDER_CHAIN_ID, TEST_TX_RECEIPT.transactionHash);
expect(makeChaiReadable(receipt)).to.deep.eq(makeChaiReadable(TEST_TX_RECEIPT));
expect(provider.getTransactionReceipt.callCount).to.be.eq(1);
});
it("should throw if provider fails", async () => {
provider.getTransactionReceipt.rejects(new RpcError("fail"));
await expect(
chainReader.getTransactionReceipt(TEST_SENDER_CHAIN_ID, TEST_TX_RECEIPT.transactionHash),
).to.be.rejectedWith("fail");
});
});
describe("#getCode", () => {
it("happy", async () => {
const code = "0x12345789";
provider.getCode.resolves(code);
const result = await chainReader.getCode(TEST_SENDER_CHAIN_ID, mkAddress("0xa1"));
expect(result).to.be.eq(code);
expect(provider.getCode.callCount).to.equal(1);
});
it("should throw if provider fails", async () => {
provider.getCode.rejects(new RpcError("fail"));
await expect(chainReader.getCode(TEST_SENDER_CHAIN_ID, mkAddress("0xa1"))).to.be.rejectedWith("fail");
});
});
describe("#getTokenPrice", () => {
const priceOracleContractFakeAddr = mkAddress("0x7f");
let getDeployedPriceOracleContractStub: SinonStub;
let getPriceOracleInterfaceStub: SinonStub;
let getTokenPriceFromOnChainStub: SinonStub;
let readTxStub: SinonStub;
let interfaceStub: SinonStubbedInstance<utils.Interface>;
beforeEach(() => {
interfaceStub = createStubInstance(utils.Interface);
getPriceOracleInterfaceStub = Sinon.stub(contractFns, "getPriceOracleInterface");
getPriceOracleInterfaceStub.returns(interfaceStub);
getDeployedPriceOracleContractStub = Sinon.stub(contractFns, "getDeployedPriceOracleContract");
getDeployedPriceOracleContractStub.returns({
address: priceOracleContractFakeAddr,
abi: ["fakeAbi()"],
});
readTxStub = Sinon.stub(chainReader, "readTx");
});
it("happy", async () => {
const assetId = mkAddress("0xc3");
const data = "0x123456789";
const tokenPrice = "5812471953821";
interfaceStub.encodeFunctionData.returns(data);
readTxStub.resolves(tokenPrice);
const result = await chainReader.getTokenPrice(TEST_SENDER_CHAIN_ID, assetId);
expect(result.toString()).to.be.eq(tokenPrice);
expect(getDeployedPriceOracleContractStub.getCall(0).args).to.deep.eq([TEST_SENDER_CHAIN_ID]);
expect(interfaceStub.encodeFunctionData.getCall(0).args).to.deep.eq(["getTokenPrice", [assetId]]);
expect(readTxStub.getCall(0).args[0]).to.deep.eq({
chainId: TEST_SENDER_CHAIN_ID,
to: priceOracleContractFakeAddr,
data,
});
});
it("should throw ChainNotSupported if chain not supported for token pricing", async () => {
getDeployedPriceOracleContractStub.returns(undefined);
await expect(chainReader.getTokenPrice(TEST_SENDER_CHAIN_ID, mkAddress("0xa1"))).to.be.rejectedWith(
ChainNotSupported,
);
});
it("should return cached price if updated timestamp less than 1 min", async () => {
getTokenPriceFromOnChainStub = Sinon.stub(chainReader, "getTokenPriceFromOnChain");
const assetId = mkAddress("0xc3");
const currentTimestamp = Math.floor(Date.now() / 1000);
const tokenPrice = BigNumber.from("5812471953821");
const cachedPriceKey = TEST_SENDER_CHAIN_ID.toString().concat("-").concat(assetId);
cachedPriceMap.set(cachedPriceKey, {
timestamp: currentTimestamp - 30,
price: tokenPrice,
});
getTokenPriceFromOnChainStub.returns(BigNumber.from("581247195382112121212"));
expect((await chainReader.getTokenPrice(TEST_SENDER_CHAIN_ID, assetId)).toString()).to.be.eq(
tokenPrice.toString(),
);
});
it("should return real price if updated timestamp more than 1 min", async () => {
getTokenPriceFromOnChainStub = Sinon.stub(chainReader, "getTokenPriceFromOnChain");
const assetId = mkAddress("0xc3");
const currentTimestamp = Math.floor(Date.now() / 1000);
const tokenPrice = BigNumber.from("5812471953821");
const cachedPriceKey = TEST_SENDER_CHAIN_ID.toString().concat("-").concat(assetId).concat("latest");
cachedPriceMap.set(cachedPriceKey, {
timestamp: currentTimestamp - 61,
price: tokenPrice,
});
getTokenPriceFromOnChainStub.returns(BigNumber.from("581247195382112121212"));
expect((await chainReader.getTokenPrice(TEST_SENDER_CHAIN_ID, assetId)).toString()).to.be.eq(
"581247195382112121212",
);
});
});
  // Tests for ChainReader#getTokenPriceFromOnChain: oracle contract lookup,
  // calldata encoding and the read transaction it issues.
  describe("#getTokenPriceFromOnChain", () => {
    const priceOracleContractFakeAddr = mkAddress("0x7f");
    let getDeployedPriceOracleContractStub: SinonStub;
    let getPriceOracleInterfaceStub: SinonStub;
    let readTxStub: SinonStub;
    let interfaceStub: SinonStubbedInstance<utils.Interface>;
    beforeEach(() => {
      // Stub out contract helpers and readTx so no real chain access happens.
      interfaceStub = createStubInstance(utils.Interface);
      getPriceOracleInterfaceStub = Sinon.stub(contractFns, "getPriceOracleInterface");
      getPriceOracleInterfaceStub.returns(interfaceStub);
      getDeployedPriceOracleContractStub = Sinon.stub(contractFns, "getDeployedPriceOracleContract");
      getDeployedPriceOracleContractStub.returns({
        address: priceOracleContractFakeAddr,
        abi: ["fakeAbi()"],
      });
      readTxStub = Sinon.stub(chainReader, "readTx");
    });
    it("happy", async () => {
      const assetId = mkAddress("0xc3");
      const data = "0x123456789";
      const tokenPrice = "5812471953821";
      interfaceStub.encodeFunctionData.returns(data);
      readTxStub.resolves(tokenPrice);
      const result = await chainReader.getTokenPriceFromOnChain(TEST_SENDER_CHAIN_ID, assetId);
      expect(result.toString()).to.be.eq(tokenPrice);
      // Verify the oracle lookup, the encoded call, and the readTx payload.
      expect(getDeployedPriceOracleContractStub.getCall(0).args).to.deep.eq([TEST_SENDER_CHAIN_ID]);
      expect(interfaceStub.encodeFunctionData.getCall(0).args).to.deep.eq(["getTokenPrice", [assetId]]);
      expect(readTxStub.getCall(0).args[0]).to.deep.eq({
        chainId: TEST_SENDER_CHAIN_ID,
        to: priceOracleContractFakeAddr,
        data,
      });
    });
    it("should throw ChainNotSupported if chain not supported for token pricing", async () => {
      // No deployed oracle for the chain -> typed rejection expected.
      getDeployedPriceOracleContractStub.returns(undefined);
      await expect(chainReader.getTokenPriceFromOnChain(TEST_SENDER_CHAIN_ID, mkAddress("0xa1"))).to.be.rejectedWith(
        ChainNotSupported,
      );
    });
  });
  // Tests for ChainReader#calculateGasFeeInReceivingToken: the total fee is the
  // sum of the sender-side fulfill fee and the receiver-side prepare fee.
  describe("#calculateGasFeeInReceivingToken", () => {
    let calculateGasFeeStub: SinonStub;
    beforeEach(() => {
      // calculateGasFee is private; stub it through an any-cast.
      calculateGasFeeStub = Sinon.stub(chainReader as any, "calculateGasFee");
    });
    it("happy: should return sum of both chains calculations'", async () => {
      const gasFeeSenderFulfill = BigNumber.from(124098148);
      const gasFeeReceiverPrepare = BigNumber.from(1151259044);
      const expectedTotal = gasFeeReceiverPrepare.add(gasFeeSenderFulfill);
      const sendingAssetId = mkAddress("0xa1");
      const receivingAssetId = mkAddress("0xb2");
      calculateGasFeeStub.onFirstCall().resolves(gasFeeSenderFulfill);
      calculateGasFeeStub.onSecondCall().resolves(gasFeeReceiverPrepare);
      const result = await chainReader.calculateGasFeeInReceivingToken(
        TEST_SENDER_CHAIN_ID,
        sendingAssetId,
        TEST_RECEIVER_CHAIN_ID,
        receivingAssetId,
        18,
        undefined,
        requestContextMock,
      );
      expect(result.toNumber()).to.eq(expectedTotal.toNumber());
      // First call covers the sender chain ("fulfill"); only the first five
      // args are asserted, trailing args carry the request context.
      expect(calculateGasFeeStub.getCall(0).args.slice(0, 5)).to.deep.eq([
        TEST_SENDER_CHAIN_ID,
        sendingAssetId,
        18,
        "fulfill",
        false,
      ]);
      // Second call covers the receiver chain ("prepare").
      expect(calculateGasFeeStub.getCall(1).args.slice(0, 5)).to.deep.eq([
        TEST_RECEIVER_CHAIN_ID,
        receivingAssetId,
        18,
        "prepare",
        false,
      ]);
    });
  });
describe("#calculateGasFeeInReceivingToken", () => {
let calculateGasFeeStub: SinonStub;
beforeEach(() => {
calculateGasFeeStub = Sinon.stub(chainReader as any, "calculateGasFee");
});
it("happy: should call calculateGasFee for fulfill", async () => {
const gasFee = BigNumber.from(71221304);
const assetId = mkAddress("0xb2");
calculateGasFeeStub.onFirstCall().resolves(gasFee);
const result = await chainReader.calculateGasFeeInReceivingTokenForFulfill(
TEST_RECEIVER_CHAIN_ID,
assetId,
18,
undefined,
requestContextMock,
);
expect(result.toNumber()).to.eq(gasFee.toNumber());
expect(calculateGasFeeStub.getCall(0).args.slice(0, 5)).to.deep.eq([
TEST_RECEIVER_CHAIN_ID,
assetId,
18,
"fulfill",
false,
]);
});
});
  // Tests for ChainReader#calculateGasFee: fee = gasPrice * gasLimit converted
  // from ETH price into the token price, with special-case gas limits for
  // chainId 10 (Optimism-style L2) and zero when no oracle is configured.
  describe("#calculateGasFee", () => {
    const testEthPrice = utils.parseEther("31");
    const testTokenPrice = utils.parseEther("7");
    const testGasPrice = utils.parseUnits("5", "gwei");
    let chainsPriceOraclesStub: SinonStub;
    let tokenPriceStub: SinonStub;
    let gasPriceStub: SinonStub;
    beforeEach(() => {
      // Only chain 1 has a price oracle by default; individual tests override.
      chainsPriceOraclesStub = Sinon.stub(contractFns, "CHAINS_WITH_PRICE_ORACLES").value([1]);
      tokenPriceStub = Sinon.stub(chainReader, "getTokenPrice");
      gasPriceStub = Sinon.stub(chainReader, "getGasPrice");
      // First token-price call returns the native (ETH) price, second the asset price.
      tokenPriceStub.onFirstCall().resolves(BigNumber.from(testEthPrice));
      tokenPriceStub.onSecondCall().resolves(BigNumber.from(testTokenPrice));
      gasPriceStub.onFirstCall().resolves(BigNumber.from(testGasPrice));
    });
    it("happy: should calculate for prepare if chain included and prepare specified", async () => {
      const result = await chainReader.calculateGasFee(
        1,
        mkAddress("0x0"),
        18,
        "prepare",
        false,
        undefined,
        requestContextMock,
      );
      // Expected values below are precomputed from the fixed prices above.
      expect(result.toNumber()).to.be.eq(5155432857142857);
    });
    it("happy: should calculate for fulfill if chain included and fulfill specified", async () => {
      const result = await chainReader.calculateGasFee(
        1,
        mkAddress("0x0"),
        18,
        "fulfill",
        false,
        undefined,
        requestContextMock,
      );
      expect(result.toNumber()).to.be.eq(5524908571428571);
    });
    it("should return zero if price oracle isn't configured for that chain", async () => {
      const result = await chainReader.calculateGasFee(
        TEST_SENDER_CHAIN_ID,
        mkAddress("0x0"),
        18,
        "prepare",
        false,
        undefined,
        requestContextMock,
      );
      expect(result.toNumber()).to.be.eq(0);
    });
    it("special case for chainId 10 prepare", async () => {
      chainsPriceOraclesStub.value([1, 10]);
      gasPriceStub.resolves(testGasPrice);
      const result = await chainReader.calculateGasFee(
        10,
        mkAddress("0x0"),
        18,
        "prepare",
        false,
        undefined,
        requestContextMock,
      );
      expect(result.toNumber()).to.be.eq(5612085000000000);
    });
    it("special case for chainId 10 fulfill", async () => {
      chainsPriceOraclesStub.value([1, 10]);
      gasPriceStub.resolves(testGasPrice);
      const result = await chainReader.calculateGasFee(
        10,
        mkAddress("0x0"),
        18,
        "fulfill",
        false,
        undefined,
        requestContextMock,
      );
      expect(result.toNumber()).to.be.eq(5834133571428571);
    });
    it("special case for chainId 10 cancel", async () => {
      chainsPriceOraclesStub.value([1, 10]);
      gasPriceStub.resolves(testGasPrice);
      const result = await chainReader.calculateGasFee(
        10,
        mkAddress("0x0"),
        18,
        "cancel",
        false,
        undefined,
        requestContextMock,
      );
      expect(result.toNumber()).to.be.eq(4832368571428571);
    });
  });
  // Tests for ChainReader#isSupportedChain.
  describe("#isSupportedChain", () => {
    it("should return false for unsupported chain", async () => {
      // 111111 is not present in the configured chains.
      expect(chainReader.isSupportedChain(111111)).to.be.false;
    });
  });
  // Tests for the private getProvider lookup, exercised via readTx.
  describe("#getProvider", () => {
    it("errors if cannot get provider", async () => {
      // Replacing this method with the original fn not working.
      // (restore undoes the suite-wide stub so the real lookup runs)
      (chainReader as any).getProvider.restore();
      await expect(chainReader.readTx({ ...TEST_TX, chainId: 9999 })).to.be.rejectedWith(ProviderNotConfigured);
    });
  });
  // Tests for the private setupProviders config validation.
  describe("#setupProviders", () => {
    it("throws if not a single provider config is provided for a chainId", async () => {
      (chainReader as any).config = {
        [TEST_SENDER_CHAIN_ID.toString()]: {
          // Providers list here should never be empty.
          providers: [],
          confirmations: 1,
          gasStations: [],
        },
      };
      expect(() => (chainReader as any).setupProviders(context, signer)).to.throw(ConfigurationError);
    });
  });
}); | the_stack |
import * as R from 'ramda';
import {Size, RGBColor, Vec2D} from '@compiler/core/types';
import {reverseByte} from '@compiler/core/utils/bits';
import {uuidX86Device} from '../../types';
import {VirtualMemBlockDriver} from '../../memory/VirtualMemBlockDriver';
import {ByteMemRegionAccessor} from '../../memory/MemoryRegion';
import {X86CPU} from '../../X86CPU';
import {VGAExternalRegs} from './VGAExternalRegs';
import {VGACrtcRegs} from './VGACrtcRegs';
import {VGADacRegs} from './VGADacRegs';
import {VGASequencerRegs} from './VGASequencerRegs';
import {VGAAttrRegs} from './VGAAttrRegs';
import {
VGAGraphicsRegs,
MemoryMapSelectType,
} from './VGAGraphicsRegs';
import {
GRAPHICS_MEMORY_MAPS,
GRAPHICS_RESERVED_MEM_MAP,
VGA_BANK_SIZE,
VGA_BUFFER_SIZE,
VGA_TOTAL_PLANES,
VGA_PIXEL_MEM_MAP,
VGA_CHARSET_SIZE,
VGA_CHAR_BYTE_SIZE,
GRAPHICS_ALU_OPS,
VGA_CHARSET_BANK_SIZE,
VGAFontPack,
GraphicsWriteMode,
} from './VGAConstants';
import {
VGA_8X16_FONT,
VGA_8X8_FONT,
assignPresetToVGA,
VGA256Palette,
} from './VGAModesPresets';
import {
VGACanvasRenderer,
VGATextModeCanvasRenderer,
VGAGraphicsModeCanvasRenderer,
VGAPixBufCanvasRenderer,
} from './Renderers';
type VGAMeasuredState = {
size: Size;
};
type VGATextModeState = VGAMeasuredState & {
charSize: Size,
};
type VGAGraphicsModeState = VGAMeasuredState & {
virtualSize: Size,
};
/**
 * Holds the DAC palette used by 256-color VGA modes.
 */
class VGA256State {
  palette: RGBColor[];

  constructor(palette: RGBColor[] = VGA256Palette) {
    this.palette = palette;
  }
}
/**
* Basic graphics device
*
* @see {@link https://github.com/awesomekling/computron/blob/master/hw/vga.cpp}
* @see {@link https://github.com/copy/v86/blob/master/src/vga.js}
* @see {@link https://github.com/asmblah/jemul8/blob/feature/acceptance/js/core/classes/iodev/vga.js}
*
* @export
* @abstract
* @class VGA
* @extends {uuidX86Device<X86CPU>('vga')}
*/
export class VGA extends uuidX86Device<X86CPU>('vga') implements ByteMemRegionAccessor {
/* DOM */
private screenElement: HTMLElement;
/* VGA buffers */
private vga256: VGA256State;
private latch: number;
private renderer: VGACanvasRenderer;
private renderers: VGACanvasRenderer[];
/* size */
private pixelScreenSize: Size = new Size(0, 0);
private pixelScreenUpscaleWidth: number = null;
private textModeState: VGATextModeState;
private graphicsModeState: VGAGraphicsModeState;
/* graphics buffers */
private vgaBuffer: VirtualMemBlockDriver;
private planes: Uint8Array[];
private pixelBuffer: Uint8Array;
/* regs */
externalRegs: VGAExternalRegs;
graphicsRegs: VGAGraphicsRegs;
crtcRegs: VGACrtcRegs;
dacRegs: VGADacRegs;
sequencerRegs: VGASequencerRegs;
attrRegs: VGAAttrRegs;
/**
* Getters used only in text mode
*/
get textMem() { return this.planes[0]; }
get textAttrsMem() { return this.planes[1]; }
get textFontMem() { return this.planes[2]; }
/**
* Allocates memory, creates regsiters
*
* @memberof VideoAdapter
*/
init() {
this.reset();
this.initPorts();
}
release() {
super.release();
this.renderer?.release(); // eslint-disable-line no-unused-expressions
}
private initPorts(): void {
const {
crtcRegs, externalRegs, attrRegs,
sequencerRegs, dacRegs, vga256,
graphicsRegs,
} = this;
// CRT Controller Index Register
this.mountPortsHandler([0x3B4, 0x3D4], {
get: () => crtcRegs.indexReg,
set: (value) => {
crtcRegs.indexReg = value & 0x3f;
},
});
// CRT Controller Data Register
this.mountPortsHandler([0x3B5, 0x3D5], {
get: () => crtcRegs.getRegByIndex(),
set: (value) => { crtcRegs.setRegByIndex(value); },
});
// VGA Input Status Register 1
this.mountPortsHandler([0x3BA, 0x3DA], {
get: () => {
const {inputStatus1} = externalRegs;
attrRegs.next3c0IsIndex = true;
if ((inputStatus1.number & 0x01) !== 0)
inputStatus1.number &= ~0x30;
else
inputStatus1.number ^= 0x30;
return inputStatus1.number;
},
});
// Attribute Controller Index Register
this.mountPortsHandler(0x3C0, {
get: () => attrRegs.indexReg,
set: (value) => {
if (attrRegs.next3c0IsIndex) {
attrRegs.indexReg = value & 0x1f;
attrRegs.attrAddressReg.pas = (value & 0x20) >> 5;
} else
attrRegs.setRegByIndex(value);
attrRegs.next3c0IsIndex = !attrRegs.next3c0IsIndex;
},
});
// Attribute Controller Data Register
this.mountPortsHandler(0x3C1, {get: () => attrRegs.getRegByIndex()});
// VGA Input Status Register 0
this.mountPortsHandler(0x3C2, {
get: () => externalRegs.inputStatus0.number,
set: (value) => { externalRegs.miscReg.number = value; },
});
// VGA Sequencer Index register
this.mountPortsHandler(0x3C4, {
get: () => sequencerRegs.indexReg,
set: (value) => { sequencerRegs.indexReg = value; },
});
// VGA Sequencer Data register
this.mountPortsHandler(0x3C5, {
get: () => sequencerRegs.getRegByIndex(),
set: (value) => { sequencerRegs.setRegByIndex(value); },
});
// DAC Pixel Data Mask Register
this.mountPortsHandler(0x3C6, {
get: () => dacRegs.pixelMask,
set: (value) => { dacRegs.pixelMask = value; },
});
// DAC State Register
this.mountPortsHandler(0x3C7, {
get: () => dacRegs.stateReg.number,
set: (value) => {
dacRegs.colorIndexRead = value * 3;
dacRegs.stateReg.number &= 0x0;
},
});
// DAC Palette Write Index Register
this.mountPortsHandler(0x3C8, {
get: () => (dacRegs.colorIndexWrite / 3) | 0,
set: (value) => {
dacRegs.colorIndexWrite = value * 3;
dacRegs.stateReg.number |= 0x3;
},
});
// DAC Palette Data Register
this.mountPortsHandler(0x3C9, {
get: () => {
const index = dacRegs.colorIndexRead / 3 | 0;
const offset = dacRegs.colorIndexRead % 3;
const color = vga256.palette[index].toNumber();
dacRegs.colorIndexRead++;
return ((color >> ((2 - offset) * 8 & 0xFF)) / 255) * 63 | 0;
},
set: (value) => {
const {palette} = this.vga256;
const index = dacRegs.colorIndexWrite / 3 | 0;
const offset = dacRegs.colorIndexWrite % 3;
let color = palette[index].toNumber();
value = (((value & 0x3F) * 255) / 63) | 0;
if (offset === 0)
color = (color & ~0xFF0000) | (value << 16);
else if (offset === 1)
color = (color & ~0xFF00) | (value << 8);
else
color |= (color & ~0xFF);
palette[index] = RGBColor.fromNumber(color);
dacRegs.colorIndexWrite++;
},
});
// VGA Miscellaneous Output Register
this.mountPortsHandler(0x3CC, {get: () => externalRegs.miscReg.number});
// Graphics Controller Index Register
this.mountPortsHandler(0x3CE, {
get: () => graphicsRegs.indexReg,
set: (value) => { graphicsRegs.indexReg = value; },
});
// Graphics Controller Data Register
this.mountPortsHandler(0x3CF, {
get: () => graphicsRegs.getRegByIndex(),
set: (value) => { graphicsRegs.setRegByIndex(value); },
});
}
getPlanes(): Uint8Array[] { return this.planes; }
getPixelBuffer(): Uint8Array { return this.pixelBuffer; }
getPixelScreenSize(): Readonly<Size> { return this.pixelScreenSize; }
getPixelUpscaleWidth(): number { return this.pixelScreenUpscaleWidth; }
getGraphicsModeState(): Readonly<VGAGraphicsModeState> { return this.graphicsModeState; }
getVGA256State(): Readonly<VGA256State> { return this.vga256; }
getCurrentRenderer(): VGACanvasRenderer { return this.renderer; }
getScreenElement(): HTMLElement { return this.screenElement; }
setScreenElement(
{
upscaleWidth,
screenElement,
}: {
upscaleWidth?: number,
screenElement: HTMLElement,
},
): void {
this.screenElement = screenElement;
this.pixelScreenUpscaleWidth = upscaleWidth;
this.matchPixBufRenderer();
}
/**
* Text mode attributes
*/
getTextModeState(): Readonly<VGATextModeState> { return this.textModeState; }
  // Moves the hardware text cursor. The stored address biases the row by +1,
  // mirroring the -1 applied in getTextCursorLocation — NOTE(review): confirm
  // this bias matches what BIOS/consumers expect.
  setCursorLocation(vec: Vec2D): void {
    const {textModeState, crtcRegs} = this;
    crtcRegs.cursorLocation.number = (vec.y + 1) * textModeState.size.w + vec.x;
  }
  // Decodes the CRTC cursor address back into (column, row); the -1 on the
  // row undoes the +1 bias applied by setCursorLocation.
  getTextCursorLocation(): Vec2D {
    const {textModeState, crtcRegs} = this;
    const cursorAddress = crtcRegs.cursorLocation.number;
    return new Vec2D(
      cursorAddress % textModeState.size.w,
      Math.floor(cursorAddress / textModeState.size.w) - 1,
    );
  }
/**
* Graphics mode attributes
*/
  /**
   * Computes how many bits a VRAM address is shifted when mapped to pixels
   * (result 0-3), derived from bit 6 of the underline location, CRTC mode
   * control and attribute mode control registers.
   *
   * @returns {number} shift count in range 0-3
   */
  getAddressShiftCount(): number {
    const {
      attrRegs,
      crtcRegs: {
        underlineLocation,
        crtcModeControlReg,
      },
    } = this;
    // start from 0x80 (== 2 after >>> 6) and nudge by the three mode bits
    let shift = 0x80;
    shift += ~underlineLocation.number & crtcModeControlReg.number & 0x40;
    shift -= underlineLocation.number & 0x40;
    shift -= attrRegs.attrModeControlReg.number & 0x40;
    return shift >>> 6;
  }
  /**
   * Returns the number of VRAM bytes making up one scan line, based on the
   * CRTC offset register and the byte/word/doubleword addressing mode.
   *
   * @returns {number} bytes per scan line
   */
  getBytesPerLine(): number {
    const {
      crtcRegs: {
        offsetReg,
        underlineLocation,
        crtcModeControlReg,
      },
    } = this;
    let bytes = offsetReg << 2;
    // doubleword mode doubles the width, word mode halves it
    if (underlineLocation.dw)
      bytes <<= 1;
    else if (crtcModeControlReg.wordByte)
      bytes >>>= 1;
    return bytes;
  }
getStartAddress(): number {
const {crtcRegs} = this;
return crtcRegs.startAddress.number;
}
/**
* Iterates over pix buf renderers and takes first which matches
*
* @memberof VGA
*/
matchPixBufRenderer() {
if (!this.screenElement)
return;
// initialize pixel buffers on first call
if (!this.renderers) {
this.renderers = [
new VGATextModeCanvasRenderer(this),
new VGAGraphicsModeCanvasRenderer(this),
];
}
// search in list
const newRenderer = this.renderers.find(
(renderer) => renderer.isSuitable(),
);
/* eslint-disable no-unused-expressions */
if (newRenderer !== this.renderer) {
this.renderer?.release();
this.renderer = newRenderer;
this.renderer.alloc();
}
/* eslint-enable no-unused-expressions */
}
/**
* Resets values inside regs
*
* @memberof VGA
*/
reset() {
this.externalRegs = new VGAExternalRegs;
this.graphicsRegs = new VGAGraphicsRegs;
this.crtcRegs = new VGACrtcRegs;
this.dacRegs = new VGADacRegs;
this.sequencerRegs = new VGASequencerRegs;
this.attrRegs = new VGAAttrRegs;
/* Other */
this.vga256 = new VGA256State;
this.latch = 0;
this.textModeState = {
size: new Size(0, 0),
charSize: new Size(0, 0),
};
this.graphicsModeState = {
size: new Size(0, 0),
virtualSize: new Size(0, 0),
};
}
  /**
   * Loads a preset stored in VGA_TEXT_MODES_PRESET / VGA_GRAPHICS_MODES_PRESET
   * and re-initializes everything that depends on the mode.
   *
   * @param {number[]} preset register values to assign
   * @memberof VGA
   */
  loadModePreset(preset: number[]): void {
    assignPresetToVGA(this, preset);
    // Post reset callbacks: reallocate VRAM, recompute geometry, pick renderer
    this.allocPlanesBuffers();
    this.measureMode();
    this.matchPixBufRenderer();
    // load predefined data for VGA Text mode
    if (this.textMode)
      this.loadTextModeDefaults();
  }
/**
* Loads some default binaries into text mode mem
*
* @private
* @memberof VGA
*/
private loadTextModeDefaults(): void {
const {
textModeState: {
size,
},
textAttrsMem,
textModeState,
} = this;
this.writeFontPack(
textModeState.charSize.h === 16
? VGA_8X16_FONT
: VGA_8X8_FONT,
);
// set default foreground color
for (let i = 0; i < size.w * size.h; ++i)
textAttrsMem[i] = 0x7;
}
  /**
   * Writes VGA text font into plane 2
   *
   * @see {@link https://files.osdev.org/mirrors/geezer/osd/graphics/modes.c}
   *
   * @param {VGAFontPack} font source glyph data (one byte per glyph row)
   * @param {number} [fontIndex=0] charset bank to overwrite
   * @memberof VGA
   */
  writeFontPack(font: VGAFontPack, fontIndex: number = 0): void {
    const {charSize: {h: charH}, data} = font;
    const plane = this.planes[2];
    const fontOffset = fontIndex * VGA_CHARSET_BANK_SIZE;
    for (let charIndex = 0; charIndex < VGA_CHARSET_SIZE; ++charIndex) {
      for (let row = 0; row < charH; ++row) {
        // each glyph occupies a fixed VGA_CHAR_BYTE_SIZE slot in the plane
        const charDestOffset = fontOffset + VGA_CHAR_BYTE_SIZE * charIndex + row;
        const charTemplateOffset = charH * charIndex + row;
        // bit order is reversed relative to the source font data
        plane[charDestOffset] = reverseByte(data[charTemplateOffset]);
      }
    }
  }
  /**
   * Scroll text in plane 0 to UP
   *
   * Shifts each row's characters (plane 0) and attributes (plane 1) up by
   * `lines` rows within the given page.
   *
   * NOTE(review): only the last visible row is cleared regardless of `lines`,
   * and the copy source `(y + lines)` can read past the page — confirm this
   * is intended for lines > 1.
   *
   * @param {number} [lines=0x1] number of rows to scroll by
   * @param {number} [page=0x0] text page index
   * @memberof VGA
   */
  scrollTextUp(lines: number = 0x1, page: number = 0x0): void {
    const {textModeState, planes} = this;
    const {w, h} = textModeState.size;
    const offset = this.getStartAddress();
    const pageSize = w * h;
    const startOffset = offset + pageSize * page;
    const [textMem, attrMem] = planes;
    /** Copy previous lines memory */
    for (let y = 0; y < h; ++y) {
      const last = y + 1 === h;
      for (let x = 0; x < w; ++x) {
        const dest = startOffset + y * w + x;
        const src = startOffset + (y + lines) * w + x;
        if (last) {
          // blank char with default light-gray attribute
          textMem[dest] = 0;
          attrMem[dest] = 0x7;
        } else {
          textMem[dest] = textMem[src];
          attrMem[dest] = attrMem[src];
        }
      }
    }
  }
  /**
   * Creates VRAM buffers / planes
   *
   * Allocates one backing buffer and exposes the pixel buffer plus the four
   * bit planes as views over it.
   *
   * NOTE(review): the pixel buffer view uses `size - 1` as byteLength —
   * verify the off-by-one is intentional and not dropping the last byte.
   *
   * @private
   * @memberof VGA
   */
  private allocPlanesBuffers() {
    this.vgaBuffer = VirtualMemBlockDriver.alloc(VGA_BUFFER_SIZE);
    this.pixelBuffer = new Uint8Array(this.vgaBuffer.device.buffer, VGA_PIXEL_MEM_MAP.low, VGA_PIXEL_MEM_MAP.size - 1);
    this.planes = R.times(
      (index) => new Uint8Array(this.vgaBuffer.device.buffer, index * VGA_BANK_SIZE, VGA_BANK_SIZE),
      VGA_TOTAL_PLANES,
    );
  }
  /**
   * Calculates mode size:
   * - text mode in cols / rows
   * - graphical mode in width / height (px)
   *
   * @see {@link https://github.com/copy/v86/blob/master/src/vga.js#L1164}
   *
   * @private
   * @memberof VGA
   */
  private measureMode() {
    const {
      textMode, textModeState,
      graphicsModeState, pixelScreenSize,
      crtcRegs, attrRegs, sequencerRegs,
    } = this;
    // visible area is clipped by the blanking registers
    const horizontalCharacters = Math.min(
      1 + crtcRegs.endHorizontalDisplayReg,
      crtcRegs.startHorizontalBlankingReg,
    );
    let verticalScans = Math.min(
      1 + crtcRegs.getVerticalDisplayEnd(),
      crtcRegs.getVerticalBlankingStart(),
    );
    // registers not programmed yet — keep previous measurements
    if (!horizontalCharacters || !verticalScans)
      return;
    // text mode
    if (textMode) {
      const {maxScanLineReg} = crtcRegs;
      const {sd, maxScanLine} = maxScanLineReg;
      // doubling (scan-double bit halves the effective scan count)
      if (sd)
        verticalScans >>>= 1;
      // sets size
      textModeState.size.w = horizontalCharacters;
      textModeState.size.h = verticalScans / (1 + maxScanLine) | 0;
      // sets single character size
      // 1 if 8 dots mode
      textModeState.charSize.w = 9 - sequencerRegs.clockingModeReg.dotMode8or9;
      textModeState.charSize.h = maxScanLine + 1;
      // calculates total canvas size
      pixelScreenSize.w = textModeState.charSize.w * textModeState.size.w;
      pixelScreenSize.h = textModeState.charSize.h * textModeState.size.h;
    } else {
      // graphics mode: 8 pixels per character clock horizontally
      const screenSize = new Size(
        horizontalCharacters << 3,
        this.scanLineToRow(verticalScans),
      );
      const virtualSize = new Size(
        crtcRegs.offsetReg << 4,
        Math.ceil(GRAPHICS_MEMORY_MAPS[0b00].size / this.getBytesPerLine()),
      );
      // 8-bit color (256-color) modes halve the effective horizontal resolution
      if (attrRegs.attrModeControlReg.bit8) {
        screenSize.w >>>= 1;
        virtualSize.w >>>= 1;
      }
      graphicsModeState.size.assign(screenSize);
      graphicsModeState.virtualSize.assign(virtualSize);
      pixelScreenSize.assign(screenSize);
    }
  }
  /**
   * Converts scan line to row number
   *
   * Accounts for double scanning and the CRTC address-map bits 13/14.
   *
   * @private
   * @param {number} scanLine
   * @returns {number}
   * @memberof VGA
   */
  private scanLineToRow(scanLine: number): number {
    const {crtcRegs: {maxScanLineReg, crtcModeControlReg}} = this;
    // double scanning
    if (maxScanLineReg.sd)
      scanLine >>>= 1;
    scanLine = Math.ceil(scanLine / (1 + (maxScanLineReg.maxScanLine)));
    // map13/map14 cleared means rows are doubled per the CRT addressing mode
    if (!crtcModeControlReg.map13)
      scanLine <<= 1;
    if (!crtcModeControlReg.map14)
      scanLine <<= 1;
    return scanLine;
  }
/**
* Get flag if VGA is in text mode
*
* @readonly
* @type {boolean}
* @memberof VGA
*/
get textMode(): boolean {
return this.graphicsRegs.miscGraphicsReg.alphanumericModeDisable === 0x0;
}
/**
* Get current CPU mapped mem region
*
* @readonly
* @type {MemoryMapSelectType}
* @memberof VGA
*/
get memoryMapSelect(): MemoryMapSelectType {
return this.graphicsRegs.miscGraphicsReg.memoryMapSelect;
}
  /**
   * Writes a byte into VRAM through the CPU memory window, dispatching to
   * text or graphics mode handlers.
   *
   * @todo Add write to VRAM
   *
   * @param {number} address absolute CPU address
   * @param {number} value byte to store
   * @returns {number} 1 when the write was handled, null when out of range
   * @memberof VGA
   */
  writeByte(address: number, value: number): number {
    if (!GRAPHICS_RESERVED_MEM_MAP.contains(address))
      return null;
    // RAM enable bit gates all CPU access to VRAM
    const {miscReg} = this.externalRegs;
    if (!miscReg.ramEnable)
      return null;
    // address must fall in the currently mapped CPU window
    const {memoryMapSelect} = this;
    const mode = GRAPHICS_MEMORY_MAPS[memoryMapSelect];
    if (!mode.contains(address))
      return null;
    const offset = address - mode.low;
    /** TEXT MODE */
    if (this.textMode) {
      this.writeTextMode(offset, value);
      return 1;
    }
    /** GRAPHICAL MODE */
    this.writeGraphicsMode(offset, value);
    return 1;
  }
  /**
   * Read value from vram
   *
   * Handles text mode reads, loads the 32-bit latch from all four planes,
   * and implements graphics read mode 0 (plane select) and read mode 1
   * (color compare).
   *
   * @param {number} address absolute CPU address
   * @returns {number} byte value, or null when out of range
   * @memberof VGA
   */
  readByte(address: number): number {
    if (!GRAPHICS_RESERVED_MEM_MAP.contains(address))
      return null;
    const {memoryMapSelect, planes, vgaBuffer} = this;
    const mode = GRAPHICS_MEMORY_MAPS[memoryMapSelect];
    if (!mode.contains(address))
      return null;
    let offset = address - mode.low;
    /** TEXT MODE */
    if (this.textMode)
      return this.readTextMode(offset);
    /** GRAPHICS MODE */
    // every read refreshes the 32-bit latch with one byte from each plane
    this.latch = (
      planes[0][offset]
      | (planes[1][offset] << 8)
      | (planes[2][offset] << 16)
      | (planes[3][offset] << 24)
    );
    const {memModeReg} = this.sequencerRegs;
    const {
      readMapSelectReg,
      graphicsModeReg,
      colorDontCareReg,
    } = this.graphicsRegs;
    // read mode 1
    // NOTE(review): reference implementations XOR against the Color Compare
    // register here; this XORs against colorDontCare (always truthy inside
    // each guard, so the ternary is constant) — verify against FreeVGA /
    // v86 read mode 1 semantics.
    if (graphicsModeReg.readMode) {
      const {colorDontCare} = colorDontCareReg;
      let data = 0xFF;
      if (colorDontCare & 0x1) data &= planes[0][offset] ^ ~(colorDontCare & 0x1 ? 0xFF : 0x00);
      if (colorDontCare & 0x2) data &= planes[1][offset] ^ ~(colorDontCare & 0x2 ? 0xFF : 0x00);
      if (colorDontCare & 0x4) data &= planes[2][offset] ^ ~(colorDontCare & 0x4 ? 0xFF : 0x00);
      if (colorDontCare & 0x8) data &= planes[3][offset] ^ ~(colorDontCare & 0x8 ? 0xFF : 0x00);
      return data;
    }
    // read mode 0: plane comes from read map select, or from the address's
    // low bits in chain-4 / odd-even addressing
    let plane = readMapSelectReg.readMapSelect;
    if (memModeReg.chain4) {
      plane = offset & 0x3;
      offset &= ~0x3;
    } else if (graphicsModeReg.hostOddEvenMemoryReadAddrEnable) {
      plane = offset & 0x1;
      offset &= ~0x1;
    }
    return vgaBuffer.device[(plane << 16) | offset];
  }
/**
* Writes single character to text memory
*
* @see {@link http://www.scs.stanford.edu/09wi-cs140/pintos/specs/freevga/vga/vgatext.htm}
*
* @param {number} address
* @param {number} byte
* @memberof VGA
*/
writeTextMode(address: number, byte: number): void {
const {planes} = this;
if (address % 2 === 0)
planes[0][address >> 1] = byte;
else
planes[1][(address - 1) >> 1] = byte;
}
/**
* Reads single byte from text mem
*
* @param {number} address
* @returns {number}
* @memberof VGA
*/
readTextMode(address: number): number {
const {planes} = this;
if (address % 2 === 0)
return planes[0][address >> 1];
return planes[1][(address - 1) >> 1];
}
/**
* Repeats 8 bit number 4 times in 32 number
*
* @static
* @param {number} byte
* @returns {number}
* @memberof VGA
*/
static repeatByteInDword(byte: number): number {
return (
byte
| (byte << 8)
| (byte << 16)
| (byte << 24)
);
}
/**
* Watches bits 0 to 3, each bit is 0xFF in output
* so:
* 0b11 => 0xFF_FF, 0b111 => 0xFF_FF_FF
*
* @static
* @param {number} byte
* @returns {number}
* @memberof VGA
*/
static applyExpand(byte: number): number {
let dword = byte & 0x1 ? 0xFF : 0x00;
dword |= (byte & 0x2 ? 0xFF : 0x00) << 8;
dword |= (byte & 0x4 ? 0xFF : 0x00) << 16;
dword |= (byte & 0x8 ? 0xFF : 0x00) << 24;
return dword;
}
  /**
   * Barrel Shifter
   *
   * Rotates the byte right by dataRotateReg.rotateCount: the byte is
   * duplicated into the high lane so the right shift wraps the low bits.
   *
   * @see {@link http://www.phatcode.net/res/224/files/html/ch25/25-01.html#Heading3}
   * @see {@link http://www.osdever.net/FreeVGA/vga/graphreg.htm#03}
   *
   * @param {number} byte
   * @returns {number} rotated byte (0-255)
   * @memberof VGA
   */
  applyGraphicsRegRotate(byte: number): number {
    const {rotateCount} = this.graphicsRegs.dataRotateReg;
    const wrapped = byte | (byte << 8);
    const count = rotateCount & 0x7;
    const shifted = wrapped >>> count;
    return shifted & 0xFF;
  }
  /**
   * Set / Reset Circuitry
   *
   * For planes selected by the Enable Set/Reset register, forces all bits of
   * the corresponding byte lane to the Set/Reset bit; unselected planes pass
   * through unchanged.
   *
   * @see {@link http://www.phatcode.net/res/224/files/html/ch25/25-03.html#Heading5}
   * @see {@link http://www.osdever.net/FreeVGA/vga/graphreg.htm#00}
   *
   * @param {number} value 32-bit value (one byte lane per plane)
   * @returns {number}
   * @memberof VGA
   */
  applySetResetReg(value: number): number {
    const {
      graphicsRegs: {
        enableSetResetReg,
        setResetReg,
      },
    } = this;
    const setResetDword = VGA.applyExpand(setResetReg.number);
    const enableSetResetDword = VGA.applyExpand(enableSetResetReg.number);
    // set bits where (enabled & set/reset=1), clear where (enabled & set/reset=0)
    value |= enableSetResetDword & setResetDword;
    value &= ~enableSetResetDword | setResetDword;
    return value;
  }
  /**
   * Perform logical operation based on dataRotateReg. ALU unit
   *
   * Combines the written value with the latch using the ALU op
   * (pass/AND/OR/XOR) selected by the Data Rotate register.
   *
   * @param {number} value
   * @returns {number}
   * @memberof VGA
   */
  applyLogicalReg(value: number): number {
    const {latch} = this;
    const {logicalOperation} = this.graphicsRegs.dataRotateReg;
    return GRAPHICS_ALU_OPS[logicalOperation](value, latch);
  }
/**
* Apply bitmask value from reg to value and latch
*
* @param {number} bitmask
* @param {number} value
* @returns {number}
* @memberof VGA
*/
applyLatchBitmask(bitmask: number, value: number): number {
const {latch} = this;
return (bitmask & value) | (~bitmask & latch);
}
  /**
   * Converts vga address to offset in pixel buffer
   *
   * When CRTC map13/map14 bits are active, rows are interleaved in VRAM and
   * the row index must be reassembled from address bits 13/14.
   *
   * @private
   * @param {number} address VRAM offset
   * @returns {number} offset in the pixel buffer
   * @memberof VGA
   */
  private vgaAddressToPixAddress(address: number): number {
    const shiftCount = this.getAddressShiftCount();
    const {
      crtcRegs: {
        crtcModeControlReg,
      },
    } = this;
    // interleaved addressing only when at least one of map13/map14 is clear
    if (~crtcModeControlReg.number & 0x3) {
      const {graphicsModeState: {virtualSize}} = this;
      const startAddress = this.getStartAddress();
      let pixelAddr = address - startAddress;
      // mask out the bits that carry the interleave information
      pixelAddr &= (crtcModeControlReg.number << 13) | ~0x6000;
      pixelAddr <<= shiftCount;
      // Decompose address
      let row = pixelAddr / virtualSize.w | 0;
      const col = pixelAddr % virtualSize.w;
      // rebuild the row from the interleave bits of the original address
      switch (crtcModeControlReg.number & 0x3) {
        case 0x2: row = (row << 1) | ((address >> 13) & 0x1); break;
        case 0x1: row = (row << 1) | ((address >> 14) & 0x1); break;
        case 0x0: row = (row << 2) | ((address >> 13) & 0x3); break;
        default:
      }
      return row * virtualSize.w + col + (startAddress << shiftCount);
    }
    return address << shiftCount;
  }
  /**
   * Write to memory in graphical mode
   *
   * Implements the four VGA write modes, plane masking (map mask / chain-4 /
   * odd-even addressing) and marks the affected pixel region dirty.
   *
   * @see {@link https://github.com/copy/v86/blob/master/src/vga.js#L681}
   * @see {@link http://www.osdever.net/FreeVGA/vga/graphreg.htm#05}
   *
   * @param {number} address VRAM offset
   * @param {number} byte value written by the CPU
   * @memberof VGA
   */
  writeGraphicsMode(address: number, byte: number): void {
    const {
      renderer,
      planes,
      latch,
      sequencerRegs: {
        memModeReg,
        mapMaskReg,
      },
      graphicsRegs: {
        graphicsModeReg,
        colorBitmaskReg,
        setResetReg,
      },
    } = this;
    const {writeMode} = graphicsModeReg;
    let bitmask = VGA.repeatByteInDword(colorBitmaskReg.bitmask);
    // choose write value
    let outputDword: number = 0x0;
    switch (writeMode) {
      // mode 0: rotate, set/reset, ALU, bitmask
      case GraphicsWriteMode.MODE_0:
        outputDword = this.applyGraphicsRegRotate(byte);
        outputDword = VGA.repeatByteInDword(outputDword);
        outputDword = this.applySetResetReg(outputDword);
        outputDword = this.applyLogicalReg(outputDword);
        outputDword = this.applyLatchBitmask(bitmask, outputDword);
        break;
      // mode 1: copy latch verbatim (plane-to-plane moves)
      case GraphicsWriteMode.MODE_1:
        outputDword = latch;
        break;
      // mode 2: expand CPU byte bits into plane colors
      case GraphicsWriteMode.MODE_2:
        outputDword = VGA.applyExpand(byte);
        outputDword = this.applyLogicalReg(outputDword);
        outputDword = this.applyLatchBitmask(bitmask, outputDword);
        break;
      // mode 3: CPU byte ANDs the bitmask, color comes from set/reset
      case GraphicsWriteMode.MODE_3:
        bitmask &= VGA.repeatByteInDword(this.applyGraphicsRegRotate(byte));
        outputDword = VGA.applyExpand(setResetReg.number);
        outputDword = this.applyLatchBitmask(bitmask, outputDword);
        break;
      default:
        console.warn(`VGA: unsupported write mode ${writeMode}!`);
    }
    // plane index select
    let planeSelect = 0xF;
    const {oddEventHostMemWriteAddressDisable, chain4} = memModeReg;
    if (oddEventHostMemWriteAddressDisable === 0x0) {
      // odd/even addressing: low address bit picks planes 0/2 or 1/3
      planeSelect = 0x5 << (address & 0x1);
      address &= ~0x1;
    } else if (chain4) {
      // chain-4: low two address bits pick a single plane
      planeSelect = 1 << (address & 0x3);
      address &= ~0x3;
    }
    // See: http://www.osdever.net/FreeVGA/vga/seqreg.htm#02
    planeSelect &= mapMaskReg.memPlaneWriteEnable;
    // write to mem
    if (planeSelect & 0x1) planes[0][address] = (outputDword >> 0) & 0xFF;
    if (planeSelect & 0x2) planes[1][address] = (outputDword >> 8) & 0xFF;
    if (planeSelect & 0x4) planes[2][address] = (outputDword >> 16) & 0xFF;
    if (planeSelect & 0x8) planes[3][address] = (outputDword >> 24) & 0xFF;
    // mark renderer region as dirty
    const pixelAddress = this.vgaAddressToPixAddress(address);
    (<VGAPixBufCanvasRenderer> renderer).markRegionAsDirty(pixelAddress, pixelAddress + 0x8);
  }
} | the_stack |
//@ts-check
///<reference path="devkit.d.ts" />
// Ambient typings for the Dynamics 365 "msdyn_actual" entity, generated by the
// DynamicsCrm.DevKit tooling (see the class JSDoc on Formmsdyn_actual_Information
// below). This is a generated artifact — regenerate rather than editing by hand.
declare namespace DevKit {
	// Typings for the "Information" main form of the msdyn_actual entity.
	namespace Formmsdyn_actual_Information {
		// Sections of the tab whose form-XML id is the GUID 88D21A95-2EBC-49E9-A568-6A7A0107BCEC.
		interface tab__88D21A95_2EBC_49E9_A568_6A7A0107BCEC_Sections {
			_88D21A95_2EBC_49E9_A568_6A7A0107BCEC_SECTION_10: DevKit.Controls.Section;
			_88D21A95_2EBC_49E9_A568_6A7A0107BCEC_SECTION_3: DevKit.Controls.Section;
			_88D21A95_2EBC_49E9_A568_6A7A0107BCEC_SECTION_4: DevKit.Controls.Section;
			_88D21A95_2EBC_49E9_A568_6A7A0107BCEC_SECTION_5: DevKit.Controls.Section;
			_88D21A95_2EBC_49E9_A568_6A7A0107BCEC_SECTION_6: DevKit.Controls.Section;
			_88D21A95_2EBC_49E9_A568_6A7A0107BCEC_SECTION_8: DevKit.Controls.Section;
			_88D21A95_2EBC_49E9_A568_6A7A0107BCEC_SECTION_9: DevKit.Controls.Section;
			Accounting: DevKit.Controls.Section;
		}
		// Sections of the "FieldService" tab.
		interface tab_FieldService_Sections {
			tab_2_section_1: DevKit.Controls.Section;
			tab_2_section_2: DevKit.Controls.Section;
		}
		interface tab__88D21A95_2EBC_49E9_A568_6A7A0107BCEC extends DevKit.Controls.ITab {
			Section: tab__88D21A95_2EBC_49E9_A568_6A7A0107BCEC_Sections;
		}
		interface tab_FieldService extends DevKit.Controls.ITab {
			Section: tab_FieldService_Sections;
		}
		// All tabs on the form, keyed by their form-XML name.
		interface Tabs {
			_88D21A95_2EBC_49E9_A568_6A7A0107BCEC: tab__88D21A95_2EBC_49E9_A568_6A7A0107BCEC;
			FieldService: tab_FieldService;
		}
		// Controls available on the form body. Member doc comments are the field
		// descriptions taken from the entity metadata; members without a comment
		// had no description in the metadata.
		interface Body {
			Tab: Tabs;
			/** Select the customer. */
			msdyn_AccountCustomer: DevKit.Controls.Lookup;
			msdyn_AccountingDate: DevKit.Controls.Date;
			msdyn_AccountVendor: DevKit.Controls.Lookup;
			/** Shows the adjustment status ID of the transaction. */
			msdyn_AdjustmentStatus: DevKit.Controls.OptionSet;
			msdyn_Agreement: DevKit.Controls.Lookup;
			/** Enter the amount in transaction currency. */
			msdyn_Amount: DevKit.Controls.Money;
			/** Select the method by which the amount was computed. */
			msdyn_AmountMethod: DevKit.Controls.OptionSet;
			/** Enter the cost amount of the sales transaction in the transaction currency. */
			msdyn_BasisAmount: DevKit.Controls.Money;
			/** Enter the cost quantity of the sales transaction in the base (organization) currency. */
			msdyn_BasisQuantity: DevKit.Controls.Decimal;
			/** Select the billing status ID. */
			msdyn_BillingStatus: DevKit.Controls.OptionSet;
			/** Select the billing type ID. */
			msdyn_BillingType: DevKit.Controls.OptionSet;
			/** Shows the bookable resource for which the actual is recorded. */
			msdyn_bookableresource: DevKit.Controls.Lookup;
			/** Select the customer contact. */
			msdyn_ContactCustomer: DevKit.Controls.Lookup;
			msdyn_ContactVendor: DevKit.Controls.Lookup;
			/** Select the organizational unit ID for the contract. */
			msdyn_contractorganizationalunitid: DevKit.Controls.Lookup;
			/** Select the customer type ID. */
			msdyn_CustomerType: DevKit.Controls.OptionSet;
			/** Type the record description. */
			msdyn_description: DevKit.Controls.String;
			/** Enter the transaction date of the business event. */
			msdyn_DocumentDate: DevKit.Controls.Date;
			/** Enter the end date and time for this transaction. */
			msdyn_EndDateTime: DevKit.Controls.DateTime;
			/** The external description of the business transaction. */
			msdyn_externaldescription: DevKit.Controls.String;
			msdyn_IncidentType: DevKit.Controls.Lookup;
			/** The unique identifier of an invoice. */
			msdyn_Invoice: DevKit.Controls.Lookup;
			msdyn_IsJournalized: DevKit.Controls.Boolean;
			/** Type of journal for resource cost. */
			msdyn_JournalType: DevKit.Controls.OptionSet;
			/** Enter the percent. */
			msdyn_Percent: DevKit.Controls.Decimal;
			/** Enter the price in the transaction currency. */
			msdyn_Price: DevKit.Controls.Money;
			/** Select the price list. */
			msdyn_PriceList: DevKit.Controls.Lookup;
			/** Select the product ID. */
			msdyn_Product: DevKit.Controls.Lookup;
			msdyn_ProductType: DevKit.Controls.OptionSet;
			/** Select the project ID. */
			msdyn_Project: DevKit.Controls.Lookup;
			/** Enter the quantity. */
			msdyn_Quantity: DevKit.Controls.Decimal;
			/** Select the role ID of the resource performing the work. */
			msdyn_ResourceCategory: DevKit.Controls.Lookup;
			/** Organizational unit at the time the actual was registered of the resource who performed the work. */
			msdyn_ResourceOrganizationalUnitId: DevKit.Controls.Lookup;
			/** Account that was serviced */
			msdyn_ServiceAccount: DevKit.Controls.Lookup;
			msdyn_ServiceTerritory: DevKit.Controls.Lookup;
			/** Enter the start date and time. */
			msdyn_StartDateTime: DevKit.Controls.DateTime;
			/** Select the task. */
			msdyn_Task: DevKit.Controls.Lookup;
			msdyn_TaxCode: DevKit.Controls.Lookup;
			/** Select the transaction category. */
			msdyn_TransactionCategory: DevKit.Controls.Lookup;
			/** Shows the transaction classification of this transaction. */
			msdyn_TransactionClassification: DevKit.Controls.OptionSet;
			/** Shows the transaction type of this transaction. */
			msdyn_TransactionTypeCode: DevKit.Controls.OptionSet;
			/** Select the unit of measure. */
			msdyn_Unit: DevKit.Controls.Lookup;
			/** Select the unit schedule. */
			msdyn_UnitSchedule: DevKit.Controls.Lookup;
			msdyn_VendorType: DevKit.Controls.OptionSet;
			msdyn_Warehouse: DevKit.Controls.Lookup;
			msdyn_WorkLocation: DevKit.Controls.OptionSet;
			msdyn_WorkOrder: DevKit.Controls.Lookup;
			msdyn_WorkOrderType: DevKit.Controls.Lookup;
			/** Owner Id */
			OwnerId: DevKit.Controls.Lookup;
			/** Shows the currency associated with the entity. */
			TransactionCurrencyId: DevKit.Controls.Lookup;
		}
	}
	// Strongly-typed wrapper around the msdyn_actual "Information" form.
	class Formmsdyn_actual_Information extends DevKit.IForm {
		/**
		 * DynamicsCrm.DevKit form msdyn_actual_Information
		 * @param executionContext the execution context
		 * @param defaultWebResourceName default resource name. E.g.: "devkit_/resources/Resource"
		 */
		constructor(executionContext: any, defaultWebResourceName?: string);
		/** Utility functions/methods/objects for Dynamics 365 form */
		Utility: DevKit.Utility;
		/** The Body section of form msdyn_actual_Information */
		Body: DevKit.Formmsdyn_actual_Information.Body;
	}
	// Strongly-typed Web API wrapper for the msdyn_actual entity. Members mirror
	// the entity attributes; *Readonly value types denote server-computed fields.
	class msdyn_actualApi {
		/**
		 * DynamicsCrm.DevKit msdyn_actualApi
		 * @param entity The entity object
		 */
		constructor(entity?: any);
		/**
		 * Get the value of alias
		 * @param alias the alias value
		 * @param isMultiOptionSet true if the alias is multi OptionSet
		 */
		getAliasedValue(alias: string, isMultiOptionSet?: boolean): any;
		/**
		 * Get the formatted value of alias
		 * @param alias the alias value
		 * @param isMultiOptionSet true if the alias is multi OptionSet
		 */
		getAliasedFormattedValue(alias: string, isMultiOptionSet?: boolean): string;
		/** The entity object */
		Entity: any;
		/** The entity name */
		EntityName: string;
		/** The entity collection name */
		EntityCollectionName: string;
		/** The @odata.etag is then used to build a cache of the response that is dependant on the fields that are retrieved */
		"@odata.etag": string;
		/** Unique identifier of the user who created the record. */
		CreatedBy: DevKit.WebApi.LookupValueReadonly;
		/** Date and time when the record was created. */
		CreatedOn_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly;
		/** Unique identifier of the delegate user who created the record. */
		CreatedOnBehalfBy: DevKit.WebApi.LookupValueReadonly;
		/** Exchange rate for the currency associated with the entity with respect to the base currency. */
		ExchangeRate: DevKit.WebApi.DecimalValueReadonly;
		/** Sequence number of the import that created this record. */
		ImportSequenceNumber: DevKit.WebApi.IntegerValue;
		/** Unique identifier of the user who modified the record. */
		ModifiedBy: DevKit.WebApi.LookupValueReadonly;
		/** Date and time when the record was modified. */
		ModifiedOn_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly;
		/** Unique identifier of the delegate user who modified the record. */
		ModifiedOnBehalfBy: DevKit.WebApi.LookupValueReadonly;
		/** Select the customer. */
		msdyn_AccountCustomer: DevKit.WebApi.LookupValue;
		msdyn_AccountingDate_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue;
		msdyn_AccountVendor: DevKit.WebApi.LookupValue;
		/** Unique identifier for entity instances */
		msdyn_actualId: DevKit.WebApi.GuidValue;
		/** Shows the adjustment status ID of the transaction. */
		msdyn_AdjustmentStatus: DevKit.WebApi.OptionSetValue;
		msdyn_Agreement: DevKit.WebApi.LookupValue;
		/** Enter the amount in transaction currency. */
		msdyn_Amount: DevKit.WebApi.MoneyValue;
		/** Enter the value of the amount in the base (organization) currency. */
		msdyn_amount_Base: DevKit.WebApi.MoneyValueReadonly;
		/** Select the method by which the amount was computed. */
		msdyn_AmountMethod: DevKit.WebApi.OptionSetValue;
		/** Enter the cost amount of the sales transaction in the transaction currency. */
		msdyn_BasisAmount: DevKit.WebApi.MoneyValue;
		/** Enter the cost amount of the sales transaction in the base (organization) currency. */
		msdyn_basisamount_Base: DevKit.WebApi.MoneyValueReadonly;
		/** Enter the cost price of the sales transaction in transaction currency. */
		msdyn_BasisPrice: DevKit.WebApi.MoneyValue;
		/** Enter the cost price of the sales transaction in base (organization) currency. */
		msdyn_basisprice_Base: DevKit.WebApi.MoneyValueReadonly;
		/** Enter the cost quantity of the sales transaction in the base (organization) currency. */
		msdyn_BasisQuantity: DevKit.WebApi.DecimalValue;
		/** Select the billing status ID. */
		msdyn_BillingStatus: DevKit.WebApi.OptionSetValue;
		/** Select the billing type ID. */
		msdyn_BillingType: DevKit.WebApi.OptionSetValue;
		/** Shows the bookable resource for which the actual is recorded. */
		msdyn_bookableresource: DevKit.WebApi.LookupValue;
		/** Select the customer contact. */
		msdyn_ContactCustomer: DevKit.WebApi.LookupValue;
		msdyn_ContactVendor: DevKit.WebApi.LookupValue;
		/** Select the organizational unit ID for the contract. */
		msdyn_contractorganizationalunitid: DevKit.WebApi.LookupValue;
		/** Select the customer type ID. */
		msdyn_CustomerType: DevKit.WebApi.OptionSetValue;
		/** Type the record description. */
		msdyn_description: DevKit.WebApi.StringValue;
		/** Enter the transaction date of the business event. */
		msdyn_DocumentDate_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue;
		/** Enter the end date and time for this transaction. */
		msdyn_EndDateTime_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue;
		/** Enter the date of the exchange rate used for this transaction. */
		msdyn_ExchangeRateDate_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue;
		/** The external description of the business transaction. */
		msdyn_externaldescription: DevKit.WebApi.StringValue;
		/** Stores a date from an external system, such as a journal entry voucher date from an ERP system */
		msdyn_ExternalReferenceDate_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue;
		/** Stores an ID from an external system, such as the journal entry voucher number from an ERP system. */
		msdyn_ExternalReferenceID: DevKit.WebApi.StringValue;
		msdyn_IncidentType: DevKit.WebApi.LookupValue;
		/** The unique identifier of an invoice. */
		msdyn_Invoice: DevKit.WebApi.LookupValue;
		msdyn_IsJournalized: DevKit.WebApi.BooleanValue;
		/** Type of journal for resource cost. */
		msdyn_JournalType: DevKit.WebApi.OptionSetValue;
		/** Enter the percent. */
		msdyn_Percent: DevKit.WebApi.DecimalValue;
		/** Enter the price in the transaction currency. */
		msdyn_Price: DevKit.WebApi.MoneyValue;
		/** Enter the price in the base (organization) currency. */
		msdyn_price_Base: DevKit.WebApi.MoneyValueReadonly;
		/** Select the price list. */
		msdyn_PriceList: DevKit.WebApi.LookupValue;
		/** Select the product ID. */
		msdyn_Product: DevKit.WebApi.LookupValue;
		msdyn_ProductType: DevKit.WebApi.OptionSetValue;
		/** Select the project ID. */
		msdyn_Project: DevKit.WebApi.LookupValue;
		/** Enter the quantity. */
		msdyn_Quantity: DevKit.WebApi.DecimalValue;
		/** Select the role ID of the resource performing the work. */
		msdyn_ResourceCategory: DevKit.WebApi.LookupValue;
		/** Organizational unit at the time the actual was registered of the resource who performed the work. */
		msdyn_ResourceOrganizationalUnitId: DevKit.WebApi.LookupValue;
		/** Select the project contract. */
		msdyn_SalesContract: DevKit.WebApi.LookupValue;
		/** (Deprecated) Type the project contract line. */
		msdyn_SalesContractLine: DevKit.WebApi.StringValue;
		/** Unique identifier for Project Contract Line associated with Actual. */
		msdyn_SalesContractLineId: DevKit.WebApi.LookupValue;
		/** Account that was serviced */
		msdyn_ServiceAccount: DevKit.WebApi.LookupValue;
		msdyn_ServiceTerritory: DevKit.WebApi.LookupValue;
		/** Enter the start date and time. */
		msdyn_StartDateTime_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue;
		/** Select the task. */
		msdyn_Task: DevKit.WebApi.LookupValue;
		msdyn_TaxCode: DevKit.WebApi.LookupValue;
		/** Select the transaction category. */
		msdyn_TransactionCategory: DevKit.WebApi.LookupValue;
		/** Shows the transaction classification of this transaction. */
		msdyn_TransactionClassification: DevKit.WebApi.OptionSetValue;
		/** Shows the transaction type of this transaction. */
		msdyn_TransactionTypeCode: DevKit.WebApi.OptionSetValue;
		/** Select the unit of measure. */
		msdyn_Unit: DevKit.WebApi.LookupValue;
		/** Select the unit schedule. */
		msdyn_UnitSchedule: DevKit.WebApi.LookupValue;
		msdyn_VendorType: DevKit.WebApi.OptionSetValue;
		msdyn_Warehouse: DevKit.WebApi.LookupValue;
		msdyn_WorkLocation: DevKit.WebApi.OptionSetValue;
		msdyn_WorkOrder: DevKit.WebApi.LookupValue;
		msdyn_WorkOrderType: DevKit.WebApi.LookupValue;
		/** Date and time that the record was migrated. */
		OverriddenCreatedOn_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue;
		/** Enter the user who is assigned to manage the record. This field is updated every time the record is assigned to a different user */
		OwnerId_systemuser: DevKit.WebApi.LookupValue;
		/** Enter the team who is assigned to manage the record. This field is updated every time the record is assigned to a different team */
		OwnerId_team: DevKit.WebApi.LookupValue;
		/** Unique identifier for the business unit that owns the record */
		OwningBusinessUnit: DevKit.WebApi.LookupValueReadonly;
		/** Unique identifier for the team that owns the record. */
		OwningTeam: DevKit.WebApi.LookupValueReadonly;
		/** Unique identifier for the user that owns the record. */
		OwningUser: DevKit.WebApi.LookupValueReadonly;
		/** Status of the Actual */
		statecode: DevKit.WebApi.OptionSetValue;
		/** Reason for the status of the Actual */
		statuscode: DevKit.WebApi.OptionSetValue;
		/** For internal use only. */
		TimeZoneRuleVersionNumber: DevKit.WebApi.IntegerValue;
		/** Shows the currency associated with the entity. */
		TransactionCurrencyId: DevKit.WebApi.LookupValue;
		/** Time zone code that was in use when the record was created. */
		UTCConversionTimeZoneCode: DevKit.WebApi.IntegerValue;
		/** Version Number */
		VersionNumber: DevKit.WebApi.BigIntValueReadonly;
	}
}
// Generated option-set (picklist) typings for the msdyn_actual entity. These are
// ambient enums: the numeric option value of each member is recorded in the doc
// comment directly above it, not as an initializer.
declare namespace OptionSet {
	namespace msdyn_actual {
		enum msdyn_AdjustmentStatus {
			/** 192350001 */
			Adjusted,
			/** 192350000 */
			In_Process,
			/** 192350002 */
			Unadjustable
		}
		enum msdyn_AmountMethod {
			/** 192350001 */
			Fixed_Price,
			/** 192350003 */
			Multiply_Basis_Amount_By_Percent,
			/** 192350002 */
			Multiply_Basis_Quantity_By_Price,
			/** 192350000 */
			Multiply_Quantity_By_Price,
			/** 690970000 */
			Tax_Calculation
		}
		enum msdyn_BillingStatus {
			/** 192350003 */
			Canceled,
			/** 192350001 */
			Customer_Invoice_Created,
			/** 192350002 */
			Customer_Invoice_Posted,
			/** 192350004 */
			Ready_to_Invoice,
			/** 192350000 */
			Unbilled_Sales_Created,
			/** 690970000 */
			Work_order_closed_posted
		}
		enum msdyn_BillingType {
			/** 192350001 */
			Chargeable,
			/** 192350002 */
			Complimentary,
			/** 192350000 */
			Non_Chargeable,
			/** 192350003 */
			Not_Available
		}
		enum msdyn_CustomerType {
			/** 192350001 */
			Account,
			/** 192350002 */
			Contact
		}
		enum msdyn_JournalType {
			/** 690970001 */
			Break,
			/** 690970004 */
			Business_Closure,
			/** 690970003 */
			Overtime,
			/** 690970002 */
			Travel,
			/** 690970000 */
			Working_Hours
		}
		enum msdyn_ProductType {
			/** 690970000 */
			Inventory,
			/** 690970001 */
			Non_Inventory,
			/** 690970002 */
			Service
		}
		enum msdyn_TransactionClassification {
			/** 690970001 */
			Additional,
			/** 690970000 */
			Commission,
			/** 192350001 */
			Expense,
			/** 192350004 */
			Fee,
			/** 192350002 */
			Material,
			/** 192350003 */
			Milestone,
			/** 690970002 */
			Tax,
			/** 192350000 */
			Time
		}
		enum msdyn_TransactionTypeCode {
			/** 192350006 */
			Billed_Sales,
			/** 192350000 */
			Cost,
			/** 192350008 */
			Inter_Organizational_Sales,
			/** 192350004 */
			Project_Contract,
			/** 192350007 */
			Resourcing_Unit_Cost,
			/** 192350005 */
			Unbilled_Sales
		}
		enum msdyn_VendorType {
			/** 192350001 */
			Account,
			/** 192350002 */
			Contact
		}
		enum msdyn_WorkLocation {
			/** 690970001 */
			Facility,
			/** 690970002 */
			Location_Agnostic,
			/** 690970000 */
			Onsite
		}
		// Record state (active/inactive) of the Actual record.
		enum statecode {
			/** 0 */
			Active,
			/** 1 */
			Inactive
		}
		// Status reason for the record state.
		enum statuscode {
			/** 1 */
			Active,
			/** 2 */
			Inactive
		}
		// State of a rollup attribute's last calculation (standard Dataverse
		// <AttributeSchemaName>_State semantics).
		enum RollupState {
			/** 0 - Attribute value is yet to be calculated */
			NotCalculated,
			/** 1 - Attribute value has been calculated per the last update time in <AttributeSchemaName>_Date attribute */
			Calculated,
			/** 2 - Attribute value calculation lead to overflow error */
			OverflowError,
			/** 3 - Attribute value calculation failed due to an internal error, next run of calculation job will likely fix it */
			OtherError,
			/** 4 - Attribute value calculation failed because the maximum number of retry attempts to calculate the value were exceeded likely due to high number of concurrency and locking conflicts */
			RetryLimitExceeded,
			/** 5 - Attribute value calculation failed because maximum hierarchy depth limit for calculation was reached */
			HierarchicalRecursionLimitReached,
			/** 6 - Attribute value calculation failed because a recursive loop was detected in the hierarchy of the record */
			LoopDetected
		}
	}
}
//{'JsForm':['Information'],'JsWebApi':true,'IsDebugForm':true,'IsDebugWebApi':true,'Version':'2.12.31','JsFormVersion':'v2'} | the_stack |
import { CallOptions } from '../../CallOptions';
import { DeoptimizableEntity } from '../../DeoptimizableEntity';
import { HasEffectsContext } from '../../ExecutionContext';
import { EVENT_ACCESSED, EVENT_CALLED, NodeEvent } from '../../NodeEvents';
import {
ObjectPath,
ObjectPathKey,
PathTracker,
UNKNOWN_INTEGER_PATH,
UNKNOWN_PATH,
UnknownInteger,
UnknownKey
} from '../../utils/PathTracker';
import {
ExpressionEntity,
LiteralValueOrUnknown,
UNKNOWN_EXPRESSION,
UnknownValue
} from './Expression';
/**
 * One member of an object literal: its key (which may be a dynamic/unknown
 * key), whether it is a plain property ('init') or an accessor, and the
 * expression providing its value.
 */
export interface ObjectProperty {
	key: ObjectPathKey;
	kind: 'init' | 'set' | 'get';
	property: ExpressionEntity;
}

/** Maps a literal property name to the expressions that may provide its value. */
export interface PropertyMap {
	[key: string]: ExpressionEntity[];
}

// Matches keys that look like non-negative integer array indices, e.g. "0", "42".
const INTEGER_REG_EXP = /^\d+$/;
/**
 * Models an object literal (or object-like value) during tree-shaking. It
 * indexes plain properties, getters and setters by key so that member reads,
 * writes and calls can be resolved statically, and it records which parts have
 * been "deoptimized" — i.e. must henceforth be treated as unknown because they
 * may be mutated or accessed dynamically.
 */
export class ObjectEntity extends ExpressionEntity {
	// Every property value expression of this object, regardless of key or kind.
	private readonly allProperties: ExpressionEntity[] = [];
	// String keys whose values can no longer be resolved statically.
	private readonly deoptimizedPaths: Record<string, boolean> = Object.create(null);
	// Entities whose cached results depend on an exact key match; their caches
	// are invalidated when that key (or the whole object) is deoptimized.
	private readonly expressionsToBeDeoptimizedByKey: Record<string, DeoptimizableEntity[]> =
		Object.create(null);
	private readonly gettersByKey: PropertyMap = Object.create(null);
	private hasUnknownDeoptimizedInteger = false;
	private hasUnknownDeoptimizedProperty = false;
	// Candidates that may answer a read of a key (plain properties and getters)...
	private readonly propertiesAndGettersByKey: PropertyMap = Object.create(null);
	// ...and candidates that may absorb a write to a key (plain properties and setters).
	private readonly propertiesAndSettersByKey: PropertyMap = Object.create(null);
	private readonly settersByKey: PropertyMap = Object.create(null);
	// "this" values that were observed interacting with this object and must be
	// deoptimized together with it (only tracked when not immutable).
	private readonly thisParametersToBeDeoptimized = new Set<ExpressionEntity>();
	// Properties declared with the special UnknownInteger key (some unknown numeric index).
	private readonly unknownIntegerProps: ExpressionEntity[] = [];
	// Properties with computed/non-string keys that could match any key lookup,
	// split by accessor kind.
	private readonly unmatchableGetters: ExpressionEntity[] = [];
	private readonly unmatchablePropertiesAndGetters: ExpressionEntity[] = [];
	private readonly unmatchableSetters: ExpressionEntity[] = [];

	// If a PropertyMap is used, this will be taken as propertiesAndGettersByKey
	// and we assume there are no setters or getters
	constructor(
		properties: ObjectProperty[] | PropertyMap,
		private prototypeExpression: ExpressionEntity | null,
		private immutable = false
	) {
		super();
		if (Array.isArray(properties)) {
			this.buildPropertyMaps(properties);
		} else {
			// Pre-built map: it serves both the read and the write index.
			this.propertiesAndGettersByKey = this.propertiesAndSettersByKey = properties;
			for (const propertiesForKey of Object.values(properties)) {
				this.allProperties.push(...propertiesForKey);
			}
		}
	}

	/**
	 * Marks the entire object as unknown: every property/setter value and every
	 * prototype member is deoptimized, and all cached lookups are invalidated.
	 * Idempotent via hasUnknownDeoptimizedProperty.
	 */
	deoptimizeAllProperties(): void {
		if (this.hasUnknownDeoptimizedProperty) {
			return;
		}
		this.hasUnknownDeoptimizedProperty = true;
		for (const properties of Object.values(this.propertiesAndGettersByKey).concat(
			Object.values(this.settersByKey)
		)) {
			for (const property of properties) {
				property.deoptimizePath(UNKNOWN_PATH);
			}
		}
		// While the prototype itself cannot be mutated, each property can
		this.prototypeExpression?.deoptimizePath([UnknownKey, UnknownKey]);
		this.deoptimizeCachedEntities();
	}

	/**
	 * Deoptimizes only the integer-like keys ("0", "1", ...), e.g. after an
	 * access with an unknown numeric index. Idempotent, and a no-op if the whole
	 * object is already deoptimized.
	 */
	deoptimizeIntegerProperties(): void {
		if (this.hasUnknownDeoptimizedProperty || this.hasUnknownDeoptimizedInteger) {
			return;
		}
		this.hasUnknownDeoptimizedInteger = true;
		for (const [key, propertiesAndGetters] of Object.entries(this.propertiesAndGettersByKey)) {
			if (INTEGER_REG_EXP.test(key)) {
				for (const property of propertiesAndGetters) {
					property.deoptimizePath(UNKNOWN_PATH);
				}
			}
		}
		this.deoptimizeCachedIntegerEntities();
	}

	/**
	 * Deoptimizes the given member path. For a single-element path with a string
	 * key, the key is marked deoptimized and dependent caches are flushed — and
	 * execution deliberately FALLS THROUGH to also deoptimize the matching
	 * property values (with UNKNOWN_PATH). Non-string single keys delegate to
	 * the integer/all variants above. No-op for immutable objects.
	 */
	deoptimizePath(path: ObjectPath): void {
		if (this.hasUnknownDeoptimizedProperty || this.immutable) return;
		const key = path[0];
		if (path.length === 1) {
			if (typeof key !== 'string') {
				if (key === UnknownInteger) {
					return this.deoptimizeIntegerProperties();
				}
				return this.deoptimizeAllProperties();
			}
			if (!this.deoptimizedPaths[key]) {
				this.deoptimizedPaths[key] = true;
				// we only deoptimizeCache exact matches as in all other cases,
				// we do not return a literal value or return expression
				const expressionsToBeDeoptimized = this.expressionsToBeDeoptimizedByKey[key];
				if (expressionsToBeDeoptimized) {
					for (const expression of expressionsToBeDeoptimized) {
						expression.deoptimizeCache();
					}
				}
			}
		}
		// Single-element paths deoptimize the matched values entirely; longer
		// paths only deoptimize the corresponding sub-path.
		const subPath = path.length === 1 ? UNKNOWN_PATH : path.slice(1);
		for (const property of typeof key === 'string'
			? (this.propertiesAndGettersByKey[key] || this.unmatchablePropertiesAndGetters).concat(
					this.settersByKey[key] || this.unmatchableSetters
			  )
			: this.allProperties) {
			property.deoptimizePath(subPath);
		}
		this.prototypeExpression?.deoptimizePath(path.length === 1 ? [UnknownKey, UnknownKey] : path);
	}

	/**
	 * Propagates a "this" deoptimization event (access/call/assignment) at the
	 * given path to the relevant properties. Which property maps are relevant
	 * depends on the event: calls and deep paths use properties+getters,
	 * accesses use getters, assignments use setters.
	 */
	deoptimizeThisOnEventAtPath(
		event: NodeEvent,
		path: ObjectPath,
		thisParameter: ExpressionEntity,
		recursionTracker: PathTracker
	): void {
		const [key, ...subPath] = path;
		if (
			this.hasUnknownDeoptimizedProperty ||
			// single paths that are deoptimized will not become getters or setters
			((event === EVENT_CALLED || path.length > 1) &&
				typeof key === 'string' &&
				this.deoptimizedPaths[key])
		) {
			thisParameter.deoptimizePath(UNKNOWN_PATH);
			return;
		}
		const [propertiesForExactMatchByKey, relevantPropertiesByKey, relevantUnmatchableProperties] =
			event === EVENT_CALLED || path.length > 1
				? [
						this.propertiesAndGettersByKey,
						this.propertiesAndGettersByKey,
						this.unmatchablePropertiesAndGetters
				  ]
				: event === EVENT_ACCESSED
				? [this.propertiesAndGettersByKey, this.gettersByKey, this.unmatchableGetters]
				: [this.propertiesAndSettersByKey, this.settersByKey, this.unmatchableSetters];
		if (typeof key === 'string') {
			if (propertiesForExactMatchByKey[key]) {
				const properties = relevantPropertiesByKey[key];
				if (properties) {
					for (const property of properties) {
						property.deoptimizeThisOnEventAtPath(event, subPath, thisParameter, recursionTracker);
					}
				}
				if (!this.immutable) {
					this.thisParametersToBeDeoptimized.add(thisParameter);
				}
				return;
			}
			// No exact match: any computed-key property could still match.
			for (const property of relevantUnmatchableProperties) {
				property.deoptimizeThisOnEventAtPath(event, subPath, thisParameter, recursionTracker);
			}
			if (INTEGER_REG_EXP.test(key)) {
				for (const property of this.unknownIntegerProps) {
					property.deoptimizeThisOnEventAtPath(event, subPath, thisParameter, recursionTracker);
				}
			}
		} else {
			// Unknown key: every relevant property could match.
			for (const properties of Object.values(relevantPropertiesByKey).concat([
				relevantUnmatchableProperties
			])) {
				for (const property of properties) {
					property.deoptimizeThisOnEventAtPath(event, subPath, thisParameter, recursionTracker);
				}
			}
			for (const property of this.unknownIntegerProps) {
				property.deoptimizeThisOnEventAtPath(event, subPath, thisParameter, recursionTracker);
			}
		}
		if (!this.immutable) {
			this.thisParametersToBeDeoptimized.add(thisParameter);
		}
		this.prototypeExpression?.deoptimizeThisOnEventAtPath(
			event,
			path,
			thisParameter,
			recursionTracker
		);
	}

	/**
	 * Tries to resolve a member path to a literal value. Returns `undefined`
	 * (the literal) when the key is known to be absent and there is no
	 * prototype to consult; returns UnknownValue when resolution is impossible.
	 */
	getLiteralValueAtPath(
		path: ObjectPath,
		recursionTracker: PathTracker,
		origin: DeoptimizableEntity
	): LiteralValueOrUnknown {
		if (path.length === 0) {
			return UnknownValue;
		}
		const key = path[0];
		const expressionAtPath = this.getMemberExpressionAndTrackDeopt(key, origin);
		if (expressionAtPath) {
			return expressionAtPath.getLiteralValueAtPath(path.slice(1), recursionTracker, origin);
		}
		if (this.prototypeExpression) {
			return this.prototypeExpression.getLiteralValueAtPath(path, recursionTracker, origin);
		}
		if (path.length === 1) {
			return undefined;
		}
		return UnknownValue;
	}

	/**
	 * Resolves the expression returned when the member at the given path is
	 * called, consulting the prototype when the key does not match locally.
	 */
	getReturnExpressionWhenCalledAtPath(
		path: ObjectPath,
		callOptions: CallOptions,
		recursionTracker: PathTracker,
		origin: DeoptimizableEntity
	): ExpressionEntity {
		if (path.length === 0) {
			return UNKNOWN_EXPRESSION;
		}
		const key = path[0];
		const expressionAtPath = this.getMemberExpressionAndTrackDeopt(key, origin);
		if (expressionAtPath) {
			return expressionAtPath.getReturnExpressionWhenCalledAtPath(
				path.slice(1),
				callOptions,
				recursionTracker,
				origin
			);
		}
		if (this.prototypeExpression) {
			return this.prototypeExpression.getReturnExpressionWhenCalledAtPath(
				path,
				callOptions,
				recursionTracker,
				origin
			);
		}
		return UNKNOWN_EXPRESSION;
	}

	/**
	 * A read at the path has effects if a matching getter (or, for deep paths,
	 * the resolved member) has effects; unknown members are conservatively
	 * assumed to have effects.
	 */
	hasEffectsWhenAccessedAtPath(path: ObjectPath, context: HasEffectsContext): boolean {
		const [key, ...subPath] = path;
		if (path.length > 1) {
			if (typeof key !== 'string') {
				return true;
			}
			const expressionAtPath = this.getMemberExpression(key);
			if (expressionAtPath) {
				return expressionAtPath.hasEffectsWhenAccessedAtPath(subPath, context);
			}
			if (this.prototypeExpression) {
				return this.prototypeExpression.hasEffectsWhenAccessedAtPath(path, context);
			}
			return true;
		}
		if (this.hasUnknownDeoptimizedProperty) return true;
		if (typeof key === 'string') {
			if (this.propertiesAndGettersByKey[key]) {
				const getters = this.gettersByKey[key];
				if (getters) {
					for (const getter of getters) {
						if (getter.hasEffectsWhenAccessedAtPath(subPath, context)) return true;
					}
				}
				return false;
			}
			for (const getter of this.unmatchableGetters) {
				if (getter.hasEffectsWhenAccessedAtPath(subPath, context)) {
					return true;
				}
			}
		} else {
			for (const getters of Object.values(this.gettersByKey).concat([this.unmatchableGetters])) {
				for (const getter of getters) {
					if (getter.hasEffectsWhenAccessedAtPath(subPath, context)) return true;
				}
			}
		}
		if (this.prototypeExpression) {
			return this.prototypeExpression.hasEffectsWhenAccessedAtPath(path, context);
		}
		return false;
	}

	/**
	 * A write at the path has effects if a matching setter (or, for deep paths,
	 * the resolved member) has effects; unknown members are conservatively
	 * assumed to have effects.
	 */
	hasEffectsWhenAssignedAtPath(path: ObjectPath, context: HasEffectsContext): boolean {
		const [key, ...subPath] = path;
		if (path.length > 1) {
			if (typeof key !== 'string') {
				return true;
			}
			const expressionAtPath = this.getMemberExpression(key);
			if (expressionAtPath) {
				return expressionAtPath.hasEffectsWhenAssignedAtPath(subPath, context);
			}
			if (this.prototypeExpression) {
				return this.prototypeExpression.hasEffectsWhenAssignedAtPath(path, context);
			}
			return true;
		}
		if (this.hasUnknownDeoptimizedProperty) return true;
		// We do not need to test for unknown properties as in that case, hasUnknownDeoptimizedProperty is true
		if (typeof key === 'string') {
			if (this.propertiesAndSettersByKey[key]) {
				const setters = this.settersByKey[key];
				if (setters) {
					for (const setter of setters) {
						if (setter.hasEffectsWhenAssignedAtPath(subPath, context)) return true;
					}
				}
				return false;
			}
			for (const property of this.unmatchableSetters) {
				if (property.hasEffectsWhenAssignedAtPath(subPath, context)) {
					return true;
				}
			}
		}
		if (this.prototypeExpression) {
			return this.prototypeExpression.hasEffectsWhenAssignedAtPath(path, context);
		}
		return false;
	}

	/**
	 * A call at the path has effects unless the member can be resolved (locally
	 * or via the prototype) to an expression without call effects.
	 */
	hasEffectsWhenCalledAtPath(
		path: ObjectPath,
		callOptions: CallOptions,
		context: HasEffectsContext
	): boolean {
		const key = path[0];
		const expressionAtPath = this.getMemberExpression(key);
		if (expressionAtPath) {
			return expressionAtPath.hasEffectsWhenCalledAtPath(path.slice(1), callOptions, context);
		}
		if (this.prototypeExpression) {
			return this.prototypeExpression.hasEffectsWhenCalledAtPath(path, callOptions, context);
		}
		return true;
	}

	/**
	 * Builds the per-key lookup maps from the property list. Iterates from LAST
	 * to FIRST so the first occurrence recorded for a string key is the one that
	 * wins at runtime; the "unmatchable" arrays accumulate computed-key
	 * properties already processed (i.e. appearing LATER in source order), which
	 * could still shadow a string key — the spread snapshots capture exactly
	 * those.
	 */
	private buildPropertyMaps(properties: ObjectProperty[]): void {
		const {
			allProperties,
			propertiesAndGettersByKey,
			propertiesAndSettersByKey,
			settersByKey,
			gettersByKey,
			unknownIntegerProps,
			unmatchablePropertiesAndGetters,
			unmatchableGetters,
			unmatchableSetters
		} = this;
		const unmatchablePropertiesAndSetters: ExpressionEntity[] = [];
		for (let index = properties.length - 1; index >= 0; index--) {
			const { key, kind, property } = properties[index];
			allProperties.push(property);
			if (typeof key !== 'string') {
				if (key === UnknownInteger) {
					unknownIntegerProps.push(property);
					continue;
				}
				if (kind === 'set') unmatchableSetters.push(property);
				if (kind === 'get') unmatchableGetters.push(property);
				if (kind !== 'get') unmatchablePropertiesAndSetters.push(property);
				if (kind !== 'set') unmatchablePropertiesAndGetters.push(property);
			} else {
				if (kind === 'set') {
					if (!propertiesAndSettersByKey[key]) {
						propertiesAndSettersByKey[key] = [property, ...unmatchablePropertiesAndSetters];
						settersByKey[key] = [property, ...unmatchableSetters];
					}
				} else if (kind === 'get') {
					if (!propertiesAndGettersByKey[key]) {
						propertiesAndGettersByKey[key] = [property, ...unmatchablePropertiesAndGetters];
						gettersByKey[key] = [property, ...unmatchableGetters];
					}
				} else {
					if (!propertiesAndSettersByKey[key]) {
						propertiesAndSettersByKey[key] = [property, ...unmatchablePropertiesAndSetters];
					}
					if (!propertiesAndGettersByKey[key]) {
						propertiesAndGettersByKey[key] = [property, ...unmatchablePropertiesAndGetters];
					}
				}
			}
		}
	}

	// Invalidates every key-dependent cache and deoptimizes all tracked "this" values.
	private deoptimizeCachedEntities() {
		for (const expressionsToBeDeoptimized of Object.values(this.expressionsToBeDeoptimizedByKey)) {
			for (const expression of expressionsToBeDeoptimized) {
				expression.deoptimizeCache();
			}
		}
		for (const expression of this.thisParametersToBeDeoptimized) {
			expression.deoptimizePath(UNKNOWN_PATH);
		}
	}

	// Like deoptimizeCachedEntities, but restricted to integer-like keys.
	private deoptimizeCachedIntegerEntities() {
		for (const [key, expressionsToBeDeoptimized] of Object.entries(
			this.expressionsToBeDeoptimizedByKey
		)) {
			if (INTEGER_REG_EXP.test(key)) {
				for (const expression of expressionsToBeDeoptimized) {
					expression.deoptimizeCache();
				}
			}
		}
		for (const expression of this.thisParametersToBeDeoptimized) {
			expression.deoptimizePath(UNKNOWN_INTEGER_PATH);
		}
	}

	/**
	 * Resolves a key to a member expression:
	 * - UNKNOWN_EXPRESSION when the key is non-string, deoptimized, or ambiguous
	 *   (multiple candidates, possible computed-key shadowing, or matching
	 *   unknown-integer properties);
	 * - the single candidate when there is exactly one;
	 * - null when the key is statically known to be absent here.
	 */
	private getMemberExpression(key: ObjectPathKey): ExpressionEntity | null {
		if (
			this.hasUnknownDeoptimizedProperty ||
			typeof key !== 'string' ||
			(this.hasUnknownDeoptimizedInteger && INTEGER_REG_EXP.test(key)) ||
			this.deoptimizedPaths[key]
		) {
			return UNKNOWN_EXPRESSION;
		}
		const properties = this.propertiesAndGettersByKey[key];
		if (properties?.length === 1) {
			return properties[0];
		}
		if (
			properties ||
			this.unmatchablePropertiesAndGetters.length > 0 ||
			(this.unknownIntegerProps.length && INTEGER_REG_EXP.test(key))
		) {
			return UNKNOWN_EXPRESSION;
		}
		return null;
	}

	/**
	 * Same as getMemberExpression, but additionally registers `origin` so its
	 * cache is invalidated if this key is later deoptimized (skipped when the
	 * result is already unknown or the object is immutable).
	 */
	private getMemberExpressionAndTrackDeopt(
		key: ObjectPathKey,
		origin: DeoptimizableEntity
	): ExpressionEntity | null {
		if (typeof key !== 'string') {
			return UNKNOWN_EXPRESSION;
		}
		const expression = this.getMemberExpression(key);
		if (!(expression === UNKNOWN_EXPRESSION || this.immutable)) {
			const expressionsToBeDeoptimized = (this.expressionsToBeDeoptimizedByKey[key] =
				this.expressionsToBeDeoptimizedByKey[key] || []);
			expressionsToBeDeoptimized.push(origin);
		}
		return expression;
	}
}
* @module Tiles
*/
import { BeTimePoint, dispose } from "@itwin/core-bentley";
import { ClipMaskXYZRangePlanes, ClipShape, ClipVector, Point3d, Transform } from "@itwin/core-geometry";
import { ColorDef, Frustum } from "@itwin/core-common";
import { IModelApp } from "../IModelApp";
import { GraphicBranch, GraphicBranchOptions } from "../render/GraphicBranch";
import { GraphicBuilder } from "../render/GraphicBuilder";
import { RenderGraphic } from "../render/RenderGraphic";
import { RenderSystem } from "../render/RenderSystem";
import { ViewingSpace } from "../ViewingSpace";
import { Viewport } from "../Viewport";
import {
RealityTileRegion,
RealityTileTree, Tile, TileContent, TileDrawArgs, TileGraphicType, TileLoadStatus, TileParams, TileRequest, TileRequestChannel, TileTreeLoadStatus, TraversalDetails, TraversalSelectionContext,
} from "./internal";
/** @internal */
export interface RealityTileParams extends TileParams {
	/** Optional transform from this tile's local coordinates to the tree root; applied to the tile's ranges in the RealityTile constructor. */
	readonly transformToRoot?: Transform;
	/** If true, children add detail on top of (rather than replace) this tile's content; inherited from the parent tile when omitted. */
	readonly additiveRefinement?: boolean;
	/** If true, the tile is never displayable itself but terminates traversal when selected (see RealityTile.isDisplayable). */
	readonly noContentButTerminateOnSelection?: boolean;
	/** Optional explicit corner points of the tile's range; transformed by transformToRoot on construction. */
	readonly rangeCorners?: Point3d[];
	/** Optional region describing the tile's extent — presumably geographic; see RealityTileRegion. */
	readonly region?: RealityTileRegion;
}
// Module-scoped scratch objects, reused to avoid per-frame allocations during
// tile traversal (used by code later in this file).
const scratchLoadedChildren = new Array<RealityTile>();
const scratchCorners = [Point3d.createZero(), Point3d.createZero(), Point3d.createZero(), Point3d.createZero(), Point3d.createZero(), Point3d.createZero(), Point3d.createZero(), Point3d.createZero()];
// Additive tiles (Cesium OSM tileset) are subdivided until their range diagonal
// falls below this threshold to ensure accurate reprojection.
const additiveRefinementThreshold = 10000;
// Safety cap on how deep additive-refinement subdivision may recurse.
const additiveRefinementDepthLimit = 20;
const scratchFrustum = new Frustum();
/**
 * A specialization of tiles that represent reality tiles. 3D Tilesets and maps use this class and have their own optimized traversal and lifetime management.
 * @internal
 */
export class RealityTile extends Tile {
  /** Transform from this tile's local coordinates to the tree's root; applied to the bounding volumes in the constructor. */
  public readonly transformToRoot?: Transform;
  /** True if this tile's geometry is displayed in addition to its children's rather than being replaced by them. */
  public readonly additiveRefinement?: boolean;
  /** True if this tile has no content of its own but terminates selection when fully visible. */
  public readonly noContentButTerminateOnSelection?: boolean;
  /** The 8 corners of this tile's range, when supplied by the tileset. */
  public readonly rangeCorners?: Point3d[];
  /** Geographic region bounding this tile, when sourced from a geospatial tileset. */
  public readonly region?: RealityTileRegion;
  // True once this tile has ever been displayed; used to damp tiles popping into view (see selectRealityTiles).
  private _everDisplayed = false;
  // Reprojection applied to this tile's bounding volumes and (lazily) its graphics -- see reproject() and produceGraphics().
  protected _reprojectionTransform?: Transform;
  // Cached graphic branch wrapping _graphic with _reprojectionTransform applied.
  private _reprojectedGraphic?: RenderGraphic;
  public constructor(props: RealityTileParams, tree: RealityTileTree) {
    super(props, tree);
    this.transformToRoot = props.transformToRoot;
    // additiveRefinement is inherited from the parent tile when not explicitly specified.
    this.additiveRefinement = (undefined === props.additiveRefinement) ? this.realityParent?.additiveRefinement : props.additiveRefinement;
    this.noContentButTerminateOnSelection = props.noContentButTerminateOnSelection;
    this.rangeCorners = props.rangeCorners;
    this.region = props.region;
    if (undefined === this.transformToRoot)
      return;
    // Transform the tile's bounding volumes into the root coordinate system.
    // Can transform be non-rigid?? -- if so would have to handle (readonly) radius.
    this.boundingSphere.transformBy(this.transformToRoot, this.boundingSphere);
    this.transformToRoot.multiplyRange(this.range, this.range);
    if (this.rangeCorners)
      this.transformToRoot.multiplyPoint3dArrayInPlace(this.rangeCorners);
    if (undefined !== this._contentRange)
      this.transformToRoot.multiplyRange(this._contentRange, this._contentRange);
  }
  public get realityChildren(): RealityTile[] | undefined { return this.children as RealityTile[] | undefined; }
  public get realityParent(): RealityTile { return this.parent as RealityTile; }
  public get realityRoot(): RealityTileTree { return this.tree as RealityTileTree; }
  public get graphicType(): TileGraphicType | undefined { return undefined; } // If undefined, use tree type.
  public get maxDepth(): number { return this.realityRoot.loader.maxDepth; }
  public get isPointCloud() { return this.realityRoot.loader.containsPointClouds; }
  public get isLoaded() { return this.loadStatus === TileLoadStatus.Ready; } // Reality tiles may depend on secondary tiles (maps) so can be loaded but not ready.
  public override get isDisplayable(): boolean {
    // Tiles that exist only to terminate selection display nothing themselves.
    if (this.noContentButTerminateOnSelection)
      return false;
    else
      return super.isDisplayable;
  }
  /** Record that this tile was used this frame so purgeContents does not discard its contents. */
  public markUsed(args: TileDrawArgs): void {
    args.markUsed(this);
  }
  /** Record that this tile has been displayed at least once -- see _everDisplayed. */
  public markDisplayed(): void {
    this._everDisplayed = true;
  }
  /** Always false here; subclasses may override to cull occluded tiles. */
  public isOccluded(_viewingSpace: ViewingSpace): boolean {
    return false;
  }
  /** The channel on which content requests for this tile are scheduled. */
  public get channel(): TileRequestChannel {
    return this.realityRoot.loader.getRequestChannel(this);
  }
  /** Delegate the content request to the tree's loader. */
  public async requestContent(isCanceled: () => boolean): Promise<TileRequest.Response> {
    return this.realityRoot.loader.requestTileContent(this, isCanceled);
  }
  private useAdditiveRefinementStepchildren() {
    // Create additive step-children only if this tile is additive and displayable, we are reprojecting,
    // and the range diagonal exceeds additiveRefinementThreshold (below the depth limit).
    // This criteria is currently only met by the Cesium OSM tileset.
    const rangeDiagonal = this.rangeCorners ? this.rangeCorners[0].distance(this.rangeCorners[3]) : 0;
    return this.additiveRefinement && this.isDisplayable && rangeDiagonal > additiveRefinementThreshold && this.depth < additiveRefinementDepthLimit && this.realityRoot.doReprojectChildren(this);
  }
  protected _loadChildren(resolve: (children: Tile[] | undefined) => void, reject: (error: Error) => void): void {
    this.realityRoot.loader.loadChildren(this).then((children: Tile[] | undefined) => {
      /* If this large tile is to be included additively, but we are reprojecting (Cesium OSM), then we must add step-children
         to display the geometry, as an overly large tile cannot be reprojected accurately. */
      if (this.useAdditiveRefinementStepchildren())
        this.loadAdditiveRefinementChildren((stepChildren: Tile[]) => { children = children ? children?.concat(stepChildren) : stepChildren; });
      if (children)
        this.realityRoot.reprojectAndResolveChildren(this, children, resolve); /* Potentially reproject and resolve these children */
    }).catch((err) => {
      reject(err);
    });
  }
  /** Delegate content deserialization to the tree's loader. */
  public async readContent(data: TileRequest.ResponseData, system: RenderSystem, isCanceled?: () => boolean): Promise<TileContent> {
    return this.realityRoot.loader.loadTileContent(this, data, system, isCanceled);
  }
  public override computeLoadPriority(viewports: Iterable<Viewport>): number {
    return this.realityRoot.loader.computeTilePriority(this, viewports);
  }
  /** Clip volume confining this tile's graphics to its content range. */
  public getContentClip(): ClipVector | undefined {
    return ClipVector.createCapture([ClipShape.createBlock(this.contentRange, ClipMaskXYZRangePlanes.All)]);
  }
  // Allow tile to select additional tiles (Terrain Imagery...)
  public selectSecondaryTiles(_args: TileDrawArgs, _context: TraversalSelectionContext) { }
  // An upsampled tile is not loadable - will override to return loadable parent.
  public get loadableTile(): RealityTile { return this; }
  /** Recursively preload every tile at exactly the given depth beneath this one. */
  public preloadRealityTilesAtDepth(depth: number, context: TraversalSelectionContext, args: TileDrawArgs) {
    if (this.depth === depth) {
      context.preload(this, args);
      return;
    }
    this.loadChildren();
    if (undefined !== this.realityChildren) {
      for (const child of this.realityChildren)
        child.preloadRealityTilesAtDepth(depth, context, args);
    }
  }
  /** Recurse selection into children, combining their traversal details back into this tile's details. */
  protected selectRealityChildren(context: TraversalSelectionContext, args: TileDrawArgs, traversalDetails: TraversalDetails) {
    const childrenLoadStatus = this.loadChildren(); // NB: asynchronous
    if (TileTreeLoadStatus.Loading === childrenLoadStatus) {
      args.markChildrenLoading();
      traversalDetails.childrenLoading = true;
      return;
    }
    if (undefined !== this.realityChildren) {
      const traversalChildren = this.realityRoot.getTraversalChildren(this.depth);
      traversalChildren.initialize();
      for (let i = 0; i < this.children!.length; i++)
        this.realityChildren[i].selectRealityTiles(context, args, traversalChildren.getChildDetail(i));
      traversalChildren.combine(traversalDetails);
    }
  }
  /** Add this tile's (possibly reprojected) range box to the builder -- used for debug display of tile volumes. */
  public addBoundingGraphic(builder: GraphicBuilder, color: ColorDef) {
    builder.setSymbology(color, color, 3);
    let corners = this.rangeCorners ? this.rangeCorners : this.range.corners();
    if (this._reprojectionTransform)
      corners = this._reprojectionTransform.multiplyPoint3dArray(corners);
    builder.addRangeBoxFromCorners(corners);
  }
  /** Apply a reprojection transform to this tile's bounding volumes; graphics are reprojected lazily in produceGraphics. */
  public reproject(rootReprojection: Transform) {
    this._reprojectionTransform = rootReprojection;
    rootReprojection.multiplyRange(this.range, this.range);
    this.boundingSphere.transformBy(rootReprojection, this.boundingSphere);
    if (this.contentRange)
      rootReprojection.multiplyRange(this.contentRange, this.contentRange);
    if (this.rangeCorners)
      rootReprojection.multiplyPoint3dArrayInPlace(this.rangeCorners);
  }
  /** Returns true if `tiles` is exactly this tile's own set of children. */
  public allChildrenIncluded(tiles: Tile[]) {
    if (this.children === undefined || tiles.length !== this.children.length)
      return false;
    for (const tile of tiles)
      if (tile.parent !== this)
        return false;
    return true;
  }
  /** Collect into scratchLoadedChildren the ready, visible descendants that can substitute for this unready tile.
   * Returns false if any visible descendant is itself not ready (in which case the substitution is unusable).
   */
  protected getLoadedRealityChildren(args: TileDrawArgs): boolean {
    if (this._childrenLoadStatus !== TileTreeLoadStatus.Loaded || this.realityChildren === undefined)
      return false;
    for (const child of this.realityChildren) {
      if (child.isReady && child.computeVisibilityFactor(args) > 0) {
        scratchLoadedChildren.push(child);
      } else if (!child.getLoadedRealityChildren(args))
        return false;
    }
    return true;
  }
  public forceSelectRealityTile(): boolean { return false; }
  /** Core selection: select this tile, queue it for loading, or recurse into children, based on its visibility factor. */
  public selectRealityTiles(context: TraversalSelectionContext, args: TileDrawArgs, traversalDetails: TraversalDetails) {
    const visibility = this.computeVisibilityFactor(args);
    if (visibility < 0)
      return;
    if (this.realityRoot.loader.forceTileLoad(this) && !this.isReady) {
      context.selectOrQueue(this, args, traversalDetails); // Force loading if loader requires this tile. (cesium terrain visibility).
      return;
    }
    if (visibility >= 1 && this.noContentButTerminateOnSelection)
      return;
    if (this.isDisplayable && (visibility >= 1 || this._anyChildNotFound || this.forceSelectRealityTile() || context.selectionCountExceeded)) {
      if (!this.isOccluded(args.viewingSpace)) {
        context.selectOrQueue(this, args, traversalDetails);
        if (!this.isReady) { // This tile is visible but not loaded - Use higher resolution children if present
          if (this.getLoadedRealityChildren(args))
            context.select(scratchLoadedChildren, args);
          scratchLoadedChildren.length = 0;
        }
      }
    } else {
      if (this.additiveRefinement && this.isDisplayable && !this.useAdditiveRefinementStepchildren())
        context.selectOrQueue(this, args, traversalDetails); // With additive refinement it is necessary to display this tile along with any displayed children.
      this.selectRealityChildren(context, args, traversalDetails);
      if (this.isReady && (traversalDetails.childrenLoading || 0 !== traversalDetails.queuedChildren.length)) {
        const minimumVisibleFactor = .25; // If the tile has not yet been displayed in this viewport -- display only if it is within 25% of visible. Avoid overly large tiles popping into view unexpectedly (terrain)
        if (visibility > minimumVisibleFactor || this._everDisplayed)
          context.selectOrQueue(this, args, traversalDetails);
      }
    }
  }
  public purgeContents(olderThan: BeTimePoint): void {
    // Discard contents of tiles that have not been "used" recently, where "used" may mean: selected/preloaded for display or content requested.
    // Note we do not discard the child Tile objects themselves.
    if (this.usageMarker.isExpired(olderThan))
      this.disposeContents();
    const children = this.realityChildren;
    if (children)
      for (const child of children)
        child.purgeContents(olderThan);
  }
  /** Returns -1 if empty or culled; 0 if the node is structural only (no geometry); 1 for an unculled leaf;
   * otherwise the ratio maximumSize / on-screen pixel size (>= 1 means resolution is adequate -- see selectRealityTiles).
   */
  public computeVisibilityFactor(args: TileDrawArgs): number {
    if (this.isEmpty)
      return -1;
    if (this.rangeCorners)
      scratchFrustum.setFromCorners(this.rangeCorners);
    else
      Frustum.fromRange(this.range, scratchFrustum);
    if (this.isFrustumCulled(scratchFrustum, args, true, this.boundingSphere))
      return -1;
    // some nodes are merely for structure and don't have any geometry
    if (0 === this.maximumSize)
      return 0;
    if (this.isLeaf)
      return this.hasContentRange && this.isContentCulled(args) ? -1 : 1;
    return this.maximumSize / args.getPixelSize(this);
  }
  /** Preload tiles inside the frustum, recursing until tiles are large enough on screen (scaled by preloadSizeModifier). */
  public preloadTilesInFrustum(args: TileDrawArgs, context: TraversalSelectionContext, preloadSizeModifier: number) {
    const visibility = this.computeVisibilityFactor(args);
    if (visibility < 0)
      return;
    if (visibility * preloadSizeModifier > 1) {
      if (this.isDisplayable)
        context.preload(this, args);
    } else {
      const childrenLoadStatus = this.loadChildren(); // NB: asynchronous
      if (TileTreeLoadStatus.Loading === childrenLoadStatus) {
        args.markChildrenLoading();
      } else if (undefined !== this.realityChildren) {
        for (const child of this.realityChildren)
          child.preloadTilesInFrustum(args, context, preloadSizeModifier);
      }
    }
  }
  /** True if any child failed to load, or the child list itself could not be found. */
  protected get _anyChildNotFound(): boolean {
    if (undefined !== this.children)
      for (const child of this.children)
        if (child.isNotFound)
          return true;
    return this._childrenLoadStatus === TileTreeLoadStatus.NotFound;
  }
  public override getSizeProjectionCorners(): Point3d[] | undefined {
    if (!this.tree.isContentUnbounded)
      return undefined; // For a non-global tree use the standard size algorithm.
    // For global tiles (as in OSM buildings) return the range corners or X-Y corners only if bounded by region- this allows an algorithm that uses the area of the projected corners to attenuate horizon tiles.
    if (!this.rangeCorners)
      return this.range.corners(scratchCorners);
    return this.region ? this.rangeCorners.slice(4) : this.rangeCorners;
  }
  public get isStepChild() { return false; }
  /** Subdivide this tile's region into a 2x2 latitude/longitude grid of step-children used to display clipped portions of additive geometry. */
  protected loadAdditiveRefinementChildren(resolve: (children: Tile[]) => void): void {
    const region = this.region;
    const corners = this.rangeCorners;
    if (!region || !corners)
      return;
    const maximumSize = this.maximumSize;
    const rangeDiagonal = corners[0].distance(corners[3]);
    // Stop subdividing once the diagonal is small enough to reproject accurately, or the depth limit is hit.
    const isLeaf = rangeDiagonal < additiveRefinementThreshold || this.depth > additiveRefinementDepthLimit;
    const stepChildren = new Array<AdditiveRefinementStepChild>();
    const latitudeDelta = (region.maxLatitude - region.minLatitude) / 2;
    const longitudeDelta = (region.maxLongitude - region.minLongitude) / 2;
    const minHeight = region.minHeight;
    const maxHeight = region.maxHeight;
    for (let i = 0, minLongitude = region.minLongitude, step = 0; i < 2; i++, minLongitude += longitudeDelta, step++) {
      for (let j = 0, minLatitude = region.minLatitude; j < 2; j++, minLatitude += latitudeDelta) {
        const childRegion = new RealityTileRegion({ minLatitude, maxLatitude: minLatitude + latitudeDelta, minLongitude, maxLongitude: minLongitude + longitudeDelta, minHeight, maxHeight });
        const childRange = childRegion.getRange();
        // Note: `step` is incremented both here and in the outer loop header, so the generated ids skip a
        // value per row (S0, S1, S3, S4); they remain unique, which is all that appears to be required.
        const contentId = `${this.contentId}_S${step++}`;
        const childParams: RealityTileParams = { rangeCorners: childRange.corners, contentId, range: childRange.range, maximumSize, parent: this, additiveRefinement: false, isLeaf, region: childRegion };
        stepChildren.push(new AdditiveRefinementStepChild(childParams, this.realityRoot));
      }
    }
    resolve(stepChildren);
  }
  public override produceGraphics(): RenderGraphic | undefined {
    if (undefined === this._reprojectionTransform)
      return super.produceGraphics();
    // Lazily wrap the raw graphic in a branch that applies the reprojection transform, caching the result.
    if (undefined === this._reprojectedGraphic && undefined !== this._graphic) {
      const branch = new GraphicBranch(false);
      branch.add(this._graphic);
      this._reprojectedGraphic = IModelApp.renderSystem.createGraphicBranch(branch, this._reprojectionTransform);
    }
    return this._reprojectedGraphic;
  }
  /** The tile's graphic without the reprojection transform applied -- consumed by AdditiveRefinementStepChild. */
  public get unprojectedGraphic(): RenderGraphic | undefined {
    return this._graphic;
  }
  public override disposeContents(): void {
    super.disposeContents();
    this._reprojectedGraphic = dispose(this._reprojectedGraphic);
  }
}
/** When additive refinement is used (as in the Cesium OSM tileset) it is not possible to accurately reproject very large, low-level tiles.
 * In this case we create additional "step" children (grandchildren, etc.) that display clipped portions of their ancestor's additive geometry.
 * These step children are subdivided until they are small enough to be accurately reprojected - this is controlled by `additiveRefinementThreshold`.
 * The step children do not contain any tile graphics of their own - they just create a branch with clipping and reprojection to display their additive-refinement ancestor's graphics.
 */
class AdditiveRefinementStepChild extends RealityTile {
  public override get isStepChild() { return true; }
  // The nearest non-step-child ancestor -- the tile whose content is actually loaded and whose graphics this child displays (clipped).
  private _loadableTile: RealityTile;
  public constructor(props: RealityTileParams, tree: RealityTileTree) {
    super(props, tree);
    // Walk up past any step-child ancestors to the first "real" tile.
    this._loadableTile = this.realityParent;
    for (; this._loadableTile && this._loadableTile.isStepChild; this._loadableTile = this._loadableTile.realityParent)
      ;
  }
  public override get loadableTile(): RealityTile {
    return this._loadableTile;
  }
  // All load-state queries delegate to the loadable ancestor, since a step-child has no content of its own.
  public override get isLoading(): boolean { return this._loadableTile.isLoading; }
  public override get isQueued(): boolean { return this._loadableTile.isQueued; }
  public override get isNotFound(): boolean { return this._loadableTile.isNotFound; }
  public override get isReady(): boolean { return this._loadableTile.isReady; }
  public override get isLoaded(): boolean { return this._loadableTile.isLoaded; }
  public override get isEmpty() { return false; }
  public override produceGraphics(): RenderGraphic | undefined {
    if (undefined === this._graphic) {
      const parentGraphics = this._loadableTile.unprojectedGraphic;
      if (!parentGraphics || !this._reprojectionTransform)
        return undefined;
      // Wrap the ancestor's unprojected graphics in a branch that clips to this child's footprint
      // and applies this child's (more accurate, smaller-extent) reprojection.
      const branch = new GraphicBranch(false);
      branch.add(parentGraphics);
      const renderSystem = IModelApp.renderSystem;
      const branchOptions: GraphicBranchOptions = {};
      if (this.rangeCorners) {
        // NOTE(review): corner order 0,1,3,2 appears intended to trace a convex quad from the range corners -- confirm against corner ordering conventions.
        const clipPolygon = [this.rangeCorners[0], this.rangeCorners[1], this.rangeCorners[3], this.rangeCorners[2]];
        branchOptions.clipVolume = renderSystem.createClipVolume(ClipVector.create([ClipShape.createShape(clipPolygon, undefined, undefined, this.tree.iModelTransform)!]));
      }
      this._graphic = renderSystem.createGraphicBranch(branch, this._reprojectionTransform, branchOptions);
    }
    return this._graphic;
  }
  public override markUsed(args: TileDrawArgs): void {
    // Keep both this step-child and the ancestor whose graphics it displays alive.
    args.markUsed(this);
    args.markUsed(this._loadableTile);
  }
  protected override _loadChildren(resolve: (children: Tile[] | undefined) => void, _reject: (error: Error) => void): void {
    // Step-children subdivide directly into further step-children; the loader is not involved.
    this.loadAdditiveRefinementChildren((stepChildren: Tile[]) => {
      if (stepChildren)
        this.realityRoot.reprojectAndResolveChildren(this, stepChildren, resolve);
    });
  }
}
import {
ITelemetryBaseEvent,
ITelemetryBaseLogger,
ITelemetryErrorEvent,
ITelemetryGenericEvent,
ITelemetryLogger,
ITelemetryPerformanceEvent,
ITelemetryProperties,
TelemetryEventPropertyType,
ITaggedTelemetryPropertyType,
TelemetryEventCategory,
} from "@fluidframework/common-definitions";
import { BaseTelemetryNullLogger, performance } from "@fluidframework/common-utils";
import {
isILoggingError,
extractLogSafeErrorProperties,
generateStack,
} from "./errorLogging";
/**
 * Broad classifications to be applied to individual properties as they're prepared to be logged to telemetry.
 * Please do not modify existing entries for backwards compatibility.
 */
export enum TelemetryDataTag {
    /** Data containing terms from code packages that may have been dynamically loaded */
    PackageData = "PackageData",
    /** Personal data of a variety of classifications that pertains to the user */
    UserData = "UserData",
}
/** A telemetry property value, either plain or wrapped with a [[TelemetryDataTag]] classification. */
export type TelemetryEventPropertyTypes = TelemetryEventPropertyType | ITaggedTelemetryPropertyType;
/** Property bag whose values may be immediate values or zero-argument getters evaluated at log time (see TelemetryLogger.prepareEvent). */
export interface ITelemetryLoggerPropertyBag {
    [index: string]: TelemetryEventPropertyTypes | (() => TelemetryEventPropertyTypes);
}
/** Property bags applied to all events ("all") or only to error events ("error"). */
export interface ITelemetryLoggerPropertyBags{
    all?: ITelemetryLoggerPropertyBag,
    error?: ITelemetryLoggerPropertyBag,
}
/**
 * TelemetryLogger class contains various helper telemetry methods,
 * encoding in one place schemas for various types of Fluid telemetry events.
 * Subclasses supply the transport by implementing [[send]]; this base class handles
 * namespacing, property-bag merging, error serialization and event-category defaults.
 */
export abstract class TelemetryLogger implements ITelemetryLogger {
    /** Separator inserted between namespace segments and the event name. */
    public static readonly eventNamespaceSeparator = ":";
    /** Round a (millisecond) tick value down to a whole number for logging. */
    public static formatTick(tick: number): number {
        return Math.floor(tick);
    }
    /**
     * Attempts to parse number from string.
     * If it fails, returns the original string.
     * Used to make telemetry data typed (and support math operations, like comparison),
     * in places where we do expect numbers (like contentsize/duration property in http header).
     * Note: Number("") is 0, so an empty string is reported as the number 0, not "".
     */
    public static numberFromString(str: string | null | undefined): string | number | undefined {
        if (str === undefined || str === null) {
            return undefined;
        }
        const num = Number(str);
        return Number.isNaN(num) ? str : num;
    }
    /**
     * Sanitize a package name for telemetry: strip every "@" and turn every "/" into "-",
     * e.g. "@fluidframework/common-utils" becomes "fluidframework-common-utils".
     */
    public static sanitizePkgName(name: string) {
        // Global regexes: String.replace with a plain-string pattern only replaces the FIRST occurrence,
        // which would leave later "@"/"/" characters in place.
        return name.replace(/@/g, "").replace(/\//g, "-");
    }
    /**
     * Take an unknown error object and add the appropriate info from it to the event. Message and stack will be copied
     * over from the error object, along with other telemetry properties if it's an ILoggingError.
     * @param event - Event being logged
     * @param error - Error to extract info from
     * @param fetchStack - Whether to fetch the current callstack if error.stack is undefined
     */
    public static prepareErrorObject(event: ITelemetryBaseEvent, error: any, fetchStack: boolean) {
        const { message, errorType, stack } = extractLogSafeErrorProperties(error, true /* sanitizeStack */);
        // First, copy over error message, stack, and errorType directly (overwrite if present on event)
        event.stack = stack;
        event.error = message; // Note that the error message goes on the 'error' field
        event.errorType = errorType;
        if (isILoggingError(error)) {
            // Add any other telemetry properties from the LoggingError
            const telemetryProp = error.getTelemetryProperties();
            for (const key of Object.keys(telemetryProp)) {
                if (event[key] !== undefined) {
                    // Don't overwrite existing properties on the event
                    continue;
                }
                event[key] = telemetryProp[key];
            }
        }
        // Collect stack if we were not able to extract it from error
        if (event.stack === undefined && fetchStack) {
            event.stack = generateStack();
        }
    }
    /**
     * @param namespace - Optional prefix prepended (with [[eventNamespaceSeparator]]) to every event name.
     * @param properties - Optional property bags merged into every event (see [[prepareEvent]]).
     */
    public constructor(
        protected readonly namespace?: string,
        protected readonly properties?: ITelemetryLoggerPropertyBags) {
    }
    /**
     * Send an event with the logger
     *
     * @param event - the event to send
     */
    public abstract send(event: ITelemetryBaseEvent): void;
    /**
     * Send a telemetry event with the logger
     *
     * @param event - the event to send (category defaults to "generic")
     * @param error - optional error object to log
     */
    public sendTelemetryEvent(event: ITelemetryGenericEvent, error?: any) {
        this.sendTelemetryEventCore({ ...event, category: event.category ?? "generic" }, error);
    }
    /**
     * Send a telemetry event with the logger, after folding in error info and flooring the duration.
     *
     * @param event - the event to send
     * @param error - optional error object to log
     */
    protected sendTelemetryEventCore(
        event: ITelemetryGenericEvent & { category: TelemetryEventCategory },
        error?: any) {
        const newEvent = { ...event };
        if (error !== undefined) {
            TelemetryLogger.prepareErrorObject(newEvent, error, false);
        }
        // Will include NaN & Infinity, but probably we do not care
        if (typeof newEvent.duration === "number") {
            newEvent.duration = TelemetryLogger.formatTick(newEvent.duration);
        }
        this.send(newEvent);
    }
    /**
     * Send an error telemetry event with the logger
     *
     * @param event - the event to send (category is forced to "error")
     * @param error - optional error object to log
     */
    public sendErrorEvent(event: ITelemetryErrorEvent, error?: any) {
        this.sendTelemetryEventCore({ ...event, category: "error" }, error);
    }
    /**
     * Send a performance telemetry event with the logger
     *
     * @param event - Event to send (category defaults to "performance")
     * @param error - optional error object to log
     */
    public sendPerformanceEvent(event: ITelemetryPerformanceEvent, error?: any): void {
        const perfEvent = {
            ...event,
            category: event.category ?? "performance",
        };
        this.sendTelemetryEventCore(perfEvent, error);
    }
    /**
     * Prefix the event name with this logger's namespace and merge in the configured property bags:
     * "all" always, "error" additionally for error events. Explicit event properties win over bag values.
     */
    protected prepareEvent(event: ITelemetryBaseEvent): ITelemetryBaseEvent {
        const includeErrorProps = event.category === "error" || event.error !== undefined;
        const newEvent: ITelemetryBaseEvent = {
            ...event,
        };
        if (this.namespace !== undefined) {
            newEvent.eventName = `${this.namespace}${TelemetryLogger.eventNamespaceSeparator}${newEvent.eventName}`;
        }
        if (this.properties) {
            const properties: (undefined | ITelemetryLoggerPropertyBag)[] = [];
            properties.push(this.properties.all);
            if (includeErrorProps) {
                properties.push(this.properties.error);
            }
            for (const props of properties) {
                if (props !== undefined) {
                    for (const key of Object.keys(props)) {
                        if (event[key] !== undefined) {
                            continue;
                        }
                        const getterOrValue = props[key];
                        // If this throws, hopefully it is handled elsewhere
                        const value = typeof getterOrValue === "function" ? getterOrValue() : getterOrValue;
                        if (value !== undefined) {
                            newEvent[key] = value;
                        }
                    }
                }
            }
        }
        return newEvent;
    }
}
/**
 * TaggedLoggerAdapter class can add tag handling to your logger.
 * Wraps a base logger and, per property, passes through or redacts values based on their [[TelemetryDataTag]].
 */
export class TaggedLoggerAdapter implements ITelemetryBaseLogger {
    public constructor(
        private readonly logger: ITelemetryBaseLogger) {
    }
    /** Forward the event to the wrapped logger, redacting tagged properties as required. */
    public send(eventWithTagsMaybe: ITelemetryBaseEvent) {
        const newEvent: ITelemetryBaseEvent = {
            category: eventWithTagsMaybe.category,
            eventName: eventWithTagsMaybe.eventName,
        };
        for (const key of Object.keys(eventWithTagsMaybe)) {
            const taggableProp = eventWithTagsMaybe[key];
            // Guard against null: typeof null === "object", and destructuring null would throw a TypeError.
            const { value, tag } = (typeof taggableProp === "object" && taggableProp !== null)
                ? taggableProp
                : { value: taggableProp, tag: undefined };
            switch (tag) {
                case undefined:
                    // No tag means we can log plainly
                    newEvent[key] = value;
                    break;
                case TelemetryDataTag.PackageData:
                    // For Microsoft applications, PackageData is safe for now
                    // (we don't load 3P code in 1P apps)
                    newEvent[key] = value;
                    break;
                case TelemetryDataTag.UserData:
                    // Strip out anything tagged explicitly as PII.
                    // Alternate strategy would be to hash these props
                    newEvent[key] = "REDACTED (UserData)";
                    break;
                default:
                    // If we encounter a tag we don't recognize
                    // then we must assume we should scrub.
                    newEvent[key] = "REDACTED (unknown tag)";
                    break;
            }
        }
        this.logger.send(newEvent);
    }
}
/**
 * ChildLogger class contains various helper telemetry methods,
 * encoding in one place schemas for various types of Fluid telemetry events.
 * Creates sub-logger that appends properties to all events.
 */
export class ChildLogger extends TelemetryLogger {
    /**
     * Create child logger
     * @param baseLogger - Base logger to use to output events. If undefined, proper child logger
     * is created, but it does not sends telemetry events anywhere.
     * @param namespace - Telemetry event name prefix to add to all events
     * @param properties - Base properties to add to all events
     */
    public static create(
        baseLogger?: ITelemetryBaseLogger,
        namespace?: string,
        properties?: ITelemetryLoggerPropertyBags): TelemetryLogger {
        // When wrapping an existing ChildLogger, flatten rather than nest: build a single logger
        // carrying the merged property bags and the combined namespace, to avoid callstack overhead.
        if (baseLogger instanceof ChildLogger) {
            const merged: ITelemetryLoggerPropertyBags = {};
            for (const bag of [baseLogger.properties, properties]) {
                if (bag === undefined) {
                    continue;
                }
                // Later bags win over earlier ones; "all" and "error" are merged independently.
                if (bag.all !== undefined) {
                    merged.all = { ...merged.all, ...bag.all };
                }
                if (bag.error !== undefined) {
                    merged.error = { ...merged.error, ...bag.error };
                }
            }
            let combinedNamespace: string | undefined;
            if (baseLogger.namespace === undefined) {
                combinedNamespace = namespace;
            } else if (namespace === undefined) {
                combinedNamespace = baseLogger.namespace;
            } else {
                combinedNamespace = `${baseLogger.namespace}${TelemetryLogger.eventNamespaceSeparator}${namespace}`;
            }
            return new ChildLogger(baseLogger.baseLogger, combinedNamespace, merged);
        }
        return new ChildLogger(
            baseLogger ?? new BaseTelemetryNullLogger(),
            namespace,
            properties);
    }
    private constructor(
        protected readonly baseLogger: ITelemetryBaseLogger,
        namespace?: string,
        properties?: ITelemetryLoggerPropertyBags) {
        super(namespace, properties);
    }
    /**
     * Send an event with the logger
     *
     * @param event - the event to send
     */
    public send(event: ITelemetryBaseEvent): void {
        this.baseLogger.send(this.prepareEvent(event));
    }
}
/**
 * Multi-sink logger
 * Takes multiple ITelemetryBaseLogger objects (sinks) and logs all events into each sink.
 * Implements ITelemetryBaseLogger (through static create() method).
 */
export class MultiSinkLogger extends TelemetryLogger {
    protected loggers: ITelemetryBaseLogger[] = [];
    /**
     * Create multiple sink logger (i.e. logger that sends events to multiple sinks)
     * @param namespace - Telemetry event name prefix to add to all events
     * @param properties - Base properties to add to all events
     */
    constructor(
        namespace?: string,
        properties?: ITelemetryLoggerPropertyBags) {
        super(namespace, properties);
    }
    /**
     * Add logger to send all events to
     * @param logger - Logger to add; null/undefined values are silently ignored.
     */
    public addLogger(logger?: ITelemetryBaseLogger) {
        // `!= null` deliberately filters both null and undefined.
        if (logger != null) {
            this.loggers.push(logger);
        }
    }
    /**
     * Send an event to the loggers
     *
     * @param event - the event to send to all the registered logger
     */
    public send(event: ITelemetryBaseEvent): void {
        // Prepare once, fan out to every sink.
        const prepared = this.prepareEvent(event);
        for (const sink of this.loggers) {
            sink.send(prepared);
        }
    }
}
/**
 * Describes what events PerformanceEvent should log.
 * By default, all events are logged, but the client can override this behavior.
 * For example, there is rarely a need to record a start event, as we are really after
 * success / failure tracking, including duration (on success).
 */
export interface IPerformanceEventMarkers {
    /** Log a "start" event when the PerformanceEvent is created. */
    start?: true;
    /** Log an "end" event on successful completion (see PerformanceEvent.autoEnd). */
    end?: true;
    cancel?: "generic" | "error"; // tells whether to issue "generic" or "error" category cancel event
}
/**
 * Helper class to log performance events.
 * An instance represents a single tracked operation: it records a start time on construction and can
 * emit start/progress/end/cancel events (controlled by [[IPerformanceEventMarkers]]) through the given logger.
 */
export class PerformanceEvent {
    /** Begin tracking an operation; emits a "start" event only if markers.start is set. */
    public static start(logger: ITelemetryLogger, event: ITelemetryGenericEvent, markers?: IPerformanceEventMarkers) {
        return new PerformanceEvent(logger, event, markers);
    }
    /** Run a synchronous callback, emitting end on success or cancel on a thrown error (which is rethrown). */
    public static timedExec<T>(
        logger: ITelemetryLogger,
        event: ITelemetryGenericEvent,
        callback: (event: PerformanceEvent) => T,
        markers?: IPerformanceEventMarkers,
    ) {
        const perfEvent = PerformanceEvent.start(logger, event, markers);
        try {
            const ret = callback(perfEvent);
            perfEvent.autoEnd();
            return ret;
        } catch (error) {
            perfEvent.cancel(undefined, error);
            throw error;
        }
    }
    /** Async variant of [[timedExec]]: awaits the callback before emitting end/cancel. */
    public static async timedExecAsync<T>(
        logger: ITelemetryLogger,
        event: ITelemetryGenericEvent,
        callback: (event: PerformanceEvent) => Promise<T>,
        markers?: IPerformanceEventMarkers,
    ) {
        const perfEvent = PerformanceEvent.start(logger, event, markers);
        try {
            const ret = await callback(perfEvent);
            perfEvent.autoEnd();
            return ret;
        } catch (error) {
            perfEvent.cancel(undefined, error);
            throw error;
        }
    }
    /** Milliseconds elapsed since this event was constructed. */
    public get duration() { return performance.now() - this.startTime; }
    // Cleared (set to undefined) once the event ends or is cancelled; used to suppress double-reporting.
    private event?: ITelemetryGenericEvent;
    private readonly startTime = performance.now();
    // Name of the browser performance mark created at start, if window.performance is available.
    private startMark?: string;
    protected constructor(
        private readonly logger: ITelemetryLogger,
        event: ITelemetryGenericEvent,
        private readonly markers: IPerformanceEventMarkers = {end: true, cancel: "generic"},
    ) {
        this.event = { ...event };
        if (this.markers.start) {
            this.reportEvent("start");
        }
        // Record a browser performance mark so the operation shows up in devtools timelines (browser only).
        if (typeof window === "object" && window != null && window.performance) {
            this.startMark = `${event.eventName}-start`;
            window.performance.mark(this.startMark);
        }
    }
    /** Emit an intermediate progress event; the suffix defaults to "update". */
    public reportProgress(props?: ITelemetryProperties, eventNameSuffix: string = "update"): void {
        this.reportEvent(eventNameSuffix, props);
    }
    private autoEnd() {
        // Event might have been cancelled or ended in the callback
        if (this.event && this.markers.end) {
            this.reportEvent("end");
        }
        this.performanceEndMark();
        this.event = undefined;
    }
    /** Emit the "end" event (unconditionally, regardless of markers.end) and stop tracking. */
    public end(props?: ITelemetryProperties): void {
        this.reportEvent("end", props);
        this.performanceEndMark();
        this.event = undefined;
    }
    private performanceEndMark() {
        // Close out the browser performance measure begun in the constructor, if any.
        if (this.startMark && this.event) {
            const endMark = `${this.event.eventName}-end`;
            window.performance.mark(endMark);
            window.performance.measure(`${this.event.eventName}`, this.startMark, endMark);
            this.startMark = undefined;
        }
    }
    /** Emit a "cancel" event (with the category configured by markers.cancel) and stop tracking. */
    public cancel(props?: ITelemetryProperties, error?: any): void {
        if (this.markers.cancel !== undefined) {
            this.reportEvent("cancel", {category: this.markers.cancel, ...props}, error);
        }
        this.event = undefined;
    }
    /**
     * Report the event, if it hasn't already been reported.
     * The suffix is appended to the event name; duration is attached for all but "start".
     */
    public reportEvent(eventNameSuffix: string, props?: ITelemetryProperties, error?: any) {
        // There are strange sequences involving multiple Promise chains
        // where the event can be cancelled and then later a callback is invoked
        // and the caller attempts to end directly, e.g. issue #3936. Just return.
        if (!this.event) {
            return;
        }
        const event: ITelemetryPerformanceEvent = { ...this.event, ...props };
        event.eventName = `${event.eventName}_${eventNameSuffix}`;
        if (eventNameSuffix !== "start") {
            event.duration = this.duration;
        }
        this.logger.sendPerformanceEvent(event, error);
    }
}
/**
* Logger that is useful for UT
* It can be used in places where logger instance is required, but events should be not send over.
*/
export class TelemetryUTLogger implements ITelemetryLogger {
public send(event: ITelemetryBaseEvent): void {
}
public sendTelemetryEvent(event: ITelemetryGenericEvent, error?: any) {
}
public sendErrorEvent(event: ITelemetryErrorEvent, error?: any) {
this.reportError("errorEvent in UT logger!", event, error);
}
public sendPerformanceEvent(event: ITelemetryPerformanceEvent, error?: any): void {
}
public logGenericError(eventName: string, error: any) {
this.reportError(`genericError in UT logger!`, { eventName }, error);
}
public logException(event: ITelemetryErrorEvent, exception: any): void {
this.reportError("exception in UT logger!", event, exception);
}
public debugAssert(condition: boolean, event?: ITelemetryErrorEvent): void {
this.reportError("debugAssert in UT logger!");
}
public shipAssert(condition: boolean, event?: ITelemetryErrorEvent): void {
this.reportError("shipAssert in UT logger!");
}
private reportError(message: string, event?: ITelemetryErrorEvent, err?: any) {
const error = new Error(message);
(error as any).error = error;
(error as any).event = event;
// report to console as exception can be eaten
console.error(message);
console.error(error);
throw error;
}
} | the_stack |
import { Percentage } from '.'
import { RBType, serializeAtomicValue } from '.'
/**
 * An integer RGB channel value in the inclusive range 0-255, expressed as a union of literals.
 * @typeTag IntRange
 * @added 0.1.4
 */
export type RGBInteger =
  | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15
  | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31
  | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47
  | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63
  | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79
  | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95
  | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111
  | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127
  | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143
  | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159
  | 160 | 161 | 162 | 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | 173 | 174 | 175
  | 176 | 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 191
  | 192 | 193 | 194 | 195 | 196 | 197 | 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | 207
  | 208 | 209 | 210 | 211 | 212 | 213 | 214 | 215 | 216 | 217 | 218 | 219 | 220 | 221 | 222 | 223
  | 224 | 225 | 226 | 227 | 228 | 229 | 230 | 231 | 232 | 233 | 234 | 235 | 236 | 237 | 238 | 239
  | 240 | 241 | 242 | 243 | 244 | 245 | 246 | 247 | 248 | 249 | 250 | 251 | 252 | 253 | 254 | 255
// Channel tuples accepted by the colour constructors below: RGB(A) takes
// either all integer channels (0–255) or all percentages; HSL(A) takes a
// numeric hue (presumably degrees — not enforced here) plus two percentages.
// The trailing `number` in the alpha variants is the alpha channel.
export type RGBInput = [RGBInteger, RGBInteger, RGBInteger] | [Percentage, Percentage, Percentage]
export type RGBAInput = [RGBInteger, RGBInteger, RGBInteger, number] | [Percentage, Percentage, Percentage, number]
export type HSLInput = [number, Percentage, Percentage]
export type HSLAInput = [number, Percentage, Percentage, number]
// Renders an RGB value as a CSS `rgb(r, g, b)` string.
// NOTE(review): Math.round assumes numeric channels; percentage-tuple data
// would round to NaN here — confirm intended handling upstream.
const serializeRGB = (x: RGB) => {
  const channels = (x.data as number[]).map((channel) => serializeAtomicValue(Math.round(channel)))
  return `rgb(${channels.join(', ')})`
}
// Renders an RGBA value as a CSS `rgba(r, g, b, a)` string; the three colour
// channels are rounded, the alpha channel is serialized untouched.
const serializeRGBA = (x: RGBA) => {
  const colorChannels = (x.data as any[])
    .slice(0, 3)
    .map((channel) => serializeAtomicValue(Math.round(channel)))
  const alpha = serializeAtomicValue(x.data[3])
  return `rgba(${colorChannels.join(', ')}, ${alpha})`
}
// Renders an HSL value as a CSS `hsl(h, s, l)` string; no rounding is applied.
const serializeHSL = (x: HSL) => {
  const [hue, saturation, lightness] = x.data
  return `hsl(${serializeAtomicValue(hue)}, ${serializeAtomicValue(saturation)}, ${serializeAtomicValue(lightness)})`
}
// Renders an HSLA value as a CSS `hsla(h, s, l, a)` string; no rounding is applied.
const serializeHSLA = (x: HSLA) => {
  const [hue, saturation, lightness, alpha] = x.data
  return `hsla(${serializeAtomicValue(hue)}, ${serializeAtomicValue(saturation)}, ${serializeAtomicValue(lightness)}, ${serializeAtomicValue(alpha)})`
}
/**
 *
 * A type that maps to CSS's **`<rgb()>`**
 * @added 0.1.4
 */
export class RGB implements RBType<RGBInput> {
  valueConstructor: Function
  data: RGBInput
  serialize: () => string
  // Not directly constructible — use the static `rgb` factory.
  private constructor(channels: RGBInput) {
    this.data = channels
    this.serialize = () => serializeRGB(this)
    this.valueConstructor = RGB.rgb
  }
  /**
   * Constructs a value of type **`RGB`** from three channels, which must be
   * either all integers (0–255) or all percentages.
   */
  static rgb(x1: Percentage, x2: Percentage, x3: Percentage): RGB
  static rgb(x1: RGBInteger, x2: RGBInteger, x3: RGBInteger): RGB
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  static rgb(x1: any, x2: any, x3: any): RGB {
    return new RGB([x1, x2, x3])
  }
}
/** Convenience alias for {@link RGB.rgb}. */
export const rgb = RGB.rgb
/**
 *
 * A type that maps to CSS's **`<rgba()>`**.
 * @added 0.1.4
 */
export class RGBA implements RBType<RGBAInput> {
  valueConstructor: Function
  data: RGBAInput
  serialize: () => string
  // Not directly constructible — use the static `rgba` factory.
  private constructor(channels: RGBAInput) {
    this.data = channels
    this.serialize = () => serializeRGBA(this)
    this.valueConstructor = RGBA.rgba
  }
  /**
   * Constructs a value of type **`RGBA`** from three colour channels (all
   * integers 0–255 or all percentages) plus a numeric alpha channel.
   */
  static rgba(x1: Percentage, x2: Percentage, x3: Percentage, x4: number): RGBA
  static rgba(x1: RGBInteger, x2: RGBInteger, x3: RGBInteger, x4: number): RGBA
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  static rgba(x1: any, x2: any, x3: any, x4: number): RGBA {
    return new RGBA([x1, x2, x3, x4])
  }
}
/** Convenience alias for {@link RGBA.rgba}. */
export const rgba = RGBA.rgba
/**
 *
 * A type that maps to CSS's **`<hsl()>`**.
 * @added 0.1.4
 */
export class HSL implements RBType<HSLInput> {
  valueConstructor: Function
  data: HSLInput
  serialize: () => string
  // Not directly constructible — use the static `hsl` factory.
  private constructor(components: HSLInput) {
    this.data = components
    this.serialize = () => serializeHSL(this)
    this.valueConstructor = HSL.hsl
  }
  /**
   * Constructs a value of type **`HSL`** from a numeric hue and two
   * percentage components (saturation and lightness).
   */
  static hsl(x1: number, x2: Percentage, x3: Percentage): HSL {
    return new HSL([x1, x2, x3])
  }
}
/** Convenience alias for {@link HSL.hsl}. */
export const hsl = HSL.hsl
/**
 *
 * A type that maps to CSS's **`<hsla()>`**.
 * @added 0.1.0
 */
export class HSLA implements RBType<HSLAInput> {
  valueConstructor: Function
  data: HSLAInput
  serialize: () => string
  // Not directly constructible — use the static `hsla` factory.
  private constructor(components: HSLAInput) {
    this.data = components
    this.serialize = () => serializeHSLA(this)
    this.valueConstructor = HSLA.hsla
  }
  /**
   * Constructs a value of type **`HSLA`** from a numeric hue, two percentage
   * components (saturation and lightness) and a numeric alpha channel.
   */
  static hsla(x1: number, x2: Percentage, x3: Percentage, x4: number): HSLA {
    return new HSLA([x1, x2, x3, x4])
  }
}
/** Convenience alias for {@link HSLA.hsla}. */
export const hsla = HSLA.hsla
/**
 *
 * A type that maps to CSS's **`<hex-color>`**.
 * @added 0.1.4
 * @note Currently this type is just a wrapper around the **`string`** type, and is not safe, consider using other color types if you need type-safe colors.
 */
export class HEX implements RBType<string> {
  valueConstructor: Function
  data: string
  serialize: () => string
  // Not directly constructible — use the static `hex` factory.
  private constructor(rawHex: string) {
    this.data = rawHex
    this.valueConstructor = HEX.hex
    // Hex colours serialize verbatim: the wrapped string is emitted as-is.
    this.serialize = (): string => this.data
  }
  /**
   * Constructs a value of type **`HEX`**.
   */
  static hex(x: string): HEX {
    return new HEX(x)
  }
}
/** Convenience alias for {@link HEX.hex}. */
export const hex = HEX.hex
/**
 * CSS extended (named) colour keywords.
 * Fix: the list previously contained 'orange' twice, which made the array's
 * length disagree with its set of distinct members; the duplicate is removed.
 */
const extendedColorKeywords = [
  'aliceblue',
  'antiquewhite',
  'aqua',
  'aquamarine',
  'azure',
  'beige',
  'bisque',
  'black',
  'blanchedalmond',
  'blue',
  'blueviolet',
  'brown',
  'burlywood',
  'cadetblue',
  'chartreuse',
  'chocolate',
  'coral',
  'cornflowerblue',
  'cornsilk',
  'crimson',
  'cyan',
  'darkblue',
  'darkcyan',
  'darkgoldenrod',
  'darkgray',
  'darkgreen',
  'darkgrey',
  'darkkhaki',
  'darkmagenta',
  'darkolivegreen',
  'darkorange',
  'darkorchid',
  'darkred',
  'darksalmon',
  'darkseagreen',
  'darkslateblue',
  'darkslategray',
  'darkslategrey',
  'darkturquoise',
  'darkviolet',
  'deeppink',
  'deepskyblue',
  'dimgray',
  'dimgrey',
  'dodgerblue',
  'firebrick',
  'floralwhite',
  'forestgreen',
  'fuchsia',
  'gainsboro',
  'ghostwhite',
  'gold',
  'goldenrod',
  'gray',
  'green',
  'greenyellow',
  'grey',
  'honeydew',
  'hotpink',
  'indianred',
  'indigo',
  'ivory',
  'khaki',
  'lavender',
  'lavenderblush',
  'lawngreen',
  'lemonchiffon',
  'lightblue',
  'lightcoral',
  'lightcyan',
  'lightgoldenrodyellow',
  'lightgray',
  'lightgreen',
  'lightgrey',
  'lightpink',
  'lightsalmon',
  'lightseagreen',
  'lightskyblue',
  'lightslategray',
  'lightslategrey',
  'lightsteelblue',
  'lightyellow',
  'lime',
  'limegreen',
  'linen',
  'magenta',
  'maroon',
  'mediumaquamarine',
  'mediumblue',
  'mediumorchid',
  'mediumpurple',
  'mediumseagreen',
  'mediumslateblue',
  'mediumspringgreen',
  'mediumturquoise',
  'mediumvioletred',
  'midnightblue',
  'mintcream',
  'mistyrose',
  'moccasin',
  'navajowhite',
  'navy',
  'oldlace',
  'olive',
  'olivedrab',
  'orange',
  'orangered',
  'orchid',
  'palegoldenrod',
  'palegreen',
  'paleturquoise',
  'palevioletred',
  'papayawhip',
  'peachpuff',
  'peru',
  'pink',
  'plum',
  'powderblue',
  'purple',
  'red',
  'rosybrown',
  'royalblue',
  'saddlebrown',
  'salmon',
  'sandybrown',
  'seagreen',
  'seashell',
  'sienna',
  'silver',
  'skyblue',
  'slateblue',
  'slategray',
  'slategrey',
  'snow',
  'springgreen',
  'steelblue',
  'tan',
  'teal',
  'thistle',
  'tomato',
  'turquoise',
  'violet',
  'wheat',
  'white',
  'whitesmoke',
  'yellow',
  'yellowgreen',
] as const
/** Union of all extended colour keyword literals (e.g. 'aliceblue' | … | 'yellowgreen'). */
export type NamedColorKeyword = typeof extendedColorKeywords[number]
// Colour keywords that are valid <color> values but are not named colours.
export const standaloneKeywords = ['currentColor', 'transparent'] as const
export type StandaloneColorKeyword = typeof standaloneKeywords[number]
/**
 * @global
 * A type that maps to CSS's **`<color>`** data-type
 * */
export type Color = HEX | RGB | RGBA | HSL | HSLA | StandaloneColorKeyword | NamedColorKeyword
// Runtime type guards for the class-based colour representations.
export const isHex = (x: unknown): x is HEX => x instanceof HEX
export const isRGB = (x: unknown): x is RGB => x instanceof RGB
export const isRGBA = (x: unknown): x is RGBA => x instanceof RGBA
export const isHSL = (x: unknown): x is HSL => x instanceof HSL
export const isHSLA = (x: unknown): x is HSLA => x instanceof HSLA
// `unknown` instead of `any` keeps the guards type-safe for callers; the
// readonly-array widening cast is needed because Array.prototype.includes
// only accepts the element type. Behavior is unchanged.
/** Type guard for the extended (named) CSS colour keywords. */
export const isExtendedColorKeyword = (value: unknown): value is NamedColorKeyword =>
  (extendedColorKeywords as readonly unknown[]).includes(value)
/** Type guard for the standalone keywords 'currentColor' and 'transparent'. */
export const isStandaloneColorKeyword = (value: unknown): value is StandaloneColorKeyword =>
  (standaloneKeywords as readonly unknown[]).includes(value)
/**
*
* @deprecated due to performance issues
*/
export const isColor = (value: unknown): value is Color =>
isHex(value) ||
isRGB(value) ||
isRGBA(value) ||
isHSL(value) ||
isHSLA(value) ||
isStandaloneColorKeyword(value) ||
isExtendedColorKeyword(value) | the_stack |
import {
serializable,
alias,
date,
list,
map,
mapAsArray,
object,
optional,
identifier,
reference,
primitive,
serialize,
cancelDeserialize,
deserialize,
serializeAll,
getDefaultModelSchema,
custom,
AdditionalPropArgs,
SKIP,
} from "../../"
import { observable, autorun } from "mobx"
import test = require("tape")
// Verifies the decorator forms of @serializable (bare, with an explicit
// schema, and @serializable(true)) together with mobx observability:
// serialize() inside an autorun re-runs only when an observable serializable
// property changes, and deserialize() produces a real class instance.
test("should work in typescript", (t) => {
    class A {
        @serializable
        @observable
        w
        @serializable
        @observable
        x = 3
        @observable
        @serializable(primitive())
        y = 4
        @serializable(true)
        z = 5 // serializable but NOT observable — changes don't retrigger the autorun
    }
    const a = new A()
    let res
    let called = 0
    autorun(() => {
        called++
        res = serialize(a)
    })
    t.equal(called, 1)
    t.deepEqual(res, { w: undefined, x: 3, y: 4, z: 5 })
    a.z++ // no autorun
    t.equal(a.z, 6)
    a.y++
    t.equal(called, 2)
    t.deepEqual(res, { w: undefined, x: 3, y: 5, z: 6 })
    a.x++
    t.equal(called, 3)
    t.deepEqual(res, { w: undefined, x: 4, y: 5, z: 6 })
    const b = deserialize(A, { x: 1, y: 2, z: 3 })
    t.deepEqual(serialize(b), { w: undefined, x: 1, y: 2, z: 3 })
    t.ok(b instanceof A)
    t.end()
})
// A class whose constructor takes arguments must still round-trip through
// serialize/deserialize; alias() renames properties in the JSON and
// optional() omits keys whose value is undefined.
test("typescript class with constructor params", (t) => {
    class Rectangle {
        @serializable
        public someNumber: number
        @serializable(alias("identifier", identifier()))
        public id: string
        @serializable(alias("desc", optional()))
        public description?: string
        @serializable(alias("width", true))
        public width: number
        @serializable(alias("height", true))
        public height: number
        constructor(id: string, width: number, height: number) {
            this.id = id
            this.width = width
            this.height = height
        }
        public getArea(): number {
            return this.width * this.height
        }
    }
    const a = new Rectangle("A", 10, 20)
    a.someNumber = 123
    let json = serialize(a)
    // optional() drops the aliased "desc" key entirely while it is undefined
    t.equal(false, json.hasOwnProperty("desc"))
    t.equal(false, json.hasOwnProperty("description"))
    const b = deserialize(Rectangle, json)
    t.equal(a.id, b.id)
    t.equal(a.width, b.width)
    t.equal(a.height, b.height)
    t.equal(a.someNumber, b.someNumber)
    // methods are available because deserialize yields a Rectangle instance
    t.equal(b.getArea(), 200)
    a.description = "example"
    json = serialize(a)
    t.equal("example", json["desc"])
    t.equal(false, json.hasOwnProperty("description"))
    t.end()
})
// Same as above but with no non-constructor serializable members: all
// aliased constructor-backed properties must round-trip intact.
test("typescript class with only constructor params", (t) => {
    class Rectangle {
        @serializable(alias("identifier", identifier()))
        public id: string
        @serializable(alias("width", true))
        public width: number
        @serializable(alias("height", true))
        public height: number
        constructor(id: string, width: number, height: number) {
            this.id = id
            this.width = width
            this.height = height
        }
    }
    const a = new Rectangle("A", 10, 20)
    let json = serialize(a)
    const b = deserialize(Rectangle, json)
    t.equal(a.id, b.id)
    t.equal(a.width, b.width)
    t.equal(a.height, b.height)
    t.end()
})
// Serializable properties declared on a base class must be merged into the
// subclass schema when serializing an instance of the subclass.
test("[ts] it should handle prototypes", (t) => {
    class A {
        @serializable a = "hoi"
        @serializable a2 = "oeps"
    }
    class B extends A {
        @serializable b = "boe"
        @serializable b2 = "oef"
    }
    t.deepEqual(serialize(new A()), {
        a: "hoi",
        a2: "oeps",
    })
    t.deepEqual(serialize(new B()), {
        a: "hoi",
        a2: "oeps",
        b: "boe",
        b2: "oef",
    })
    t.end()
})
// custom() accepts three deserializer shapes: synchronous (returns the
// value), callback-based (invokes done immediately), and truly asynchronous
// (invokes done on a later tick). All three must produce the same result
// through the callback form of deserialize().
test("[ts] custom prop schemas", (t) => {
    function customSerializer(v) {
        return v
    }
    function customDeserializer(jsonValue, context, oldValue) {
        return jsonValue
    }
    function customCallbackDeserializer(jsonValue, context, oldValue, done) {
        done(null, jsonValue)
    }
    function customAsyncDeserializer(jsonValue, context, oldValue, done) {
        setTimeout(() => {
            done(null, jsonValue)
        }, 1)
    }
    class A {
        @serializable(custom(customSerializer, customDeserializer)) a = "hoi"
        @serializable(custom(customSerializer, customCallbackDeserializer)) a2 = "oeps"
        @serializable(custom(customSerializer, customAsyncDeserializer)) a3 = "lulu"
    }
    let result = serialize(new A())
    const initial = {
        a: "hoi",
        a2: "oeps",
        a3: "lulu",
    }
    const updated = {
        a: "all",
        a2: "new",
        a3: "lala",
    }
    t.deepEqual(result, initial)
    deserialize(A, updated, (err, resultObj) => {
        // end early on error (execution falls through, but t.end(err) fails the test)
        err ? t.end(err) : null
        result = serialize(resultObj)
        t.deepEqual(result, updated)
        t.end()
    })
})
// Skipped: forward references to class schemas declared later don't work
// because class declarations (unlike function declarations) are not hoisted.
test.skip("[ts] it should handle not yet defined modelschema's for classes", (t) => {
    // classes are declared as var, not as function, so aren't hoisted :'(
    class Comment {
        @serializable(identifier()) id = 0
        @serializable(true) title
    }
    class Message {
        @serializable(list(object(Comment)))
        child = []
        @serializable(reference(Comment))
        ref = null
    }
    const json = {
        ref: 1,
        child: [
            { id: 2, title: "foo" },
            { id: 1, title: "bar " },
        ],
    }
    const m = deserialize(Message, json)
    t.equal(m.child.length, 2)
    // the reference must resolve to the same object instance as the list entry
    t.ok(m.child[1] === m.ref)
    t.deepEqual(serialize(m), json)
    t.end()
})
// Pins the serialize() output of a partially populated instance: assigned
// lists serialize as arrays, unassigned plain props appear as undefined.
test("[ts] array parameters", (t) => {
    class User {
        @serializable nick
        @serializable age
        @serializable gender
        @serializable(list(primitive())) hobbies
        @serializable(list(primitive())) friends
    }
    const user = new User()
    user.age = 22
    user.nick = "Nick"
    user.hobbies = ["debugging"]
    const result = serialize(user)
    t.deepEqual(result, { age: 22, nick: "Nick", gender: undefined, hobbies: ["debugging"] })
    t.end()
})
// End-to-end exercise of the beforeDeserialize/afterDeserialize lifecycle
// hooks: jsonInput uses "<name>1"-suffixed keys and is littered with invalid
// entries; the hooks rename keys, filter/repair bad items and recover from a
// simulated error, so that re-serializing the result yields jsonResult.
test("[ts] additional lifecycle handlers 'beforeDeserialize' and 'afterDeserialize'", (t) => {
    const jsonInput = {
        id1: "1101",
        id11: 1102, // extra key not mapped by any schema prop — ignored
        custom1: 2,
        customAsync1: "trigger error", // makes customAsyncDeserializer fail; afterDeserialize recovers
        date1: 1534021029937,
        listObj1: [
            {
                id1: "1121",
                text1: "good data",
                valid: true,
            },
            {
                id1: "1122",
                text1: "ignored",
                valid: false,
            },
            {
                id1: "1123",
                text1: "good data",
                valid: true,
            },
            null,
            undefined,
            1234,
            "invalid",
        ],
        listRefObj1: [
            "1121",
            "1122",
            "1123",
            "1234",
            "1131",
            "1132",
            "1133",
            "1134",
            undefined,
            null,
            1234,
            "invalid",
            "1121",
        ],
        mapObj1: {
            1131: {
                id1: "1131",
                text1: "good data",
                valid: true,
            },
            1132: {
                id1: "1132",
                text1: "ignored",
                valid: false,
            },
            1133: {
                id1: "1133",
                text1: "good data",
                valid: true,
            },
            1134: null,
            1234: null,
        },
        mapRefObj1: {
            1131: "1131",
            1132: "1132",
            1133: "1133",
            1134: "1134",
            1234: "1234",
        },
        mapArrayRefObj1: ["1131", "1132", "1133", "1134", "1234"],
        obj1: {
            id1: "1141",
            text1: "yee",
            valid: true,
        },
        primitiveNumber1: 12,
        primitiveText1: "foo",
        aliasText: "yo",
    }
    // Expected round-trip output once every hook has run.
    const jsonResult = {
        id: "1101",
        custom: 2,
        customAsync: "ok now",
        date: 1534021029937,
        listObj: [
            {
                id: "1121",
                text: "good data",
                valid: true,
            },
            {
                id: "1123",
                text: "good data",
                valid: true,
            },
        ],
        listRefObj: ["1121", "1123", "1131", "1133", "1121"],
        mapObj: {
            1131: {
                id: "1131",
                text: "good data",
                valid: true,
            },
            1133: {
                id: "1133",
                text: "good data",
                valid: true,
            },
        },
        mapRefObj: {
            1131: "1131",
            1133: "1133",
        },
        mapArrayRefObj: ["1131", "1133"],
        obj: {
            id: "1141",
            text: "yee",
            valid: true,
        },
        primitiveNumber: 12,
        primitiveText: "foo hee haa",
        aliasText: "yo hee haa",
    }
    function customSerializer(v) {
        return v
    }
    function customDeserializer(jsonValue, context, oldValue) {
        return jsonValue
    }
    function customAsyncDeserializer(jsonValue, context, oldValue, done) {
        if (jsonValue === "trigger error") {
            done(new Error("this error should be overruled in afterDeserialize"))
        } else {
            done(null, jsonValue)
        }
    }
    // Reads the value from the "<propName>1" key of the parent JSON object.
    const renameOpts = {
        beforeDeserialize: function (callback, jsonValue, jsonParentValue, propNameOrIndex) {
            const jsonAttrName = propNameOrIndex + "1"
            jsonValue = jsonValue || jsonParentValue[jsonAttrName]
            callback(null, jsonValue)
        },
    }
    // Renames like renameOpts, then appends " hee" before and " haa" after
    // deserialization to prove both hooks run and compose.
    const replaceValueOpts: AdditionalPropArgs = {
        beforeDeserialize: function (callback, jsonValue, jsonParentValue, propNameOrIndex) {
            const jsonAttrName = propNameOrIndex + "1"
            jsonValue = (jsonValue || jsonParentValue[jsonAttrName]) + " hee"
            callback(null, jsonValue)
        },
        afterDeserialize: function (
            callback,
            error,
            newValue,
            jsonValue,
            jsonParentValue,
            propNameOrIndex,
            context,
            propDef
        ) {
            callback(undefined, newValue + " haa")
        },
    }
    // Swallows the simulated deserializer error and substitutes "ok now".
    const resumeOnErrorOpts = {
        beforeDeserialize: function (callback, jsonValue, jsonParentValue, propNameOrIndex) {
            const jsonAttrName = propNameOrIndex + "1"
            jsonValue = jsonValue || jsonParentValue[jsonAttrName]
            callback(null, jsonValue)
        },
        afterDeserialize(callback, error) {
            callback(null, "ok now")
        },
    }
    const removeInvalidItemsOpts: AdditionalPropArgs = {
        /**
         * remove all invalid objects in lists and maps,
         * also does this for reference objects asynchronously
         */
        beforeDeserialize(callback, jsonValue, jsonParentValue, propNameOrIndex, context, propDef) {
            let numItemsWaiting = 0
            const jsonAttrName = propNameOrIndex + "1"
            jsonValue = jsonValue || jsonParentValue[jsonAttrName]
            let result = jsonValue
            function getValidItem(inputValue, nameOrIndex) {
                function onItemCallback(err) {
                    if (!err) {
                        result[nameOrIndex] = inputValue
                    }
                    numItemsWaiting -= 1
                    if (numItemsWaiting === 0) {
                        if (Array.isArray(result)) {
                            // clear gaps in array
                            result = result.filter(function () {
                                return true
                            })
                        }
                        callback(null, result)
                    }
                }
                if (inputValue) {
                    if (typeof inputValue === "object") {
                        if (inputValue.valid === true) {
                            onItemCallback(null)
                        } else {
                            onItemCallback(new Error("not a valid item"))
                        }
                    } else if (("" + propNameOrIndex).indexOf("Ref") >= 0) {
                        // reference props: wait until the referenced SubData id resolves
                        context.rootContext.await(
                            getDefaultModelSchema(SubData),
                            inputValue,
                            onItemCallback
                        )
                    } else {
                        onItemCallback(new Error("object expected"))
                    }
                } else {
                    onItemCallback(new Error("not a valid reference"))
                }
            }
            if (Array.isArray(jsonValue)) {
                result = []
                numItemsWaiting = jsonValue.length
                jsonValue.forEach((value, index) => {
                    getValidItem(value, index)
                })
            } else if (typeof jsonValue === "object") {
                result = {}
                const keys = Object.keys(jsonValue)
                numItemsWaiting = keys.length
                keys.forEach((key) => {
                    getValidItem(key ? jsonValue[key] : undefined, key)
                })
            }
        },
        /**
         * remove item in case it caused an error during deserialization
         */
        afterDeserialize: function (
            callback,
            error,
            newValue,
            jsonValue,
            jsonParentValue,
            propNameOrIndex,
            context,
            propDef
        ) {
            if (error && error.itemKey) {
                // TODO: put some code here which is actually used
                throw new Error("this never gets run!")
                if (Array.isArray(jsonValue)) {
                    const nextArray = jsonValue.splice(error.itemKey, 1)
                    callback(error, nextArray)
                } else {
                    const nextObj = Object.assign({}, jsonValue)
                    delete nextObj[error.itemKey]
                    callback(error, nextObj)
                }
            } else {
                callback(error, newValue)
            }
        },
    }
    class SubData {
        @serializable(identifier(renameOpts)) id
        @serializable(primitive(renameOpts)) text
        @serializable(primitive(renameOpts)) valid
    }
    // One property per schema combinator, each wired to the hook set it exercises.
    class FinalData {
        @serializable(identifier(renameOpts)) id
        @serializable(custom(customSerializer, customDeserializer, renameOpts)) custom
        @serializable(custom(customSerializer, customAsyncDeserializer, resumeOnErrorOpts))
        customAsync
        @serializable(date(renameOpts)) date
        @serializable(list(object(SubData, renameOpts), removeInvalidItemsOpts)) listObj
        @serializable(list(reference(SubData, renameOpts), removeInvalidItemsOpts)) listRefObj
        @serializable(map(object(SubData, renameOpts), removeInvalidItemsOpts)) mapObj
        @serializable(map(reference(SubData, renameOpts), removeInvalidItemsOpts)) mapRefObj
        @serializable(mapAsArray(reference(SubData, renameOpts), "id", removeInvalidItemsOpts))
        mapArrayRefObj
        @serializable(object(SubData, renameOpts)) obj
        @serializable(primitive(renameOpts)) primitiveNumber
        @serializable(primitive(replaceValueOpts)) primitiveText
        @serializable(alias("aliasText", primitive(replaceValueOpts))) aliasPrimitiveText
    }
    let resultIsFinal = false
    const prelimResult = deserialize(FinalData, jsonInput, (err, result) => {
        resultIsFinal = true
        err ? t.end(err) : null
        t.deepEqual(serialize(result), jsonResult)
        t.end()
    })
    // Abort the async pipeline if it stalls; fail the test outright if neither
    // the callback nor the cancellation has completed within a second.
    setTimeout(() => {
        cancelDeserialize(prelimResult)
    }, 100)
    setTimeout(() => {
        if (!resultIsFinal) {
            t.end(new Error("deserialization canceled due to timeout"))
        }
    }, 1000)
})
// Bare @serializeAll: dynamically added primitive properties serialize (c),
// object-valued ones do not (d), and unknown keys survive deserialization.
test("[ts] @serializeAll", (t) => {
    @serializeAll
    class Store {
        a = 3
        b
    }
    const store = new Store()
    ;(store as any).c = 5
    ;(store as any).d = {}
    t.deepEqual(serialize(store), { a: 3, c: 5 })
    const store2 = deserialize(Store, { a: 2, b: 3, c: 4 })
    t.equal(store2.a, 2)
    t.equal(store2.b, 3)
    t.equal((store2 as any).c, 4)
    t.end()
})
// @serializeAll with a key pattern and value schema: only keys matching the
// pattern are (de)serialized, each value via the StarValue schema.
test("[ts] @serializeAll(schema)", (t) => {
    class StarValue {
        @serializable(optional())
        public x?: number
    }
    @serializeAll(/^\d\.\d+$/, StarValue)
    class StoreWithStarSchema {
        [key: string]: StarValue
    }
    const store = new StoreWithStarSchema()
    store["1.4"] = { x: 1 }
    store["1.77"] = {}
    ;(store as any).c = 5
    ;(store as any).d = {}
    t.deepEqual(serialize(store), { "1.4": { x: 1 }, "1.77": {} })
    const store2 = deserialize(StoreWithStarSchema, { "1.4": { x: 1 }, "1.77": {}, c: 4 })
    // NOTE(review): the next two assertions inspect `store` (set directly
    // above), not the deserialized `store2` — presumably store2 was intended;
    // verify before relying on these as deserialization coverage.
    t.deepEqual(store["1.4"], { x: 1 })
    t.deepEqual(store["1.77"], {})
    t.equal((store2 as any).c, undefined)
    t.end()
})
// @serializeAll with a key pattern and a list(object(...)) value schema.
test("[ts] @serializeAll(list schema)", (t) => {
    class StarValue {
        @serializable(optional())
        public x?: number
    }
    @serializeAll(/^\d\.\d+$/, list(object(StarValue)))
    class StoreWithStarSchema {
        [key: string]: StarValue[]
    }
    const store = new StoreWithStarSchema()
    store["1.4"] = [{ x: 1 }]
    store["1.77"] = [{}]
    ;(store as any).c = 5
    ;(store as any).d = {}
    t.deepEqual(serialize(store), { "1.4": [{ x: 1 }], "1.77": [{}] })
    const store2 = deserialize(StoreWithStarSchema, { "1.4": [{ x: 1 }], "1.77": [{}], c: 4 })
    // NOTE(review): as in the previous test, these assert on `store` rather
    // than the deserialized `store2` — presumably store2 was intended; verify.
    t.deepEqual(store["1.4"], [{ x: 1 }])
    t.deepEqual(store["1.77"], [{}])
    t.equal((store2 as any).c, undefined)
    t.end()
})
// Reproduces the examples from the @serializeAll documentation verbatim.
test("[ts] tests from serializeAll documentation", (t) => {
    @serializeAll
    class Store {
        [key: string]: number
    }
    const store = new Store()
    store.c = 5
    ;(store as any).d = {}
    t.deepEqual(serialize(store), { c: 5 })
    class DataType {
        @serializable
        x?: number
        @serializable(optional())
        y?: number
    }
    @serializeAll(/^[a-z]$/, DataType)
    class ComplexStore {
        [key: string]: DataType
    }
    const complexStore = new ComplexStore()
    complexStore.a = { x: 1, y: 2 }
    complexStore.b = {}
    ;(complexStore as any).somethingElse = 5
    // optional() omits the missing y; plain @serializable keeps x as undefined
    t.deepEqual(serialize(complexStore), { a: { x: 1, y: 2 }, b: { x: undefined } })
    t.end()
})
test("list(custom(...)) with SKIP", (t) => {
class Store {
@serializable(
list(
custom(
(x) => x,
(x) => (2 === x ? SKIP : x)
)
)
)
list: number[]
}
t.deepEqual(deserialize(Store, { list: [1, 2, 3] }), { list: [1, 3] })
t.end()
}) | the_stack |
import {
AfterViewInit,
Component,
ElementRef,
EventEmitter,
HostBinding,
Inject,
Input,
LOCALE_ID,
OnChanges,
OnInit,
Output,
SimpleChanges,
ViewChild,
} from '@angular/core';
import {
add,
differenceInDays,
eachDayOfInterval,
endOfMonth,
endOfWeek,
format,
getYear,
isAfter,
isBefore,
isSameDay,
isSameMonth,
isWeekend,
Locale as LocaleDateFns,
startOfDay,
startOfMonth,
startOfWeek,
} from 'date-fns';
import { utcToZonedTime, zonedTimeToUtc } from 'date-fns-tz';
import { da, enGB, enUS } from 'date-fns/locale';
import { capitalizeFirstLetter } from '@kirbydesign/core';
import { CalendarCell } from './helpers/calendar-cell.model';
import { CalendarOptions } from './helpers/calendar-options.model';
import { CalendarHelper } from './helpers/calendar.helper';
import { CalendarYearNavigatorConfig } from './options/calendar-year-navigator-config';
// Re-export of the date-fns Locale type under the component's public name.
export type Locale = LocaleDateFns;
// Per-day flags computed for each rendered cell (see getCalendarDay).
interface CalendarDay {
  isCurrentMonth: boolean;
  isToday: boolean;
  isWeekend: boolean;
  isPast: boolean;
  isFuture: boolean;
  isDisabled: boolean;
}
// Duration units used as computed keys when calling date-fns add()
// (e.g. add(date, { [TimeUnit.months]: 1 })).
enum TimeUnit {
  years = 'years',
  months = 'months',
  weeks = 'weeks',
  days = 'days',
  hours = 'hours',
  minutes = 'minutes',
  seconds = 'seconds',
  milliseconds = 'milliseconds',
}
@Component({
  selector: 'kirby-calendar',
  templateUrl: './calendar.component.html',
  styleUrls: ['./calendar.component.scss'],
  providers: [CalendarHelper],
})
export class CalendarComponent implements OnInit, AfterViewInit, OnChanges {
  @ViewChild('calendarContainer', { static: false }) calendarContainer: ElementRef;
  // Emitted only when the selected day actually changes; dateSelect fires on
  // every selectable click (see _onDateSelected).
  @Output() dateChange = new EventEmitter<Date>();
  @Output() dateSelect = new EventEmitter<Date>();
  @Output() yearSelect = new EventEmitter<number>();
  // Controls whether emitted dates are converted to UTC midnight.
  @Input() timezone: 'local' | 'UTC' = 'local';
  @Input() disableWeekends = false;
  @Input() disablePastDates = false;
  @Input() disableFutureDates = false;
  // When true, today stays selectable even if other rules would disable it.
  @Input() alwaysEnableToday = false;
  // Extra date-fns locales merged over the built-in da/enGB/enUS set.
  @Input() customLocales: { [key: string]: Locale } = {};
  /**
   * Configuration for the year navigator.
   *
   * Internally, calendar component:
   * - bases yearNavigatorOptions.from and yearNavigatorOptions.to on todayDate if a number is provided
   * - prioritizes minDate and maxDate over yearNavigatorOptions.from and yearNavigatorOptions.to
   */
  @Input() yearNavigatorOptions: CalendarYearNavigatorConfig;
  // Rendered month grid: rows of 7 cells (see refreshActiveMonth/chunk).
  _month: CalendarCell[][];
  _weekDays: string[];
  private selectedDay: CalendarCell;
  // NOTE: Internally, all Dates
  // are normalized to point to local timezone midnight, regardless of the timezone
  // setting.
  private activeMonth: Date;
  private _selectedDate: Date;
  private _disabledDates: Date[] = [];
  private _todayDate: Date;
  private _minDate: Date;
  private _maxDate: Date;
  private locale: Locale;
  // IANA timezone name of the runtime environment (see constructor).
  private timeZoneName: string;
  private includedLocales = { da, enGB, enUS };
  get selectedDate(): Date {
    return this._selectedDate;
  }
  // Accepts either local-midnight or UTC-midnight dates (see normalizeDate),
  // moves the visible month to the new date, and updates the selection only
  // when the day actually changed.
  @Input() set selectedDate(valueLocalOrUTC: Date) {
    const value = this.normalizeDate(valueLocalOrUTC);
    this.setActiveMonth(value);
    if (this.hasDateChanged(value, this._selectedDate)) {
      this.onSelectedDateChange(value);
      this._selectedDate = value;
    }
  }
  get disabledDates(): Date[] {
    return this._disabledDates;
  }
  // Normalizes every entry so later same-day comparisons work on local midnight.
  @Input() set disabledDates(value: Date[]) {
    this._disabledDates = (value || []).map((date) => this.normalizeDate(date));
  }
  get todayDate(): Date {
    return this._todayDate;
  }
  // Overrides the date treated as "today" when rendering the month
  // (refreshActiveMonth falls back to new Date() when unset).
  @Input() set todayDate(value: Date) {
    this._todayDate = this.normalizeDate(value);
  }
  get minDate(): Date {
    return this._minDate;
  }
  // Lower bound for selectable dates; snaps the visible month forward when it
  // currently lies before the new minimum.
  @Input() set minDate(value: Date) {
    if (value && this.activeMonth && isBefore(this.activeMonth, value)) {
      this.setActiveMonth(value);
    }
    this._minDate = this.normalizeDate(value);
  }
  get maxDate(): Date {
    return this._maxDate;
  }
  // Upper bound for selectable dates; snaps the visible month backward when it
  // currently lies after the new maximum.
  @Input() set maxDate(value: Date) {
    if (value && this.activeMonth && isAfter(this.activeMonth, value)) {
      this.setActiveMonth(value);
    }
    this._maxDate = this.normalizeDate(value);
  }
  // Localized month name of the visible month, first letter capitalized.
  get activeMonthName(): string {
    return capitalizeFirstLetter(this.formatWithLocale(this.activeMonth, 'MMMM'));
  }
  // Four-digit year of the visible month, as a string.
  get activeYear(): string {
    return this.formatWithLocale(this.activeMonth, 'yyyy');
  }
  /**
   * Gets navigable years for year navigator based on yearNavigatorOptions.
   */
  get navigableYears(): string[] {
    // minDate/maxDate take precedence over the navigator's from/to config.
    const dateOfFirstNavigableYear =
      this.minDate || this.getDateFromNavigableYear(this.yearNavigatorOptions.from);
    const dateOfLastNavigableYear =
      this.maxDate || this.getDateFromNavigableYear(this.yearNavigatorOptions.to);
    return this.getYearsBetweenDates(dateOfFirstNavigableYear, dateOfLastNavigableYear);
  }
  // Index of the active year within navigableYears (not the year itself).
  get navigatedYear(): number {
    return this.navigableYears.indexOf(this.activeYear);
  }
  @HostBinding('class.has-year-navigator')
  get _hasYearNavigator() {
    return !!this.yearNavigatorOptions;
  }
  // Resolves the date-fns locale from Angular's LOCALE_ID and captures the
  // runtime's IANA timezone for UTC/local date normalization.
  constructor(private calendarHelper: CalendarHelper, @Inject(LOCALE_ID) locale: string) {
    this.locale = this.mapLocale(locale);
    this.timeZoneName = Intl.DateTimeFormat().resolvedOptions().timeZone;
  }
private formatWithLocale(date: Date, formatString: string): string {
return format(date, formatString, {
locale: this.locale,
});
}
private mapLocale(locale: string): Locale {
if (locale === 'en') {
locale = 'enGB'; // if english locale is provided without region, we default to GB
}
locale = locale.replace('-', '');
const availableLocales = { ...this.includedLocales, ...this.customLocales };
return availableLocales[locale] || this.includedLocales.enGB; // Default to enGB if injected locale doesnt exist
}
  ngOnInit() {
    this._weekDays = this.getWeekDays();
    // Falls back to the current month when no selectedDate input was given.
    this.setActiveMonth(this.selectedDate);
  }
  // The helper needs the rendered container element, so it is wired up after
  // the view exists; callbacks are bound to keep this component as `this`.
  ngAfterViewInit() {
    this.calendarHelper.init(
      this.calendarContainer,
      this.getHelperOptions(),
      this._onDateSelected.bind(this),
      this.onChangeMonth.bind(this)
    );
  }
ngOnChanges(changes: SimpleChanges): void {
if (!this.activeMonth) return;
if (
changes.disableWeekends ||
changes.disablePastDates ||
changes.disableFutureDates ||
changes.disabledDates ||
changes.minDate ||
changes.maxDate ||
changes.todayDate ||
changes.timezone
) {
this.refreshActiveMonth();
this.calendarHelper.update(this.getHelperOptions());
}
}
  // Points the view at the month containing `date` (default: now) and
  // re-renders, but only when that actually changes the visible month.
  private setActiveMonth(date: Date = new Date()) {
    if (!this.activeMonth || !isSameMonth(this.activeMonth, date)) {
      this.activeMonth = startOfMonth(date);
      this.refreshActiveMonth();
      this.calendarHelper.update(this.getHelperOptions());
    }
  }
  // For leniency, the component will accept any Date that points to either UTC midnight
  // or to local timezone midnight although we will internally normalize the representation
  // of all received dates to point to local timezone midnight.
  // We currently log no warnings if the date doesn't match the timezone setting or
  // if it doesn't point to midnight.
  private normalizeDate(dateLocalOrUTC: Date) {
    // Returns undefined for missing input (callers treat that as "unset").
    if (!dateLocalOrUTC) return;
    if (startOfDay(dateLocalOrUTC).getTime() === dateLocalOrUTC.getTime()) {
      // date is local timezone midnight
      return dateLocalOrUTC;
    }
    if (
      startOfDay(utcToZonedTime(dateLocalOrUTC, this.timeZoneName)).getTime() ===
      utcToZonedTime(dateLocalOrUTC, this.timeZoneName).getTime()
    ) {
      // the date is a UTC midnight; create the equivalent local timezone midnight date
      const normalizedUTCdate = utcToZonedTime(dateLocalOrUTC, this.timeZoneName);
      return normalizedUTCdate;
    }
    // does not point to midnight so we make it
    return startOfDay(dateLocalOrUTC);
  }
private getWeekDays(): string[] {
const now = new Date();
const week = eachDayOfInterval({
start: startOfWeek(now, { locale: this.locale }),
end: endOfWeek(now, { locale: this.locale }),
});
return week.map((date) => this.getFirstLetterOfWeekDayCapitalized(date));
}
  // 'EEEEE' is the date-fns pattern for the narrow (single-letter) weekday name.
  private getFirstLetterOfWeekDayCapitalized(date: Date) {
    return this.formatWithLocale(date, 'EEEEE');
  }
private hasDateChanged(newDate: Date, previousDate: Date): boolean {
if (!newDate && !previousDate) {
return false;
}
if (newDate instanceof Date && !previousDate) {
return true;
}
return !isSameDay(newDate, previousDate);
}
private isDisabledDate(date: Date): boolean {
return this.disabledDates.some((disabledDate) => {
return isSameDay(disabledDate, date);
});
}
  // Rebuilds the month grid (_month): full weeks covering the active month,
  // with per-cell selectability, selection state and CSS classes, and caches
  // the currently selected cell.
  refreshActiveMonth() {
    if (!this.activeMonth) return;
    const monthStart = startOfMonth(this.activeMonth);
    const monthEnd = endOfMonth(this.activeMonth);
    // The grid spans whole weeks, so it may include leading/trailing days
    // from the adjacent months.
    const startOfFirstWeek = startOfWeek(monthStart, { locale: this.locale });
    const endOfLastWeek = endOfWeek(monthEnd, { locale: this.locale });
    const totalDayCount = differenceInDays(endOfLastWeek, startOfFirstWeek) + 1;
    const today = this.todayDate ? startOfDay(this.todayDate) : startOfDay(new Date());
    const daysArray = Array.from(Array(totalDayCount).keys());
    const days: CalendarCell[] = daysArray.map((number) => {
      const cellDate = add(startOfFirstWeek, { [TimeUnit.days]: number });
      const day = this.getCalendarDay(cellDate, today, monthStart);
      const isSelectable = this.isSelectable(day, cellDate);
      const isSelected = isSameDay(this.selectedDate, cellDate);
      const cell = {
        date: cellDate.getDate(),
        isCurrentMonth: day.isCurrentMonth,
        isSelectable,
        isSelected,
        cssClasses: this.getCssClasses(day, isSelectable, isSelected),
      };
      if (isSelected) {
        this.selectedDay = cell;
      }
      return cell;
    });
    // Split the flat day list into rows of 7 (one per week).
    this._month = this.chunk(days, 7);
  }
private getCalendarDay(date: Date, today: Date, monthStart: Date): CalendarDay {
return {
isToday: isSameDay(today, date),
isPast: isBefore(date, today),
isFuture: isAfter(date, today),
isWeekend: isWeekend(date),
isCurrentMonth: isSameMonth(date, monthStart),
isDisabled: this.isDisabledDate(date),
};
}
private isSelectable(day: CalendarDay, date: Date) {
return (
(this.alwaysEnableToday && day.isToday) ||
(!day.isDisabled &&
day.isCurrentMonth &&
!(this.disableWeekends && day.isWeekend) &&
!(this.disablePastDates && day.isPast) &&
!(this.disableFutureDates && day.isFuture) &&
!(this.minDate && isBefore(date, this.minDate)) &&
!(this.maxDate && isAfter(date, this.maxDate)))
);
}
// Builds the space-separated CSS class string for a day cell; every cell
// gets "day" plus one class per truthy flag below.
// The previous for..in loop indexed the object with an untyped string key
// (an implicit-any error under strict mode); the typed Record plus
// filter/join is equivalent — Object.keys preserves insertion order, so
// the generated string is identical.
private getCssClasses(day: CalendarDay, isSelectable: boolean, isSelected: boolean) {
  const cssClasses: Record<string, boolean> = {
    'current-month': day.isCurrentMonth,
    weekend: day.isWeekend,
    today: day.isToday,
    selectable: isSelectable,
    selected: isSelected,
    past: day.isPast,
    disabled: day.isDisabled,
  };
  const activeClasses = Object.keys(cssClasses).filter((name) => cssClasses[name]);
  return ['day', ...activeClasses].join(' ');
}
// Splits `array` into consecutive groups of `size` (the last group may be
// shorter). Unlike the previous splice()-based version this does NOT
// mutate the input array, and a non-positive size fails fast instead of
// looping forever (splice(0, 0) never shrank the array).
private chunk(array: any[], size: number) {
  if (size < 1) {
    throw new Error('chunk size must be at least 1');
  }
  const results = [];
  for (let start = 0; start < array.length; start += size) {
    results.push(array.slice(start, start + size));
  }
  return results;
}
// Moves the selection highlight to the cell matching `newDate` (when that
// day is visible in the current month) and notifies the calendar helper.
private onSelectedDateChange(newDate: Date): void {
  // Clear the highlight on the previously selected cell, if any.
  if (this.selectedDay) {
    this.selectedDay.isSelected = false;
  }
  const cell = this.getCell(newDate);
  if (cell) {
    cell.isSelected = true;
    this.selectedDay = cell;
  }
  this.calendarHelper.setSelectedDay(newDate.getDate());
}
// Click handler for a day cell. Clicks on non-selectable cells (or cells
// without a day-of-month number) are ignored.
_onDateSelected(newDay: CalendarCell) {
  if (newDay.isSelectable && newDay.date) {
    let newDate = new Date(this.activeMonth);
    // NOTE(review): in UTC mode the local offset is subtracted and the
    // result converted via zonedTimeToUtc BEFORE the day-of-month is
    // applied — presumably to land on midnight of the clicked day in the
    // configured zone; confirm against date-fns-tz semantics.
    if (this.timezone === 'UTC') {
      newDate = zonedTimeToUtc(this.subtractTimezoneOffset(newDate), this.timeZoneName);
    }
    newDate.setDate(newDay.date);
    const dateToEmit = newDate;
    // dateChange fires only when the selection actually changed...
    if (this.hasDateChanged(newDate, this._selectedDate)) {
      this.onSelectedDateChange(newDate);
      this._selectedDate = newDate;
      this.dateChange.emit(dateToEmit);
    }
    // ...while dateSelect fires on every click of a selectable day.
    this.dateSelect.emit(dateToEmit);
  }
}
// Moves the visible month by `direction` months and pushes the refreshed
// navigation state to the calendar helper.
private onChangeMonth(direction: number) {
  this._changeMonth(direction);
  this.calendarHelper.update(this.getHelperOptions());
}
// Shifts the active view by `index` months (negative = backwards).
_changeMonth(index: number) {
  this.changeActiveView(index, TimeUnit.months);
}
// Jumps to the given year (received as a string) and notifies listeners
// through the yearSelect emitter.
_changeYear(year: string) {
  const yearNumeric = Number(year);
  this.changeActiveView(yearNumeric - getYear(this.activeMonth), TimeUnit.years);
  this.yearSelect.emit(yearNumeric);
}
// Shifts the active month by `index` units (months or years) and rebuilds
// the grid. A zero offset is a no-op — the refresh is skipped entirely.
private changeActiveView(index: number, unit: TimeUnit) {
  if (index !== 0) {
    this.activeMonth = add(this.activeMonth, { [unit]: index });
    this.refreshActiveMonth();
  }
}
// Backwards navigation is blocked once past dates are disabled and the
// active month is today's month, or once the minimum month is reached.
get _canNavigateBack(): boolean {
  if (this.disablePastDates && isSameMonth(this.activeMonth, this.todayDate)) {
    return false;
  }
  if (
    this.minDate &&
    (isSameMonth(this.activeMonth, this.minDate) ||
      isBefore(this.activeMonth, this.minDate))
  ) {
    return false;
  }
  return true;
}
// Forwards navigation is blocked once future dates are disabled and the
// active month is today's month, or once the maximum month is reached.
get _canNavigateForward(): boolean {
  if (this.disableFutureDates && isSameMonth(this.activeMonth, this.todayDate)) {
    return false;
  }
  if (
    this.maxDate &&
    (isSameMonth(this.activeMonth, this.maxDate) ||
      isAfter(this.activeMonth, this.maxDate))
  ) {
    return false;
  }
  return true;
}
// Finds the current-month cell matching `date`'s day-of-month, or null.
// Improvements over the previous version: the loop-invariant
// date.getDate() is computed once instead of per cell, `const` replaces
// `let`, and a miss always yields null (previously a miss in the last
// week leaked `undefined` from Array.find; the only caller,
// onSelectedDateChange, just truthiness-checks the result).
private getCell(date: Date) {
  if (!date) {
    return null;
  }
  const dayOfMonth = date.getDate();
  for (const week of this._month) {
    const found = week.find((cell) => cell.isCurrentMonth && cell.date === dayOfMonth);
    if (found) {
      return found;
    }
  }
  return null;
}
// Snapshot of the state the external calendar helper needs: navigation
// availability, the header labels and the rendered week/day grid.
private getHelperOptions(): CalendarOptions {
  return {
    canNavigateBack: this._canNavigateBack,
    canNavigateForward: this._canNavigateForward,
    year: this.activeYear,
    monthName: this.activeMonthName,
    weekDays: this._weekDays,
    month: this._month,
  };
}
// Returns a new Date whose timestamp is the input minus the local
// timezone offset (getTimezoneOffset() is expressed in minutes).
private subtractTimezoneOffset(date: Date): Date {
  const offsetMs = date.getTimezoneOffset() * 60 * 1000;
  return new Date(date.getTime() - offsetMs);
}
// A Date passes through untouched; a number is treated as an offset in
// years from "today" and resolved to January 1st of the resulting year.
private getDateFromNavigableYear(navigableYear: number | Date): Date {
  if (navigableYear instanceof Date) {
    return navigableYear;
  }
  const reference = this.todayDate ? this.todayDate : new Date();
  return new Date(reference.getFullYear() + navigableYear, 0, 1);
}
// Returns every year between the two dates (inclusive) as strings,
// tolerating swapped parameters.
// Fix: the previous `.sort()` used the default lexicographic comparator,
// which mis-orders numbers (e.g. [2020, 999] stays [2020, 999]); use
// numeric min/max instead.
private getYearsBetweenDates(startDate: Date, endDate: Date): string[] {
  const yearA = startDate.getFullYear();
  const yearB = endDate.getFullYear();
  const startYear = Math.min(yearA, yearB);
  const endYear = Math.max(yearA, yearB);
  return Array.from({ length: endYear - startYear + 1 }, (_, i) => (startYear + i).toString());
}
}
import { Router } from "express"
import { celebrate, Joi } from "celebrate"
import { File } from "@lib/models/File"
import { Post } from "@lib/models/Post"
import jwt, { UserJwtRequest } from "@lib/middleware/jwt"
import * as crypto from "crypto"
import { User } from "@lib/models/User"
import secretKey from "@lib/middleware/secret-key"
import { Op } from "sequelize"
import { PostAuthor } from "@lib/models/PostAuthor"
import getHtmlFromFile from "@lib/get-html-from-drift-file"
import { getGist, createPostFromGist } from "@lib/gist"
export const posts = Router()
// celebrate/Joi custom validator: returns the value when it is one of the
// known visibilities, otherwise throws (which celebrate turns into a 400).
const postVisibilitySchema = (value: string) => {
	const validVisibilities = ["public", "private", "unlisted", "protected"]
	if (!validVisibilities.includes(value)) {
		throw new Error("Invalid post visibility")
	}
	return value
}
// POST /create — create a post with one or more files. Requires a JWT;
// the post is associated with req.body.userId and optionally attached to
// a parent post.
posts.post(
	"/create",
	jwt,
	celebrate({
		body: {
			title: Joi.string().required(),
			description: Joi.string().optional().min(0).max(256),
			files: Joi.any().required(),
			visibility: Joi.string()
				.custom(postVisibilitySchema, "valid visibility")
				.required(),
			userId: Joi.string().required(),
			password: Joi.string().optional(),
			// expiresAt, allow to be null
			expiresAt: Joi.date().optional().allow(null, ""),
			parentId: Joi.string().optional().allow(null, "")
		}
	}),
	async (req, res) => {
		try {
			// check if all files have titles
			const files = req.body.files as File[]
			const fileTitles = files.map((file) => file.title)
			const missingTitles = fileTitles.filter((title) => title === "")
			if (missingTitles.length > 0) {
				throw new Error("All files must have a title")
			}
			if (files.length === 0) {
				throw new Error("You must submit at least one file")
			}
			// Protected posts store a SHA-256 hash of the password, never the
			// plaintext.
			// NOTE(review): password is optional in the schema, so a
			// "protected" post without one makes update(undefined) throw —
			// confirm the client always supplies it.
			let hashedPassword: string = ""
			if (req.body.visibility === "protected") {
				hashedPassword = crypto
					.createHash("sha256")
					.update(req.body.password)
					.digest("hex")
			}
			const newPost = new Post({
				title: req.body.title,
				description: req.body.description,
				visibility: req.body.visibility,
				password: hashedPassword,
				expiresAt: req.body.expiresAt
			})
			await newPost.save()
			// Link the creating user as an author of the post.
			await newPost.$add("users", req.body.userId)
			// Persist each file with a SHA-256 content hash and the HTML
			// rendered by getHtmlFromFile.
			const newFiles = await Promise.all(
				files.map(async (file) => {
					const html = getHtmlFromFile(file)
					const newFile = new File({
						title: file.title || "",
						content: file.content,
						sha: crypto
							.createHash("sha256")
							.update(file.content)
							.digest("hex")
							.toString(),
						html: html || "",
						userId: req.body.userId,
						postId: newPost.id
					})
					await newFile.save()
					return newFile
				})
			)
			await Promise.all(
				newFiles.map(async (file) => {
					await newPost.$add("files", file.id)
					await newPost.save()
				})
			)
			// Optionally attach this post as a child of an existing post.
			if (req.body.parentId) {
				// const parentPost = await Post.findOne({
				// where: { id: req.body.parentId }
				// })
				// if (parentPost) {
				// await parentPost.$add("children", newPost.id)
				// await parentPost.save()
				// }
				const parentPost = await Post.findByPk(req.body.parentId)
				if (parentPost) {
					newPost.$set("parent", req.body.parentId)
					await newPost.save()
				} else {
					throw new Error("Parent post not found")
				}
			}
			res.json(newPost)
		} catch (e) {
			// NOTE(review): res.json(e) on an Error instance serializes to {}
			// — clients receive an empty 400 body; consider e.message.
			res.status(400).json(e)
		}
	}
)
posts.get("/", secretKey, async (req, res, next) => {
try {
const posts = await Post.findAll({
attributes: ["id", "title", "description", "visibility", "createdAt"]
})
res.json(posts)
} catch (e) {
next(e)
}
})
// GET /mine — the authenticated user's posts, newest first, paginated by
// the x-page header (10 per page).
posts.get("/mine", jwt, async (req: UserJwtRequest, res, next) => {
	if (!req.user) {
		return res.status(401).json({ error: "Unauthorized" })
	}
	const PAGE_SIZE = 10
	// Fall back to page 1 when the header is missing, malformed or < 1
	// (previously a malformed header produced NaN slice bounds).
	const parsedPage = parseInt(req.headers["x-page"]?.toString() || "1")
	const page = Number.isNaN(parsedPage) || parsedPage < 1 ? 1 : parsedPage
	try {
		const user = await User.findByPk(req.user.id, {
			include: [
				{
					model: Post,
					as: "posts",
					include: [
						{
							model: File,
							as: "files",
							attributes: ["id", "title", "createdAt"]
						},
						{
							model: Post,
							as: "parent",
							attributes: ["id", "title", "visibility"]
						}
					],
					attributes: [
						"id",
						"title",
						"description",
						"visibility",
						"createdAt",
						"expiresAt"
					]
				}
			]
		})
		if (!user) {
			return res.status(404).json({ error: "User not found" })
		}
		// Sort newest first, then take the requested page.
		const sorted = user.posts?.sort((a, b) => {
			return b.createdAt.getTime() - a.createdAt.getTime()
		})
		const paginated = sorted?.slice((page - 1) * PAGE_SIZE, page * PAGE_SIZE)
		// More pages exist only while the pages seen so far do not cover the
		// whole list. (The previous check, paginated.length < sorted.length,
		// wrongly reported hasMore=true on the final page whenever the total
		// exceeded one page.)
		const hasMore = sorted ? page * PAGE_SIZE < sorted.length : false
		return res.json({
			posts: paginated,
			hasMore
		})
	} catch (error) {
		next(error)
	}
})
// GET /search?q=… — full-text-ish search over the requesting user's own
// posts: matches the term against post title/description and the titles
// and contents of the post's files ($files.…$ / $users.id$ reference the
// joined tables).
posts.get(
	"/search",
	jwt,
	celebrate({
		query: {
			q: Joi.string().required()
		}
	}),
	async (req: UserJwtRequest, res, next) => {
		const { q } = req.query
		if (typeof q !== "string") {
			return res.status(400).json({ error: "Invalid query" })
		}
		try {
			// NOTE(review): q is interpolated into LIKE patterns unescaped,
			// so "%" and "_" in the search term act as wildcards — confirm
			// that is intended.
			const posts = await Post.findAll({
				where: {
					[Op.or]: [
						{ title: { [Op.like]: `%${q}%` } },
						{ description: { [Op.like]: `%${q}%` } },
						{ "$files.title$": { [Op.like]: `%${q}%` } },
						{ "$files.content$": { [Op.like]: `%${q}%` } }
					],
					// Restrict results to posts authored by the requester.
					[Op.and]: [{ "$users.id$": req.user?.id || "" }]
				},
				include: [
					{
						model: File,
						as: "files",
						attributes: ["id", "title"]
					},
					{
						model: User,
						as: "users",
						attributes: ["id", "username"]
					},
					{
						model: Post,
						as: "parent",
						attributes: ["id", "title", "visibility"]
					}
				],
				attributes: [
					"id",
					"title",
					"description",
					"visibility",
					"createdAt",
					"deletedAt"
				],
				order: [["createdAt", "DESC"]]
			})
			res.json(posts)
		} catch (e) {
			next(e)
		}
	}
)
// Shared Sequelize query options for loading a "full" post: its files,
// its authors (id/username only) and its parent post, plus the public
// post columns. "password" is deliberately absent; routes that need it
// (e.g. /authenticate) append it to `attributes` explicitly.
const fullPostSequelizeOptions = {
	include: [
		{
			model: File,
			as: "files",
			attributes: ["id", "title", "content", "sha", "createdAt", "updatedAt"]
		},
		{
			model: User,
			as: "users",
			attributes: ["id", "username"]
		},
		{
			model: Post,
			as: "parent",
			attributes: ["id", "title", "visibility", "createdAt"]
		}
	],
	attributes: [
		"id",
		"title",
		"description",
		"visibility",
		"createdAt",
		"updatedAt",
		"deletedAt",
		"expiresAt"
	]
}
// GET /authenticate?id=…&password=… — unlock a password-protected post.
// Fixes over the previous version: async errors are now routed to the
// error middleware instead of crashing as unhandled rejections; a missing
// post no longer dereferences null; and the stored password hash is
// stripped from the response instead of being echoed to the client.
posts.get(
	"/authenticate",
	celebrate({
		query: {
			id: Joi.string().required(),
			password: Joi.string().required()
		}
	}),
	async (req, res, next) => {
		try {
			const { id, password } = req.query
			const post = await Post.findByPk(id?.toString(), {
				...fullPostSequelizeOptions,
				attributes: [...fullPostSequelizeOptions.attributes, "password"]
			})
			const hash = crypto
				.createHash("sha256")
				.update(password?.toString() || "")
				.digest("hex")
				.toString()
			// A missing post and a wrong password produce the same response,
			// so this endpoint cannot be used to probe which posts exist.
			if (!post || hash !== post.password) {
				return res.status(400).json({ error: "Incorrect password." })
			}
			// Never send the password hash back to the client.
			const postJson = post.toJSON() as Record<string, unknown>
			delete postJson.password
			res.json(postJson)
		} catch (e) {
			next(e)
		}
	}
)
// GET /:id — fetch one post, enforcing visibility rules:
//   public/unlisted -> returned to anyone (with cache headers),
//   private         -> requires a valid JWT belonging to an author,
//   protected       -> returned as-is (the client withholds it until
//                      /authenticate succeeds; see note below).
posts.get(
	"/:id",
	secretKey,
	celebrate({
		params: {
			id: Joi.string().required()
		}
	}),
	async (req: UserJwtRequest, res, next) => {
		// True when the authenticated user is one of the post's authors.
		const isUserAuthor = (post: Post) => {
			return (
				req.user?.id &&
				post.users?.map((user) => user.id).includes(req.user?.id)
			)
		}
		try {
			const post = await Post.findByPk(req.params.id, fullPostSequelizeOptions)
			if (!post) {
				return res.status(404).json({ error: "Post not found" })
			}
			// if public or unlisted, cache
			if (post.visibility === "public" || post.visibility === "unlisted") {
				res.set("Cache-Control", "public, max-age=4800")
			}
			// NOTE(review): this condition duplicates the cache check above;
			// the two blocks could be merged — confirm there is no reason for
			// the split.
			if (post.visibility === "public" || post?.visibility === "unlisted") {
				res.json(post)
			} else if (post.visibility === "private") {
				// Private posts require a valid JWT AND authorship.
				jwt(req as UserJwtRequest, res, () => {
					if (isUserAuthor(post)) {
						res.json(post)
					} else {
						res.status(403).send()
					}
				})
			} else if (post.visibility === "protected") {
				// The client ensures to not send the post to the client.
				// See client/pages/post/[id].tsx::getServerSideProps
				res.json(post)
			}
		} catch (e) {
			res.status(400).json(e)
		}
	}
)
// DELETE /:id — delete a post plus its files and its PostAuthor link.
// Only an author of the post may delete it.
posts.delete("/:id", jwt, async (req: UserJwtRequest, res, next) => {
	try {
		const post = await Post.findByPk(req.params.id, {
			include: [
				{
					model: User,
					as: "users",
					attributes: ["id"]
				},
				{
					model: File,
					as: "files",
					attributes: ["id"]
				}
			]
		})
		if (!post) {
			return res.status(404).json({ error: "Post not found" })
		}
		// Authorize against ANY author of the post, matching the
		// isUserAuthor checks in the GET and PUT routes. (The previous
		// `post.users![0].id` comparison crashed on author-less posts and
		// only ever accepted the first author.)
		const isAuthor = post.users?.some((user) => user.id === req.user?.id)
		if (!isAuthor) {
			return res.status(403).json({ error: "Forbidden" })
		}
		// Remove dependents before the post itself.
		if (post.files?.length)
			await Promise.all(post.files.map((file) => file.destroy()))
		const postAuthor = await PostAuthor.findOne({
			where: {
				postId: post.id
			}
		})
		if (postAuthor) await postAuthor.destroy()
		await post.destroy()
		res.json({ message: "Post deleted" })
	} catch (e) {
		next(e)
	}
})
// PUT /:id — update a post's visibility (and password when protected).
// Only an author of the post may change it.
posts.put(
	"/:id",
	jwt,
	celebrate({
		params: {
			id: Joi.string().required()
		},
		body: {
			visibility: Joi.string()
				.custom(postVisibilitySchema, "valid visibility")
				.required(),
			password: Joi.string().optional()
		}
	}),
	async (req: UserJwtRequest, res, next) => {
		try {
			// True when the authenticated user is one of the post's authors.
			const isUserAuthor = (post: Post) => {
				return (
					req.user?.id &&
					post.users?.map((user) => user.id).includes(req.user?.id)
				)
			}
			const { visibility, password } = req.body
			let hashedPassword: string = ""
			if (visibility === "protected") {
				// The schema marks password optional, so a protected update
				// without one previously made createHash().update(undefined)
				// throw a TypeError that surfaced as an empty 400 body.
				// Reject it explicitly with a usable message instead.
				if (!password) {
					return res
						.status(400)
						.json({ error: "Protected posts require a password" })
				}
				hashedPassword = crypto
					.createHash("sha256")
					.update(password)
					.digest("hex")
			}
			const { id } = req.params
			const post = await Post.findByPk(id, {
				include: [
					{
						model: User,
						as: "users",
						attributes: ["id"]
					}
				]
			})
			if (!post) {
				return res.status(404).json({ error: "Post not found" })
			}
			if (!isUserAuthor(post)) {
				return res
					.status(403)
					.json({ error: "This post does not belong to you" })
			}
			// Only visibility and password may be changed through this route.
			await Post.update(
				{ password: hashedPassword, visibility },
				{ where: { id } }
			)
			res.json({ id, visibility })
		} catch (e) {
			res.status(400).json(e)
		}
	}
)
// POST /import/gist/id/:id — import a GitHub gist (by gist id) as a new
// post owned by the authenticated user.
posts.post(
	"/import/gist/id/:id",
	jwt,
	celebrate({
		body: {
			visibility: Joi.string()
				.custom(postVisibilitySchema, "valid visibility")
				.required(),
			password: Joi.string().optional(),
			expiresAt: Joi.date().optional().allow(null, "")
		}
	}),
	async (req: UserJwtRequest, res, next) => {
		try {
			const { id } = req.params
			const { visibility, password, expiresAt } = req.body
			const gist = await getGist(id)
			// Same hashing scheme as /create: protected posts store a
			// SHA-256 hash of the password.
			// NOTE(review): password is optional in the schema; a
			// "protected" import without one makes update(undefined) throw —
			// confirm the client always supplies it.
			let hashedPassword: string = ""
			if (visibility === "protected") {
				hashedPassword = crypto
					.createHash("sha256")
					.update(password)
					.digest("hex")
			}
			const newFile = await createPostFromGist(
				{
					userId: req.user!.id,
					visibility,
					password: hashedPassword,
					expiresAt
				},
				gist
			)
			return res.json(newFile)
		} catch (e) {
			res.status(400).json({ error: e.toString() })
		}
	}
)
import { ConditionalFilter } from "@pankod/refine-core";
import dataProvider from "../../src/index";
import "./index.mock";
describe("getList", () => {
it("correct response", async () => {
const response = await dataProvider(
"keywoytODSr6xAqfg",
"appKYl1H4k9g73sBT",
).getList({
resource: "posts",
});
expect(response.data[0]["id"]).toBe("rec9GbXLzd6dxn4Il");
expect(response.data[0]["title"]).toBe("Hello World 3!");
expect(response.total).toBe(2);
});
it("correct sorting response", async () => {
const response = await dataProvider(
"keywoytODSr6xAqfg",
"appKYl1H4k9g73sBT",
).getList({
resource: "posts",
sort: [
{
field: "title",
order: "desc",
},
],
});
expect(response.data[0]["id"]).toBe("recLKRioqifTrPUIz");
expect(response.data[0]["title"]).toBe("Hello World!");
expect(response.total).toBe(2);
});
it("correct equals filter for strings", async () => {
const filter = {
operator: "eq",
field: "title",
value: "Hello World!",
} as const;
const response = await dataProvider(
"keywoytODSr6xAqfg",
"appKYl1H4k9g73sBT",
).getList({
resource: "posts",
filters: [filter],
});
expect(response.total).toBe(1);
// {field} must equal exactly string
expect(response.data[0]["query"]).toBe('AND({title}="Hello World!")');
});
it("correct equals filter for numbers", async () => {
const filter = {
operator: "eq",
field: "age",
value: 100,
} as const;
const response = await dataProvider(
"keywoytODSr6xAqfg",
"appKYl1H4k9g73sBT",
).getList({
resource: "posts",
filters: [filter],
});
expect(response.total).toBe(1);
// {field} must equal exactly number
expect(response.data[0]["query"]).toBe("AND({age}=100)");
});
it("correct not equals filter for strings", async () => {
const filter = {
operator: "ne",
field: "title",
value: "Hello World!",
} as const;
const response = await dataProvider(
"keywoytODSr6xAqfg",
"appKYl1H4k9g73sBT",
).getList({
resource: "posts",
filters: [filter],
});
expect(response.total).toBe(1);
// {field} must not equal exactly string
expect(response.data[0]["query"]).toBe('AND({title}!="Hello World!")');
});
it("correct not equals filter for numbers", async () => {
const filter = {
operator: "ne",
field: "age",
value: 100,
} as const;
const response = await dataProvider(
"keywoytODSr6xAqfg",
"appKYl1H4k9g73sBT",
).getList({
resource: "posts",
filters: [filter],
});
expect(response.total).toBe(1);
// {field} must not equal exactly number
expect(response.data[0]["query"]).toBe("AND({age}!=100)");
});
it("correct less than filter", async () => {
const filter = {
operator: "lt",
field: "age",
value: 10,
} as const;
const response = await dataProvider(
"keywoytODSr6xAqfg",
"appKYl1H4k9g73sBT",
).getList({
resource: "posts",
filters: [filter],
});
expect(response.total).toBe(1);
// {field} must be less than value (as number)
expect(response.data[0]["query"]).toBe("AND({age}<10)");
});
it("correct less than or equal filter", async () => {
const filter = {
operator: "lte",
field: "age",
value: 10,
} as const;
const response = await dataProvider(
"keywoytODSr6xAqfg",
"appKYl1H4k9g73sBT",
).getList({
resource: "posts",
filters: [filter],
});
expect(response.total).toBe(1);
// {field} must be less than or equal value (as number)
expect(response.data[0]["query"]).toBe("AND({age}<=10)");
});
it("correct greater than filter", async () => {
const filter = {
operator: "gt",
field: "age",
value: 10,
} as const;
const response = await dataProvider(
"keywoytODSr6xAqfg",
"appKYl1H4k9g73sBT",
).getList({
resource: "posts",
filters: [filter],
});
expect(response.total).toBe(1);
// {field} must be greater than value (as number)
expect(response.data[0]["query"]).toBe("AND({age}>10)");
});
it("correct greater than or equal filter", async () => {
const filter = {
operator: "gte",
field: "age",
value: 10,
} as const;
const response = await dataProvider(
"keywoytODSr6xAqfg",
"appKYl1H4k9g73sBT",
).getList({
resource: "posts",
filters: [filter],
});
expect(response.total).toBe(1);
// {field} must be greater than or equal value (as number)
expect(response.data[0]["query"]).toBe("AND({age}>=10)");
});
it("correct contains filter", async () => {
const filter = {
operator: "containss",
field: "title",
value: "Hello",
} as const;
const response = await dataProvider(
"keywoytODSr6xAqfg",
"appKYl1H4k9g73sBT",
).getList({
resource: "posts",
filters: [filter],
});
expect(response.total).toBe(1);
// must find string in {field} - FIND returns non-zero value
expect(response.data[0]["query"]).toBe('AND(FIND("Hello",{title})!=0)');
});
it("correct not contains filter", async () => {
const filter = {
operator: "ncontainss",
field: "title",
value: "Hello",
} as const;
const response = await dataProvider(
"keywoytODSr6xAqfg",
"appKYl1H4k9g73sBT",
).getList({
resource: "posts",
filters: [filter],
});
expect(response.total).toBe(1);
// must not find string in {field} - FIND returns zero
expect(response.data[0]["query"]).toBe('AND(FIND("Hello",{title})=0)');
});
it("correct case-insensitive contains filter", async () => {
const filter = {
operator: "contains",
field: "title",
value: "Hello",
} as const;
const response = await dataProvider(
"keywoytODSr6xAqfg",
"appKYl1H4k9g73sBT",
).getList({
resource: "posts",
filters: [filter],
});
expect(response.total).toBe(1);
// must find lower-cased string in lower-cased {field} - lower-casing both values makes it case-insensitive
expect(response.data[0]["query"]).toBe(
'AND(FIND(LOWER("Hello"),LOWER({title}))!=0)',
);
});
it("correct case-insensitive not contains filter", async () => {
const filter = {
operator: "ncontains",
field: "title",
value: "Hello",
} as const;
const response = await dataProvider(
"keywoytODSr6xAqfg",
"appKYl1H4k9g73sBT",
).getList({
resource: "posts",
filters: [filter],
});
expect(response.total).toBe(1);
// must not find lower-cased string in lower-cased {field} - lower-casing both values makes it case-insensitive
expect(response.data[0]["query"]).toBe(
'AND(FIND(LOWER("Hello"),LOWER({title}))=0)',
);
});
it("correct truthy null filter", async () => {
const filter = {
operator: "null",
field: "title",
value: undefined,
} as const;
const response = await dataProvider(
"keywoytODSr6xAqfg",
"appKYl1H4k9g73sBT",
).getList({
resource: "posts",
filters: [filter],
});
expect(response.total).toBe(1);
// {field} must be null (blank)
expect(response.data[0]["query"]).toBe("AND({title}=BLANK())");
});
it("correct falsy null filter", async () => {
const filter = {
operator: "nnull",
field: "title",
value: undefined,
} as const;
const response = await dataProvider(
"keywoytODSr6xAqfg",
"appKYl1H4k9g73sBT",
).getList({
resource: "posts",
filters: [filter],
});
expect(response.total).toBe(1);
// {field} must not be null (blank)
expect(response.data[0]["query"]).toBe("AND({title}!=BLANK())");
});
it.each(["between", "nbetween"] as const)(
"fails for %s filter",
async (operator) => {
const filter = {
operator,
field: "age",
value: [10, 15],
} as const;
await expect(() => {
return dataProvider(
"keywoytODSr6xAqfg",
"appKYl1H4k9g73sBT",
).getList({
resource: "posts",
filters: [filter],
});
}).rejects.toThrow(
`Operator ${operator} is not supported for the Airtable data provider`,
);
},
);
it.each(["in", "nin"] as const)("fails for %s filter", async (operator) => {
const filter = {
operator,
field: "posts",
value: ["uuid-1", "uuid-2"],
} as const;
await expect(() => {
return dataProvider(
"keywoytODSr6xAqfg",
"appKYl1H4k9g73sBT",
).getList({
resource: "posts",
filters: [filter],
});
}).rejects.toThrow(
`Operator ${operator} is not supported for the Airtable data provider`,
);
});
it("correct 'or' conditional filter", async () => {
const filter = {
operator: "or",
value: [
{
field: "title",
operator: "eq",
value: "Silver Bullet",
},
{
field: "title",
operator: "ne",
value: "The Mythical Man Month",
},
],
} as ConditionalFilter;
const response = await dataProvider(
"keywoytODSr6xAqfg",
"appKYl1H4k9g73sBT",
).getList({
resource: "posts",
filters: [filter],
});
expect(response.total).toBe(1);
// {field} must either be Silver Bullet or must not be Mythical Man Month
expect(response.data[0]["query"]).toBe(
'AND(OR({title}="Silver Bullet",{title}!="The Mythical Man Month"))',
);
});
it("correct compound 'or' conditional filter", async () => {
const filters = [
{
operator: "or",
value: [
{
field: "title",
operator: "eq",
value: "Silver Bullet",
},
{
field: "title",
operator: "ne",
value: "The Mythical Man Month",
},
],
},
{
operator: "or",
value: [
{
field: "age",
operator: "gt",
value: 15,
},
{
field: "age",
operator: "lt",
value: 25,
},
],
},
] as ConditionalFilter[];
const response = await dataProvider(
"keywoytODSr6xAqfg",
"appKYl1H4k9g73sBT",
).getList({
resource: "posts",
filters: filters,
});
expect(response.total).toBe(1);
expect(response.data[0]["query"]).toBe(
'AND(OR({title}="Silver Bullet",{title}!="The Mythical Man Month"),OR({age}>15,{age}<25))',
);
});
}); | the_stack |
import { ServiceClientOptions, RequestOptions, ServiceCallback, HttpOperationResponse } from 'ms-rest';
import * as models from '../models';
/**
 * Operation-listing surface of the StorageSyncManagementClient.
 *
 * __NOTE__: An instance of this class is automatically created for an
 * instance of the StorageSyncManagementClient.
 */
export interface Operations {
  /**
   * Lists all of the available Storage Sync Rest API operations.
   *
   * @param options - Optional parameters; `customHeaders` are added to the request.
   * @returns Promise resolving to the full HTTP response whose deserialized
   * body is an {@link OperationEntityListResult}; rejects with
   * Error/ServiceError on failure.
   */
  listWithHttpOperationResponse(options?: { customHeaders?: Record<string, string> }): Promise<HttpOperationResponse<models.OperationEntityListResult>>;

  /**
   * Lists all of the available Storage Sync Rest API operations.
   *
   * The promise overload resolves to the deserialized
   * {@link OperationEntityListResult}; the callback overloads invoke
   * `callback(err, result, request, response)` instead of returning a
   * promise.
   *
   * @param options - Optional parameters; `customHeaders` are added to the request.
   * @param callback - Node-style callback receiving the result.
   */
  list(options?: { customHeaders?: Record<string, string> }): Promise<models.OperationEntityListResult>;
  list(callback: ServiceCallback<models.OperationEntityListResult>): void;
  list(options: { customHeaders?: Record<string, string> }, callback: ServiceCallback<models.OperationEntityListResult>): void;

  /**
   * Lists all of the available Storage Sync Rest API operations, continuing
   * from a paging link.
   *
   * @param nextPageLink - The NextLink from the previous successful call to the List operation.
   * @param options - Optional parameters; `customHeaders` are added to the request.
   * @returns Promise resolving to the full HTTP response whose deserialized
   * body is an {@link OperationEntityListResult}; rejects with
   * Error/ServiceError on failure.
   */
  listNextWithHttpOperationResponse(nextPageLink: string, options?: { customHeaders?: Record<string, string> }): Promise<HttpOperationResponse<models.OperationEntityListResult>>;

  /**
   * Lists all of the available Storage Sync Rest API operations, continuing
   * from a paging link.
   *
   * The promise overload resolves to the deserialized
   * {@link OperationEntityListResult}; the callback overloads invoke
   * `callback(err, result, request, response)` instead of returning a
   * promise.
   *
   * @param nextPageLink - The NextLink from the previous successful call to the List operation.
   * @param options - Optional parameters; `customHeaders` are added to the request.
   * @param callback - Node-style callback receiving the result.
   */
  listNext(nextPageLink: string, options?: { customHeaders?: Record<string, string> }): Promise<models.OperationEntityListResult>;
  listNext(nextPageLink: string, callback: ServiceCallback<models.OperationEntityListResult>): void;
  listNext(nextPageLink: string, options: { customHeaders?: Record<string, string> }, callback: ServiceCallback<models.OperationEntityListResult>): void;
}
/**
* @class
* StorageSyncServices
* __NOTE__: An instance of this class is automatically created for an
* instance of the StorageSyncManagementClient.
*/
export interface StorageSyncServices {
/**
 * Check the given namespace name availability.
 *
 * @param locationName The desired region for the name check.
 * @param parameters Parameters describing the namespace name to check;
 * parameters.name is the candidate name.
 * @param options Optional request parameters; options.customHeaders are
 * headers added to the HTTP request.
 * @returns A Promise resolving to the full HttpOperationResponse wrapping a
 * deserialized CheckNameAvailabilityResult; rejects with Error|ServiceError.
 */
checkNameAvailabilityWithHttpOperationResponse(locationName: string, parameters: models.CheckNameAvailabilityParameters, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.CheckNameAvailabilityResult>>;
/**
 * Check the given namespace name availability.
 *
 * @param locationName The desired region for the name check.
 * @param parameters Parameters describing the namespace name to check;
 * parameters.name is the candidate name.
 * @param options Optional request parameters (custom headers).
 * @param callback Optional node-style callback invoked as
 * callback(err, result, request, response). When omitted, a Promise resolving
 * to the deserialized CheckNameAvailabilityResult is returned instead.
 */
checkNameAvailability(locationName: string, parameters: models.CheckNameAvailabilityParameters, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.CheckNameAvailabilityResult>;
checkNameAvailability(locationName: string, parameters: models.CheckNameAvailabilityParameters, callback: ServiceCallback<models.CheckNameAvailabilityResult>): void;
checkNameAvailability(locationName: string, parameters: models.CheckNameAvailabilityParameters, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.CheckNameAvailabilityResult>): void;
/**
 * Create a new StorageSyncService.
 *
 * @param resourceGroupName The name of the resource group. The name is case
 * insensitive.
 * @param storageSyncServiceName Name of Storage Sync Service resource.
 * @param parameters The Storage Sync Service resource to create.
 * parameters.location (required) is the Azure geo region of the resource and
 * cannot be changed after creation; parameters.tags is an optional set of at
 * most 15 key/value pairs (key <= 128 chars, value <= 256 chars);
 * parameters.properties is optional.
 * @param options Optional request parameters; options.customHeaders are
 * headers added to the HTTP request.
 * @returns A Promise resolving to the full HttpOperationResponse wrapping a
 * deserialized StorageSyncService; rejects with Error|ServiceError.
 */
createWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, parameters: models.StorageSyncServiceCreateParameters, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.StorageSyncService>>;
/**
 * Create a new StorageSyncService.
 *
 * @param resourceGroupName The name of the resource group. The name is case
 * insensitive.
 * @param storageSyncServiceName Name of Storage Sync Service resource.
 * @param parameters The Storage Sync Service resource to create (see
 * createWithHttpOperationResponse for field semantics).
 * @param options Optional request parameters (custom headers).
 * @param callback Optional node-style callback invoked as
 * callback(err, result, request, response). When omitted, a Promise resolving
 * to the deserialized StorageSyncService is returned instead.
 */
create(resourceGroupName: string, storageSyncServiceName: string, parameters: models.StorageSyncServiceCreateParameters, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.StorageSyncService>;
create(resourceGroupName: string, storageSyncServiceName: string, parameters: models.StorageSyncServiceCreateParameters, callback: ServiceCallback<models.StorageSyncService>): void;
create(resourceGroupName: string, storageSyncServiceName: string, parameters: models.StorageSyncServiceCreateParameters, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.StorageSyncService>): void;
/**
 * Get a given StorageSyncService.
 *
 * @param resourceGroupName The name of the resource group. The name is case
 * insensitive.
 * @param storageSyncServiceName Name of Storage Sync Service resource.
 * @param options Optional request parameters; options.customHeaders are
 * headers added to the HTTP request.
 * @returns A Promise resolving to the full HttpOperationResponse wrapping a
 * deserialized StorageSyncService; rejects with Error|ServiceError.
 */
getWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.StorageSyncService>>;
/**
 * Get a given StorageSyncService.
 *
 * @param resourceGroupName The name of the resource group. The name is case
 * insensitive.
 * @param storageSyncServiceName Name of Storage Sync Service resource.
 * @param options Optional request parameters (custom headers).
 * @param callback Optional node-style callback invoked as
 * callback(err, result, request, response). When omitted, a Promise resolving
 * to the deserialized StorageSyncService is returned instead.
 */
get(resourceGroupName: string, storageSyncServiceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.StorageSyncService>;
get(resourceGroupName: string, storageSyncServiceName: string, callback: ServiceCallback<models.StorageSyncService>): void;
get(resourceGroupName: string, storageSyncServiceName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.StorageSyncService>): void;
/**
 * Patch a given StorageSyncService.
 *
 * @param resourceGroupName The name of the resource group. The name is case
 * insensitive.
 * @param storageSyncServiceName Name of Storage Sync Service resource.
 * @param options Optional request parameters. options.parameters is the
 * Storage Sync Service patch body (options.parameters.tags: user-specified
 * tags; options.parameters.properties: service properties);
 * options.customHeaders are headers added to the HTTP request.
 * @returns A Promise resolving to the full HttpOperationResponse wrapping a
 * deserialized StorageSyncService; rejects with Error|ServiceError.
 */
updateWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, options?: { parameters? : models.StorageSyncServiceUpdateParameters, customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.StorageSyncService>>;
/**
 * Patch a given StorageSyncService.
 *
 * @param resourceGroupName The name of the resource group. The name is case
 * insensitive.
 * @param storageSyncServiceName Name of Storage Sync Service resource.
 * @param options Optional request parameters; options.parameters is the
 * patch body, options.customHeaders are extra HTTP headers.
 * @param callback Optional node-style callback invoked as
 * callback(err, result, request, response). When omitted, a Promise resolving
 * to the deserialized StorageSyncService is returned instead.
 */
update(resourceGroupName: string, storageSyncServiceName: string, options?: { parameters? : models.StorageSyncServiceUpdateParameters, customHeaders? : { [headerName: string]: string; } }): Promise<models.StorageSyncService>;
update(resourceGroupName: string, storageSyncServiceName: string, callback: ServiceCallback<models.StorageSyncService>): void;
update(resourceGroupName: string, storageSyncServiceName: string, options: { parameters? : models.StorageSyncServiceUpdateParameters, customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.StorageSyncService>): void;
/**
 * Delete a given StorageSyncService.
 *
 * @param resourceGroupName The name of the resource group. The name is case
 * insensitive.
 * @param storageSyncServiceName Name of Storage Sync Service resource.
 * @param options Optional request parameters; options.customHeaders are
 * headers added to the HTTP request.
 * @returns A Promise resolving to the full HttpOperationResponse with no
 * deserialized body; rejects with Error|ServiceError.
 */
deleteMethodWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
/**
 * Delete a given StorageSyncService.
 *
 * @param resourceGroupName The name of the resource group. The name is case
 * insensitive.
 * @param storageSyncServiceName Name of Storage Sync Service resource.
 * @param options Optional request parameters (custom headers).
 * @param callback Optional node-style callback invoked as
 * callback(err, result, request, response), where result is null on success.
 * When omitted, a Promise resolving to void is returned instead.
 */
deleteMethod(resourceGroupName: string, storageSyncServiceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<void>;
deleteMethod(resourceGroupName: string, storageSyncServiceName: string, callback: ServiceCallback<void>): void;
deleteMethod(resourceGroupName: string, storageSyncServiceName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
/**
 * Get a StorageSyncService list by Resource group name.
 *
 * @param resourceGroupName The name of the resource group. The name is case
 * insensitive.
 * @param options Optional request parameters; options.customHeaders are
 * headers added to the HTTP request.
 * @returns A Promise resolving to the full HttpOperationResponse wrapping a
 * deserialized StorageSyncServiceArray; rejects with Error|ServiceError.
 */
listByResourceGroupWithHttpOperationResponse(resourceGroupName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.StorageSyncServiceArray>>;
/**
 * Get a StorageSyncService list by Resource group name.
 *
 * @param resourceGroupName The name of the resource group. The name is case
 * insensitive.
 * @param options Optional request parameters (custom headers).
 * @param callback Optional node-style callback invoked as
 * callback(err, result, request, response). When omitted, a Promise resolving
 * to the deserialized StorageSyncServiceArray is returned instead.
 */
listByResourceGroup(resourceGroupName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.StorageSyncServiceArray>;
listByResourceGroup(resourceGroupName: string, callback: ServiceCallback<models.StorageSyncServiceArray>): void;
listByResourceGroup(resourceGroupName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.StorageSyncServiceArray>): void;
/**
 * Get a StorageSyncService list by subscription.
 *
 * @param options Optional request parameters; options.customHeaders are
 * headers added to the HTTP request.
 * @returns A Promise resolving to the full HttpOperationResponse wrapping a
 * deserialized StorageSyncServiceArray; rejects with Error|ServiceError.
 */
listBySubscriptionWithHttpOperationResponse(options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.StorageSyncServiceArray>>;
/**
 * Get a StorageSyncService list by subscription.
 *
 * @param options Optional request parameters (custom headers).
 * @param callback Optional node-style callback invoked as
 * callback(err, result, request, response). When omitted, a Promise resolving
 * to the deserialized StorageSyncServiceArray is returned instead.
 */
listBySubscription(options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.StorageSyncServiceArray>;
listBySubscription(callback: ServiceCallback<models.StorageSyncServiceArray>): void;
listBySubscription(options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.StorageSyncServiceArray>): void;
}
/**
* @class
* SyncGroups
* __NOTE__: An instance of this class is automatically created for an
* instance of the StorageSyncManagementClient.
*/
export interface SyncGroups {
/**
 * Get a SyncGroup List.
 *
 * @param resourceGroupName The name of the resource group. The name is case
 * insensitive.
 * @param storageSyncServiceName Name of Storage Sync Service resource.
 * @param options Optional request parameters; options.customHeaders are
 * headers added to the HTTP request.
 * @returns A Promise resolving to the full HttpOperationResponse wrapping a
 * deserialized SyncGroupArray; rejects with Error|ServiceError.
 */
listByStorageSyncServiceWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.SyncGroupArray>>;
/**
 * Get a SyncGroup List.
 *
 * @param resourceGroupName The name of the resource group. The name is case
 * insensitive.
 * @param storageSyncServiceName Name of Storage Sync Service resource.
 * @param options Optional request parameters (custom headers).
 * @param callback Optional node-style callback invoked as
 * callback(err, result, request, response). When omitted, a Promise resolving
 * to the deserialized SyncGroupArray is returned instead.
 */
listByStorageSyncService(resourceGroupName: string, storageSyncServiceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.SyncGroupArray>;
listByStorageSyncService(resourceGroupName: string, storageSyncServiceName: string, callback: ServiceCallback<models.SyncGroupArray>): void;
listByStorageSyncService(resourceGroupName: string, storageSyncServiceName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.SyncGroupArray>): void;
/**
 * Create a new SyncGroup.
 *
 * @param resourceGroupName The name of the resource group. The name is case
 * insensitive.
 * @param storageSyncServiceName Name of Storage Sync Service resource.
 * @param syncGroupName Name of Sync Group resource.
 * @param parameters Sync Group body; parameters.properties optionally holds
 * the parameters used to create the sync group.
 * @param options Optional request parameters; options.customHeaders are
 * headers added to the HTTP request.
 * @returns A Promise resolving to the full HttpOperationResponse wrapping a
 * deserialized SyncGroup; rejects with Error|ServiceError.
 */
createWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, parameters: models.SyncGroupCreateParameters, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.SyncGroup>>;
/**
 * Create a new SyncGroup.
 *
 * @param resourceGroupName The name of the resource group. The name is case
 * insensitive.
 * @param storageSyncServiceName Name of Storage Sync Service resource.
 * @param syncGroupName Name of Sync Group resource.
 * @param parameters Sync Group body; parameters.properties optionally holds
 * the parameters used to create the sync group.
 * @param options Optional request parameters (custom headers).
 * @param callback Optional node-style callback invoked as
 * callback(err, result, request, response). When omitted, a Promise resolving
 * to the deserialized SyncGroup is returned instead.
 */
create(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, parameters: models.SyncGroupCreateParameters, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.SyncGroup>;
create(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, parameters: models.SyncGroupCreateParameters, callback: ServiceCallback<models.SyncGroup>): void;
create(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, parameters: models.SyncGroupCreateParameters, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.SyncGroup>): void;
/**
 * Get a given SyncGroup.
 *
 * @param resourceGroupName The name of the resource group. The name is case
 * insensitive.
 * @param storageSyncServiceName Name of Storage Sync Service resource.
 * @param syncGroupName Name of Sync Group resource.
 * @param options Optional request parameters; options.customHeaders are
 * headers added to the HTTP request.
 * @returns A Promise resolving to the full HttpOperationResponse wrapping a
 * deserialized SyncGroup; rejects with Error|ServiceError.
 */
getWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.SyncGroup>>;
/**
 * Get a given SyncGroup.
 *
 * @param resourceGroupName The name of the resource group. The name is case
 * insensitive.
 * @param storageSyncServiceName Name of Storage Sync Service resource.
 * @param syncGroupName Name of Sync Group resource.
 * @param options Optional request parameters (custom headers).
 * @param callback Optional node-style callback invoked as
 * callback(err, result, request, response). When omitted, a Promise resolving
 * to the deserialized SyncGroup is returned instead.
 */
get(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.SyncGroup>;
get(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, callback: ServiceCallback<models.SyncGroup>): void;
get(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.SyncGroup>): void;
/**
 * Delete a given SyncGroup.
 *
 * @param resourceGroupName The name of the resource group. The name is case
 * insensitive.
 * @param storageSyncServiceName Name of Storage Sync Service resource.
 * @param syncGroupName Name of Sync Group resource.
 * @param options Optional request parameters; options.customHeaders are
 * headers added to the HTTP request.
 * @returns A Promise resolving to the full HttpOperationResponse with no
 * deserialized body; rejects with Error|ServiceError.
 */
deleteMethodWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
/**
 * Delete a given SyncGroup.
 *
 * @param resourceGroupName The name of the resource group. The name is case
 * insensitive.
 * @param storageSyncServiceName Name of Storage Sync Service resource.
 * @param syncGroupName Name of Sync Group resource.
 * @param options Optional request parameters (custom headers).
 * @param callback Optional node-style callback invoked as
 * callback(err, result, request, response), where result is null on success.
 * When omitted, a Promise resolving to void is returned instead.
 */
deleteMethod(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<void>;
deleteMethod(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, callback: ServiceCallback<void>): void;
deleteMethod(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
}
/**
* @class
* CloudEndpoints
* __NOTE__: An instance of this class is automatically created for an
* instance of the StorageSyncManagementClient.
*/
export interface CloudEndpoints {
/**
 * Creates a new CloudEndpoint, resolving with the raw HTTP response.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param parameters Body of the Cloud Endpoint resource. Its fields
 *   (`storageAccountResourceId`, `azureFileShareName`, `storageAccountTenantId`)
 *   are all optional.
 * @param options Optional parameters; `options.customHeaders` are extra headers
 *   added to the HTTP request.
 * @returns A Promise resolving to HttpOperationResponse<CloudEndpoint>;
 *   rejects with an Error|ServiceError.
 */
createWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.CloudEndpointCreateParameters, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.CloudEndpoint>>;
/**
 * Creates a new CloudEndpoint.
 *
 * Overload behavior: when no callback is supplied, a Promise is returned that
 * resolves with the deserialized CloudEndpoint and rejects with an
 * Error|ServiceError. When a ServiceCallback is supplied, it is invoked as
 * callback(err, result, request, response) and the method returns void.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param parameters Body of the Cloud Endpoint resource (all fields optional;
 *   see CloudEndpointCreateParameters).
 * @param options Optional parameters; `options.customHeaders` are extra headers
 *   added to the HTTP request.
 */
create(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.CloudEndpointCreateParameters, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.CloudEndpoint>;
create(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.CloudEndpointCreateParameters, callback: ServiceCallback<models.CloudEndpoint>): void;
create(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.CloudEndpointCreateParameters, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.CloudEndpoint>): void;
/**
 * Gets a given CloudEndpoint, resolving with the raw HTTP response.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param options Optional parameters; `options.customHeaders` are extra headers
 *   added to the HTTP request.
 * @returns A Promise resolving to HttpOperationResponse<CloudEndpoint>;
 *   rejects with an Error|ServiceError.
 */
getWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.CloudEndpoint>>;
/**
 * Gets a given CloudEndpoint.
 *
 * Overload behavior: when no callback is supplied, a Promise is returned that
 * resolves with the deserialized CloudEndpoint and rejects with an
 * Error|ServiceError. When a ServiceCallback is supplied, it is invoked as
 * callback(err, result, request, response) and the method returns void.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param options Optional parameters; `options.customHeaders` are extra headers
 *   added to the HTTP request.
 */
get(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.CloudEndpoint>;
get(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, callback: ServiceCallback<models.CloudEndpoint>): void;
get(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.CloudEndpoint>): void;
/**
 * Deletes a given CloudEndpoint, resolving with the raw HTTP response.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param options Optional parameters; `options.customHeaders` are extra headers
 *   added to the HTTP request.
 * @returns A Promise resolving to HttpOperationResponse<void> (the operation
 *   has an empty response body); rejects with an Error|ServiceError.
 */
deleteMethodWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
/**
 * Deletes a given CloudEndpoint.
 *
 * Overload behavior: when no callback is supplied, a Promise is returned that
 * resolves with no value and rejects with an Error|ServiceError. When a
 * ServiceCallback is supplied, it is invoked as
 * callback(err, result, request, response) and the method returns void.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param options Optional parameters; `options.customHeaders` are extra headers
 *   added to the HTTP request.
 */
deleteMethod(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<void>;
deleteMethod(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, callback: ServiceCallback<void>): void;
deleteMethod(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
/**
 * Gets the list of CloudEndpoints in a Sync Group, resolving with the raw HTTP
 * response.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param options Optional parameters; `options.customHeaders` are extra headers
 *   added to the HTTP request.
 * @returns A Promise resolving to HttpOperationResponse<CloudEndpointArray>;
 *   rejects with an Error|ServiceError.
 */
listBySyncGroupWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.CloudEndpointArray>>;
/**
 * Gets the list of CloudEndpoints in a Sync Group.
 *
 * Overload behavior: when no callback is supplied, a Promise is returned that
 * resolves with the deserialized CloudEndpointArray and rejects with an
 * Error|ServiceError. When a ServiceCallback is supplied, it is invoked as
 * callback(err, result, request, response) and the method returns void.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param options Optional parameters; `options.customHeaders` are extra headers
 *   added to the HTTP request.
 */
listBySyncGroup(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.CloudEndpointArray>;
listBySyncGroup(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, callback: ServiceCallback<models.CloudEndpointArray>): void;
listBySyncGroup(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.CloudEndpointArray>): void;
/**
 * Pre Backup a given CloudEndpoint, resolving with the raw HTTP response.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param parameters Body of the Backup request; `parameters.azureFileShare`
 *   (optional) is the Azure File Share.
 * @param options Optional parameters; `options.customHeaders` are extra headers
 *   added to the HTTP request.
 * @returns A Promise resolving to HttpOperationResponse<void> (the operation
 *   has an empty response body); rejects with an Error|ServiceError.
 */
preBackupWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.BackupRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
/**
 * Pre Backup a given CloudEndpoint.
 *
 * Overload behavior: when no callback is supplied, a Promise is returned that
 * resolves with no value and rejects with an Error|ServiceError. When a
 * ServiceCallback is supplied, it is invoked as
 * callback(err, result, request, response) and the method returns void.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param parameters Body of the Backup request; `parameters.azureFileShare`
 *   (optional) is the Azure File Share.
 * @param options Optional parameters; `options.customHeaders` are extra headers
 *   added to the HTTP request.
 */
preBackup(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.BackupRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<void>;
preBackup(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.BackupRequest, callback: ServiceCallback<void>): void;
preBackup(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.BackupRequest, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
/**
 * Post Backup a given CloudEndpoint, resolving with the raw HTTP response.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param parameters Body of the Backup request; `parameters.azureFileShare`
 *   (optional) is the Azure File Share.
 * @param options Optional parameters; `options.customHeaders` are extra headers
 *   added to the HTTP request.
 * @returns A Promise resolving to HttpOperationResponse<PostBackupResponse>;
 *   rejects with an Error|ServiceError.
 */
postBackupWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.BackupRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.PostBackupResponse>>;
/**
 * Post Backup a given CloudEndpoint.
 *
 * Overload behavior: when no callback is supplied, a Promise is returned that
 * resolves with the deserialized PostBackupResponse and rejects with an
 * Error|ServiceError. When a ServiceCallback is supplied, it is invoked as
 * callback(err, result, request, response) and the method returns void.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param parameters Body of the Backup request; `parameters.azureFileShare`
 *   (optional) is the Azure File Share.
 * @param options Optional parameters; `options.customHeaders` are extra headers
 *   added to the HTTP request.
 */
postBackup(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.BackupRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.PostBackupResponse>;
postBackup(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.BackupRequest, callback: ServiceCallback<models.PostBackupResponse>): void;
postBackup(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.BackupRequest, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.PostBackupResponse>): void;
/**
 * Pre Restore a given CloudEndpoint, resolving with the raw HTTP response.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param parameters Body of the Pre Restore request. All fields are optional:
 *   `partition`, `replicaGroup`, `requestId`, `azureFileShareUri`, `status`,
 *   `sourceAzureFileShareUri`, `backupMetadataPropertyBag`,
 *   `restoreFileSpec` (array of restore file specs), and
 *   `pauseWaitForSyncDrainTimePeriodInSeconds`.
 * @param options Optional parameters; `options.customHeaders` are extra headers
 *   added to the HTTP request.
 * @returns A Promise resolving to HttpOperationResponse<void> (the operation
 *   has an empty response body); rejects with an Error|ServiceError.
 */
preRestoreWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.PreRestoreRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
/**
 * Pre Restore a given CloudEndpoint.
 *
 * Overload behavior: when no callback is supplied, a Promise is returned that
 * resolves with no value and rejects with an Error|ServiceError. When a
 * ServiceCallback is supplied, it is invoked as
 * callback(err, result, request, response) and the method returns void.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param parameters Body of the Pre Restore request (see PreRestoreRequest;
 *   all fields optional).
 * @param options Optional parameters; `options.customHeaders` are extra headers
 *   added to the HTTP request.
 */
preRestore(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.PreRestoreRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<void>;
preRestore(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.PreRestoreRequest, callback: ServiceCallback<void>): void;
preRestore(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.PreRestoreRequest, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
/**
 * Restore Heartbeat a given CloudEndpoint, resolving with the raw HTTP
 * response.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param options Optional parameters; `options.customHeaders` are extra headers
 *   added to the HTTP request.
 * @returns A Promise resolving to HttpOperationResponse<void> (the operation
 *   has an empty response body); rejects with an Error|ServiceError.
 */
restoreheartbeatWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
/**
 * Restore Heartbeat a given CloudEndpoint.
 *
 * Overload behavior: when no callback is supplied, a Promise is returned that
 * resolves with no value and rejects with an Error|ServiceError. When a
 * ServiceCallback is supplied, it is invoked as
 * callback(err, result, request, response) and the method returns void.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param options Optional parameters; `options.customHeaders` are extra headers
 *   added to the HTTP request.
 */
restoreheartbeat(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<void>;
restoreheartbeat(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, callback: ServiceCallback<void>): void;
restoreheartbeat(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
/**
 * Post Restore a given CloudEndpoint, resolving with the raw HTTP response.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param parameters Body of the Post Restore request. All fields are optional:
 *   `partition`, `replicaGroup`, `requestId`, `azureFileShareUri`, `status`,
 *   `sourceAzureFileShareUri`, `failedFileList`, and `restoreFileSpec`
 *   (array of restore file specs).
 * @param options Optional parameters; `options.customHeaders` are extra headers
 *   added to the HTTP request.
 * @returns A Promise resolving to HttpOperationResponse<void> (the operation
 *   has an empty response body); rejects with an Error|ServiceError.
 */
postRestoreWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.PostRestoreRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
/**
 * Post Restore a given CloudEndpoint.
 *
 * Overload behavior: when no callback is supplied, a Promise is returned that
 * resolves with no value and rejects with an Error|ServiceError. When a
 * ServiceCallback is supplied, it is invoked as
 * callback(err, result, request, response) and the method returns void.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param parameters Body of the Post Restore request (see PostRestoreRequest;
 *   all fields optional).
 * @param options Optional parameters; `options.customHeaders` are extra headers
 *   added to the HTTP request.
 */
postRestore(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.PostRestoreRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<void>;
postRestore(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.PostRestoreRequest, callback: ServiceCallback<void>): void;
postRestore(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.PostRestoreRequest, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
/**
 * Creates a new CloudEndpoint, resolving with the raw HTTP response.
 *
 * NOTE(review): the `begin` prefix suggests this starts a long-running
 * operation that `create` polls to completion — confirm against the generated
 * client implementation.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param parameters Body of the Cloud Endpoint resource. Its fields
 *   (`storageAccountResourceId`, `azureFileShareName`, `storageAccountTenantId`)
 *   are all optional.
 * @param options Optional parameters; `options.customHeaders` are extra headers
 *   added to the HTTP request.
 * @returns A Promise resolving to HttpOperationResponse<CloudEndpoint>;
 *   rejects with an Error|ServiceError.
 */
beginCreateWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.CloudEndpointCreateParameters, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.CloudEndpoint>>;
/**
 * Creates a new CloudEndpoint.
 *
 * Overload behavior: when no callback is supplied, a Promise is returned that
 * resolves with the deserialized CloudEndpoint and rejects with an
 * Error|ServiceError. When a ServiceCallback is supplied, it is invoked as
 * callback(err, result, request, response) and the method returns void.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param parameters Body of the Cloud Endpoint resource (all fields optional;
 *   see CloudEndpointCreateParameters).
 * @param options Optional parameters; `options.customHeaders` are extra headers
 *   added to the HTTP request.
 */
beginCreate(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.CloudEndpointCreateParameters, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.CloudEndpoint>;
beginCreate(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.CloudEndpointCreateParameters, callback: ServiceCallback<models.CloudEndpoint>): void;
beginCreate(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.CloudEndpointCreateParameters, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.CloudEndpoint>): void;
/**
 * Begins deleting a given CloudEndpoint (long-running operation), returning
 * the raw HTTP response.
 *
 * @param resourceGroupName Name of the resource group (case insensitive).
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param options Optional parameters; `options.customHeaders` are extra
 *   headers added to the HTTP request.
 * @returns Promise resolving to the HttpOperationResponse (no body on
 *   success); rejects with Error or ServiceError.
 */
beginDeleteMethodWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
/**
 * Begins deleting a given CloudEndpoint (long-running operation).
 *
 * Takes the same parameters as `beginDeleteMethodWithHttpOperationResponse`.
 * When the last argument is a ServiceCallback it is invoked as
 * `callback(err, result, request, response)` and the method returns void;
 * otherwise a Promise<void> is returned (rejecting with Error or
 * ServiceError).
 */
beginDeleteMethod(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<void>;
beginDeleteMethod(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, callback: ServiceCallback<void>): void;
beginDeleteMethod(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
/**
 * Begins the Pre Backup step for a given CloudEndpoint (long-running
 * operation), returning the raw HTTP response.
 *
 * @param resourceGroupName Name of the resource group (case insensitive).
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param parameters Body of the Backup request; its `azureFileShare`
 *   property is optional.
 * @param options Optional parameters; `options.customHeaders` are extra
 *   headers added to the HTTP request.
 * @returns Promise resolving to the HttpOperationResponse (no body on
 *   success); rejects with Error or ServiceError.
 */
beginPreBackupWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.BackupRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
/**
 * Begins the Pre Backup step for a given CloudEndpoint (long-running
 * operation).
 *
 * Takes the same parameters as `beginPreBackupWithHttpOperationResponse`.
 * When the last argument is a ServiceCallback it is invoked as
 * `callback(err, result, request, response)` and the method returns void;
 * otherwise a Promise<void> is returned (rejecting with Error or
 * ServiceError).
 */
beginPreBackup(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.BackupRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<void>;
beginPreBackup(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.BackupRequest, callback: ServiceCallback<void>): void;
beginPreBackup(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.BackupRequest, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
/**
 * Begins the Post Backup step for a given CloudEndpoint (long-running
 * operation), returning the raw HTTP response.
 *
 * @param resourceGroupName Name of the resource group (case insensitive).
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param parameters Body of the Backup request; its `azureFileShare`
 *   property is optional.
 * @param options Optional parameters; `options.customHeaders` are extra
 *   headers added to the HTTP request.
 * @returns Promise resolving to the HttpOperationResponse whose deserialized
 *   body is a PostBackupResponse; rejects with Error or ServiceError.
 */
beginPostBackupWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.BackupRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.PostBackupResponse>>;
/**
 * Begins the Post Backup step for a given CloudEndpoint (long-running
 * operation).
 *
 * Takes the same parameters as `beginPostBackupWithHttpOperationResponse`.
 * When the last argument is a ServiceCallback it is invoked as
 * `callback(err, result, request, response)` and the method returns void;
 * otherwise a Promise resolving to the deserialized PostBackupResponse is
 * returned (rejecting with Error or ServiceError).
 */
beginPostBackup(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.BackupRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.PostBackupResponse>;
beginPostBackup(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.BackupRequest, callback: ServiceCallback<models.PostBackupResponse>): void;
beginPostBackup(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.BackupRequest, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.PostBackupResponse>): void;
/**
 * Begins the Pre Restore step for a given CloudEndpoint (long-running
 * operation), returning the raw HTTP response.
 *
 * @param resourceGroupName Name of the resource group (case insensitive).
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param parameters Body of the Pre Restore request. All properties are
 *   optional: `partition`, `replicaGroup`, `requestId`, `azureFileShareUri`,
 *   `status`, `sourceAzureFileShareUri`, `backupMetadataPropertyBag`,
 *   `restoreFileSpec` (array) and
 *   `pauseWaitForSyncDrainTimePeriodInSeconds` (number).
 * @param options Optional parameters; `options.customHeaders` are extra
 *   headers added to the HTTP request.
 * @returns Promise resolving to the HttpOperationResponse (no body on
 *   success); rejects with Error or ServiceError.
 */
beginPreRestoreWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.PreRestoreRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
/**
 * Begins the Pre Restore step for a given CloudEndpoint (long-running
 * operation).
 *
 * Takes the same parameters as `beginPreRestoreWithHttpOperationResponse`.
 * When the last argument is a ServiceCallback it is invoked as
 * `callback(err, result, request, response)` and the method returns void;
 * otherwise a Promise<void> is returned (rejecting with Error or
 * ServiceError).
 */
beginPreRestore(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.PreRestoreRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<void>;
beginPreRestore(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.PreRestoreRequest, callback: ServiceCallback<void>): void;
beginPreRestore(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.PreRestoreRequest, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
/**
 * Begins the Post Restore step for a given CloudEndpoint (long-running
 * operation), returning the raw HTTP response.
 *
 * @param resourceGroupName Name of the resource group (case insensitive).
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param cloudEndpointName Name of the Cloud Endpoint object.
 * @param parameters Body of the Post Restore request. All properties are
 *   optional: `partition`, `replicaGroup`, `requestId`, `azureFileShareUri`,
 *   `status`, `sourceAzureFileShareUri`, `failedFileList` and
 *   `restoreFileSpec` (array).
 * @param options Optional parameters; `options.customHeaders` are extra
 *   headers added to the HTTP request.
 * @returns Promise resolving to the HttpOperationResponse (no body on
 *   success); rejects with Error or ServiceError.
 */
beginPostRestoreWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.PostRestoreRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
/**
 * Begins the Post Restore step for a given CloudEndpoint (long-running
 * operation).
 *
 * Takes the same parameters as `beginPostRestoreWithHttpOperationResponse`.
 * When the last argument is a ServiceCallback it is invoked as
 * `callback(err, result, request, response)` and the method returns void;
 * otherwise a Promise<void> is returned (rejecting with Error or
 * ServiceError).
 */
beginPostRestore(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.PostRestoreRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<void>;
beginPostRestore(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.PostRestoreRequest, callback: ServiceCallback<void>): void;
beginPostRestore(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, cloudEndpointName: string, parameters: models.PostRestoreRequest, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
}
/**
* @class
* ServerEndpoints
* __NOTE__: An instance of this class is automatically created for an
* instance of the StorageSyncManagementClient.
*/
export interface ServerEndpoints {
/**
 * Creates a new ServerEndpoint, returning the raw HTTP response.
 *
 * @param resourceGroupName Name of the resource group (case insensitive).
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param serverEndpointName Name of the Server Endpoint object.
 * @param parameters Body of the Server Endpoint object. All properties are
 *   optional: `serverLocalPath`, `cloudTiering` ('on' | 'off'),
 *   `volumeFreeSpacePercent` (free space kept by cloud tiering when
 *   enabled), `tierFilesOlderThanDays`, `friendlyName`, `serverResourceId`,
 *   `offlineDataTransfer` ('on' | 'off') and
 *   `offlineDataTransferShareName`.
 * @param options Optional parameters; `options.customHeaders` are extra
 *   headers added to the HTTP request.
 * @returns Promise resolving to the HttpOperationResponse whose deserialized
 *   body is the created ServerEndpoint; rejects with Error or ServiceError.
 */
createWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, parameters: models.ServerEndpointCreateParameters, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.ServerEndpoint>>;
/**
 * Creates a new ServerEndpoint.
 *
 * Takes the same parameters as `createWithHttpOperationResponse`. When the
 * last argument is a ServiceCallback it is invoked as
 * `callback(err, result, request, response)` and the method returns void;
 * otherwise a Promise resolving to the deserialized ServerEndpoint is
 * returned (rejecting with Error or ServiceError).
 */
create(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, parameters: models.ServerEndpointCreateParameters, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.ServerEndpoint>;
create(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, parameters: models.ServerEndpointCreateParameters, callback: ServiceCallback<models.ServerEndpoint>): void;
create(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, parameters: models.ServerEndpointCreateParameters, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.ServerEndpoint>): void;
/**
 * Patches a given ServerEndpoint, returning the raw HTTP response.
 *
 * @param resourceGroupName Name of the resource group (case insensitive).
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param serverEndpointName Name of the Server Endpoint object.
 * @param options Optional parameters. `options.parameters` may carry any of
 *   the properties applicable in a PUT request: `cloudTiering`
 *   ('on' | 'off'), `volumeFreeSpacePercent` (free space kept by cloud
 *   tiering when enabled), `tierFilesOlderThanDays`, `offlineDataTransfer`
 *   ('on' | 'off') and `offlineDataTransferShareName`.
 *   `options.customHeaders` are extra headers added to the HTTP request.
 * @returns Promise resolving to the HttpOperationResponse whose deserialized
 *   body is the updated ServerEndpoint; rejects with Error or ServiceError.
 */
updateWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, options?: { parameters? : models.ServerEndpointUpdateParameters, customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.ServerEndpoint>>;
/**
 * Patches a given ServerEndpoint.
 *
 * Takes the same parameters as `updateWithHttpOperationResponse`. When the
 * last argument is a ServiceCallback it is invoked as
 * `callback(err, result, request, response)` and the method returns void;
 * otherwise a Promise resolving to the deserialized ServerEndpoint is
 * returned (rejecting with Error or ServiceError).
 */
update(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, options?: { parameters? : models.ServerEndpointUpdateParameters, customHeaders? : { [headerName: string]: string; } }): Promise<models.ServerEndpoint>;
update(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, callback: ServiceCallback<models.ServerEndpoint>): void;
update(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, options: { parameters? : models.ServerEndpointUpdateParameters, customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.ServerEndpoint>): void;
/**
 * Gets a ServerEndpoint, returning the raw HTTP response.
 *
 * @param resourceGroupName Name of the resource group (case insensitive).
 * @param storageSyncServiceName Name of the Storage Sync Service resource.
 * @param syncGroupName Name of the Sync Group resource.
 * @param serverEndpointName Name of the Server Endpoint object.
 * @param options Optional parameters; `options.customHeaders` are extra
 *   headers added to the HTTP request.
 * @returns Promise resolving to the HttpOperationResponse whose deserialized
 *   body is the ServerEndpoint; rejects with Error or ServiceError.
 */
getWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.ServerEndpoint>>;
/**
 * Gets a ServerEndpoint.
 *
 * Takes the same parameters as `getWithHttpOperationResponse`. When the
 * last argument is a ServiceCallback it is invoked as
 * `callback(err, result, request, response)` and the method returns void;
 * otherwise a Promise resolving to the deserialized ServerEndpoint is
 * returned (rejecting with Error or ServiceError).
 */
get(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.ServerEndpoint>;
get(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, callback: ServiceCallback<models.ServerEndpoint>): void;
get(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.ServerEndpoint>): void;
/**
 * Delete a given ServerEndpoint.
 *
 * @param resourceGroupName - Resource group name (case insensitive).
 * @param storageSyncServiceName - Storage Sync Service resource name.
 * @param syncGroupName - Sync Group resource name.
 * @param serverEndpointName - Server Endpoint object name.
 * @param options - Optional parameters; `customHeaders` are added to the request.
 * @returns Promise resolving to the raw HttpOperationResponse (no deserialized
 * body); rejects with an Error or ServiceError on failure.
 */
deleteMethodWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, options?: { customHeaders?: { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
/**
 * Delete a given ServerEndpoint.
 *
 * @param resourceGroupName - Resource group name (case insensitive).
 * @param storageSyncServiceName - Storage Sync Service resource name.
 * @param syncGroupName - Sync Group resource name.
 * @param serverEndpointName - Server Endpoint object name.
 * @param options - Optional parameters; `customHeaders` are added to the request.
 * @param callback - Optional node-style callback receiving (err, result, request, response).
 * @returns When no callback is supplied, a Promise resolving to void;
 * rejects with an Error or ServiceError on failure.
 */
deleteMethod(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, options?: { customHeaders?: { [headerName: string]: string; } }): Promise<void>;
deleteMethod(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, callback: ServiceCallback<void>): void;
deleteMethod(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, options: { customHeaders?: { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
/**
 * Get a ServerEndpoint list.
 *
 * @param resourceGroupName - Resource group name (case insensitive).
 * @param storageSyncServiceName - Storage Sync Service resource name.
 * @param syncGroupName - Sync Group resource name.
 * @param options - Optional parameters; `customHeaders` are added to the request.
 * @returns Promise resolving to an HttpOperationResponse whose body is the
 * deserialized {@link ServerEndpointArray}; rejects with an Error or ServiceError.
 */
listBySyncGroupWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, options?: { customHeaders?: { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.ServerEndpointArray>>;
/**
 * Get a ServerEndpoint list.
 *
 * @param resourceGroupName - Resource group name (case insensitive).
 * @param storageSyncServiceName - Storage Sync Service resource name.
 * @param syncGroupName - Sync Group resource name.
 * @param options - Optional parameters; `customHeaders` are added to the request.
 * @param callback - Optional node-style callback receiving (err, result, request, response).
 * @returns When no callback is supplied, a Promise resolving to the deserialized
 * {@link ServerEndpointArray}; rejects with an Error or ServiceError on failure.
 */
listBySyncGroup(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, options?: { customHeaders?: { [headerName: string]: string; } }): Promise<models.ServerEndpointArray>;
listBySyncGroup(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, callback: ServiceCallback<models.ServerEndpointArray>): void;
listBySyncGroup(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, options: { customHeaders?: { [headerName: string]: string; } }, callback: ServiceCallback<models.ServerEndpointArray>): void;
/**
 * Recall a server endpoint.
 *
 * @param resourceGroupName - Resource group name (case insensitive).
 * @param storageSyncServiceName - Storage Sync Service resource name.
 * @param syncGroupName - Sync Group resource name.
 * @param serverEndpointName - Server Endpoint object name.
 * @param parameters - Recall Action body; optional `pattern` (file pattern)
 * and `recallPath` fields.
 * @param options - Optional parameters; `customHeaders` are added to the request.
 * @returns Promise resolving to the raw HttpOperationResponse (no deserialized
 * body); rejects with an Error or ServiceError on failure.
 */
recallActionWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, parameters: models.RecallActionParameters, options?: { customHeaders?: { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
/**
 * Recall a server endpoint.
 *
 * @param resourceGroupName - Resource group name (case insensitive).
 * @param storageSyncServiceName - Storage Sync Service resource name.
 * @param syncGroupName - Sync Group resource name.
 * @param serverEndpointName - Server Endpoint object name.
 * @param parameters - Recall Action body; optional `pattern` (file pattern)
 * and `recallPath` fields.
 * @param options - Optional parameters; `customHeaders` are added to the request.
 * @param callback - Optional node-style callback receiving (err, result, request, response).
 * @returns When no callback is supplied, a Promise resolving to void;
 * rejects with an Error or ServiceError on failure.
 */
recallAction(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, parameters: models.RecallActionParameters, options?: { customHeaders?: { [headerName: string]: string; } }): Promise<void>;
recallAction(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, parameters: models.RecallActionParameters, callback: ServiceCallback<void>): void;
recallAction(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, parameters: models.RecallActionParameters, options: { customHeaders?: { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
/**
 * Create a new ServerEndpoint (long-running operation, first poll step).
 *
 * @param resourceGroupName - Resource group name (case insensitive).
 * @param storageSyncServiceName - Storage Sync Service resource name.
 * @param syncGroupName - Sync Group resource name.
 * @param serverEndpointName - Server Endpoint object name.
 * @param parameters - Server Endpoint body. Optional fields: `serverLocalPath`,
 * `cloudTiering` ('on' | 'off'), `volumeFreeSpacePercent` (free space kept by
 * cloud tiering when enabled), `tierFilesOlderThanDays`, `friendlyName`,
 * `serverResourceId`, `offlineDataTransfer` ('on' | 'off'),
 * `offlineDataTransferShareName`.
 * @param options - Optional parameters; `customHeaders` are added to the request.
 * @returns Promise resolving to an HttpOperationResponse whose body is the
 * deserialized {@link ServerEndpoint}; rejects with an Error or ServiceError.
 */
beginCreateWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, parameters: models.ServerEndpointCreateParameters, options?: { customHeaders?: { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.ServerEndpoint>>;
/**
 * Create a new ServerEndpoint (long-running operation, first poll step).
 *
 * @param resourceGroupName - Resource group name (case insensitive).
 * @param storageSyncServiceName - Storage Sync Service resource name.
 * @param syncGroupName - Sync Group resource name.
 * @param serverEndpointName - Server Endpoint object name.
 * @param parameters - Server Endpoint body. Optional fields: `serverLocalPath`,
 * `cloudTiering` ('on' | 'off'), `volumeFreeSpacePercent` (free space kept by
 * cloud tiering when enabled), `tierFilesOlderThanDays`, `friendlyName`,
 * `serverResourceId`, `offlineDataTransfer` ('on' | 'off'),
 * `offlineDataTransferShareName`.
 * @param options - Optional parameters; `customHeaders` are added to the request.
 * @param callback - Optional node-style callback receiving (err, result, request, response).
 * @returns When no callback is supplied, a Promise resolving to the deserialized
 * {@link ServerEndpoint}; rejects with an Error or ServiceError on failure.
 */
beginCreate(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, parameters: models.ServerEndpointCreateParameters, options?: { customHeaders?: { [headerName: string]: string; } }): Promise<models.ServerEndpoint>;
beginCreate(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, parameters: models.ServerEndpointCreateParameters, callback: ServiceCallback<models.ServerEndpoint>): void;
beginCreate(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, parameters: models.ServerEndpointCreateParameters, options: { customHeaders?: { [headerName: string]: string; } }, callback: ServiceCallback<models.ServerEndpoint>): void;
/**
 * Patch a given ServerEndpoint (long-running operation, first poll step).
 *
 * @param resourceGroupName - Resource group name (case insensitive).
 * @param storageSyncServiceName - Storage Sync Service resource name.
 * @param syncGroupName - Sync Group resource name.
 * @param serverEndpointName - Server Endpoint object name.
 * @param options - Optional parameters. `options.parameters` may carry any of
 * the PUT-applicable properties: `cloudTiering` ('on' | 'off'),
 * `volumeFreeSpacePercent` (free space kept by cloud tiering when enabled),
 * `tierFilesOlderThanDays`, `offlineDataTransfer` ('on' | 'off'),
 * `offlineDataTransferShareName`. `customHeaders` are added to the request.
 * @returns Promise resolving to an HttpOperationResponse whose body is the
 * deserialized {@link ServerEndpoint}; rejects with an Error or ServiceError.
 */
beginUpdateWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, options?: { parameters?: models.ServerEndpointUpdateParameters, customHeaders?: { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.ServerEndpoint>>;
/**
 * Patch a given ServerEndpoint (long-running operation, first poll step).
 *
 * @param resourceGroupName - Resource group name (case insensitive).
 * @param storageSyncServiceName - Storage Sync Service resource name.
 * @param syncGroupName - Sync Group resource name.
 * @param serverEndpointName - Server Endpoint object name.
 * @param options - Optional parameters. `options.parameters` may carry any of
 * the PUT-applicable properties: `cloudTiering` ('on' | 'off'),
 * `volumeFreeSpacePercent` (free space kept by cloud tiering when enabled),
 * `tierFilesOlderThanDays`, `offlineDataTransfer` ('on' | 'off'),
 * `offlineDataTransferShareName`. `customHeaders` are added to the request.
 * @param callback - Optional node-style callback receiving (err, result, request, response).
 * @returns When no callback is supplied, a Promise resolving to the deserialized
 * {@link ServerEndpoint}; rejects with an Error or ServiceError on failure.
 */
beginUpdate(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, options?: { parameters?: models.ServerEndpointUpdateParameters, customHeaders?: { [headerName: string]: string; } }): Promise<models.ServerEndpoint>;
beginUpdate(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, callback: ServiceCallback<models.ServerEndpoint>): void;
beginUpdate(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, options: { parameters?: models.ServerEndpointUpdateParameters, customHeaders?: { [headerName: string]: string; } }, callback: ServiceCallback<models.ServerEndpoint>): void;
/**
 * Delete a given ServerEndpoint (long-running operation, first poll step).
 *
 * @param resourceGroupName - Resource group name (case insensitive).
 * @param storageSyncServiceName - Storage Sync Service resource name.
 * @param syncGroupName - Sync Group resource name.
 * @param serverEndpointName - Server Endpoint object name.
 * @param options - Optional parameters; `customHeaders` are added to the request.
 * @returns Promise resolving to the raw HttpOperationResponse (no deserialized
 * body); rejects with an Error or ServiceError on failure.
 */
beginDeleteMethodWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, options?: { customHeaders?: { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
/**
 * Delete a given ServerEndpoint (long-running operation, first poll step).
 *
 * @param resourceGroupName - Resource group name (case insensitive).
 * @param storageSyncServiceName - Storage Sync Service resource name.
 * @param syncGroupName - Sync Group resource name.
 * @param serverEndpointName - Server Endpoint object name.
 * @param options - Optional parameters; `customHeaders` are added to the request.
 * @param callback - Optional node-style callback receiving (err, result, request, response).
 * @returns When no callback is supplied, a Promise resolving to void;
 * rejects with an Error or ServiceError on failure.
 */
beginDeleteMethod(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, options?: { customHeaders?: { [headerName: string]: string; } }): Promise<void>;
beginDeleteMethod(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, callback: ServiceCallback<void>): void;
beginDeleteMethod(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, options: { customHeaders?: { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
/**
 * Recall a server endpoint (long-running operation, first poll step).
 *
 * @param resourceGroupName - Resource group name (case insensitive).
 * @param storageSyncServiceName - Storage Sync Service resource name.
 * @param syncGroupName - Sync Group resource name.
 * @param serverEndpointName - Server Endpoint object name.
 * @param parameters - Recall Action body; optional `pattern` (file pattern)
 * and `recallPath` fields.
 * @param options - Optional parameters; `customHeaders` are added to the request.
 * @returns Promise resolving to the raw HttpOperationResponse (no deserialized
 * body); rejects with an Error or ServiceError on failure.
 */
beginRecallActionWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, parameters: models.RecallActionParameters, options?: { customHeaders?: { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
/**
 * Recall a server endpoint (long-running operation, first poll step).
 *
 * @param resourceGroupName - Resource group name (case insensitive).
 * @param storageSyncServiceName - Storage Sync Service resource name.
 * @param syncGroupName - Sync Group resource name.
 * @param serverEndpointName - Server Endpoint object name.
 * @param parameters - Recall Action body; optional `pattern` (file pattern)
 * and `recallPath` fields.
 * @param options - Optional parameters; `customHeaders` are added to the request.
 * @param callback - Optional node-style callback receiving (err, result, request, response).
 * @returns When no callback is supplied, a Promise resolving to void;
 * rejects with an Error or ServiceError on failure.
 */
beginRecallAction(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, parameters: models.RecallActionParameters, options?: { customHeaders?: { [headerName: string]: string; } }): Promise<void>;
beginRecallAction(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, parameters: models.RecallActionParameters, callback: ServiceCallback<void>): void;
beginRecallAction(resourceGroupName: string, storageSyncServiceName: string, syncGroupName: string, serverEndpointName: string, parameters: models.RecallActionParameters, options: { customHeaders?: { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
}
/**
 * Operations on the registered (on-premises) servers of a Storage Sync Service.
 *
 * An instance of this interface is created automatically on the
 * StorageSyncManagementClient; do not construct it directly.
 *
 * Method naming conventions in this generated client:
 * - `*WithHttpOperationResponse` resolves with the raw HTTP response wrapper
 *   (`HttpOperationResponse<T>`); the plain-named variant resolves with only
 *   the deserialized body `T`.
 * - Each plain-named method is overloaded: promise form (optional `options`)
 *   or Node-style callback form `(err, result, request, response)`.
 * - NOTE(review): per autorest convention the `begin*` variants appear to issue
 *   the initial request of the long-running operation without polling to
 *   completion — confirm against the generated implementation.
 */
export interface RegisteredServers {
  /**
   * Get the list of registered servers for a Storage Sync Service,
   * resolving with the full HTTP response.
   *
   * @param resourceGroupName The name of the resource group (case insensitive).
   * @param storageSyncServiceName Name of the Storage Sync Service resource.
   * @param options Optional parameters; `customHeaders` are added to the request.
   * @returns Promise of the HTTP response whose body is a `RegisteredServerArray`.
   */
  listByStorageSyncServiceWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.RegisteredServerArray>>;
  /**
   * Get the list of registered servers for a Storage Sync Service.
   *
   * @param resourceGroupName The name of the resource group (case insensitive).
   * @param storageSyncServiceName Name of the Storage Sync Service resource.
   * @param options Optional parameters; `customHeaders` are added to the request.
   * @param callback Optional Node-style callback; when supplied the method
   *   returns `void` instead of a promise.
   * @returns Promise of the deserialized `RegisteredServerArray`, or `void`
   *   when a callback is used.
   */
  listByStorageSyncService(resourceGroupName: string, storageSyncServiceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.RegisteredServerArray>;
  listByStorageSyncService(resourceGroupName: string, storageSyncServiceName: string, callback: ServiceCallback<models.RegisteredServerArray>): void;
  listByStorageSyncService(resourceGroupName: string, storageSyncServiceName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.RegisteredServerArray>): void;
  /**
   * Get a single registered server, resolving with the full HTTP response.
   *
   * @param resourceGroupName The name of the resource group (case insensitive).
   * @param storageSyncServiceName Name of the Storage Sync Service resource.
   * @param serverId GUID identifying the on-premises server.
   * @param options Optional parameters; `customHeaders` are added to the request.
   * @returns Promise of the HTTP response whose body is a `RegisteredServer`.
   */
  getWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, serverId: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.RegisteredServer>>;
  /**
   * Get a single registered server.
   *
   * @param resourceGroupName The name of the resource group (case insensitive).
   * @param storageSyncServiceName Name of the Storage Sync Service resource.
   * @param serverId GUID identifying the on-premises server.
   * @param options Optional parameters; `customHeaders` are added to the request.
   * @param callback Optional Node-style callback; when supplied the method
   *   returns `void` instead of a promise.
   * @returns Promise of the deserialized `RegisteredServer`, or `void` when a
   *   callback is used.
   */
  get(resourceGroupName: string, storageSyncServiceName: string, serverId: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.RegisteredServer>;
  get(resourceGroupName: string, storageSyncServiceName: string, serverId: string, callback: ServiceCallback<models.RegisteredServer>): void;
  get(resourceGroupName: string, storageSyncServiceName: string, serverId: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.RegisteredServer>): void;
  /**
   * Add a new registered server, resolving with the full HTTP response.
   *
   * @param resourceGroupName The name of the resource group (case insensitive).
   * @param storageSyncServiceName Name of the Storage Sync Service resource.
   * @param serverId GUID identifying the on-premises server.
   * @param parameters Body of the Registered Server object. All fields are
   *   optional strings: `serverCertificate`, `agentVersion`, `serverOSVersion`,
   *   `lastHeartBeat`, `serverRole`, `clusterId`, `clusterName`, `serverId`,
   *   `friendlyName`.
   * @param options Optional parameters; `customHeaders` are added to the request.
   * @returns Promise of the HTTP response whose body is the created
   *   `RegisteredServer`.
   */
  createWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, serverId: string, parameters: models.RegisteredServerCreateParameters, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.RegisteredServer>>;
  /**
   * Add a new registered server.
   *
   * @param resourceGroupName The name of the resource group (case insensitive).
   * @param storageSyncServiceName Name of the Storage Sync Service resource.
   * @param serverId GUID identifying the on-premises server.
   * @param parameters Body of the Registered Server object (see
   *   `createWithHttpOperationResponse` for the field list).
   * @param options Optional parameters; `customHeaders` are added to the request.
   * @param callback Optional Node-style callback; when supplied the method
   *   returns `void` instead of a promise.
   * @returns Promise of the created `RegisteredServer`, or `void` when a
   *   callback is used.
   */
  create(resourceGroupName: string, storageSyncServiceName: string, serverId: string, parameters: models.RegisteredServerCreateParameters, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.RegisteredServer>;
  create(resourceGroupName: string, storageSyncServiceName: string, serverId: string, parameters: models.RegisteredServerCreateParameters, callback: ServiceCallback<models.RegisteredServer>): void;
  create(resourceGroupName: string, storageSyncServiceName: string, serverId: string, parameters: models.RegisteredServerCreateParameters, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.RegisteredServer>): void;
  /**
   * Delete the given registered server, resolving with the full HTTP response.
   *
   * @param resourceGroupName The name of the resource group (case insensitive).
   * @param storageSyncServiceName Name of the Storage Sync Service resource.
   * @param serverId GUID identifying the on-premises server.
   * @param options Optional parameters; `customHeaders` are added to the request.
   * @returns Promise of the HTTP response; the body is empty (`void`).
   */
  deleteMethodWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, serverId: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
  /**
   * Delete the given registered server.
   *
   * Named `deleteMethod` (not `delete`) because `delete` is a reserved word.
   *
   * @param resourceGroupName The name of the resource group (case insensitive).
   * @param storageSyncServiceName Name of the Storage Sync Service resource.
   * @param serverId GUID identifying the on-premises server.
   * @param options Optional parameters; `customHeaders` are added to the request.
   * @param callback Optional Node-style callback; when supplied the method
   *   returns `void` instead of a promise.
   * @returns Promise resolving with no value, or `void` when a callback is used.
   */
  deleteMethod(resourceGroupName: string, storageSyncServiceName: string, serverId: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<void>;
  deleteMethod(resourceGroupName: string, storageSyncServiceName: string, serverId: string, callback: ServiceCallback<void>): void;
  deleteMethod(resourceGroupName: string, storageSyncServiceName: string, serverId: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
  /**
   * Trigger a server certificate rollover, resolving with the full HTTP
   * response.
   *
   * @param resourceGroupName The name of the resource group (case insensitive).
   * @param storageSyncServiceName Name of the Storage Sync Service resource.
   * @param serverId Server Id.
   * @param parameters Body of the Trigger Rollover request; optional field
   *   `serverCertificate` holds the certificate data.
   * @param options Optional parameters; `customHeaders` are added to the request.
   * @returns Promise of the HTTP response; the body is empty (`void`).
   */
  triggerRolloverWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, serverId: string, parameters: models.TriggerRolloverRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
  /**
   * Trigger a server certificate rollover.
   *
   * @param resourceGroupName The name of the resource group (case insensitive).
   * @param storageSyncServiceName Name of the Storage Sync Service resource.
   * @param serverId Server Id.
   * @param parameters Body of the Trigger Rollover request; optional field
   *   `serverCertificate` holds the certificate data.
   * @param options Optional parameters; `customHeaders` are added to the request.
   * @param callback Optional Node-style callback; when supplied the method
   *   returns `void` instead of a promise.
   * @returns Promise resolving with no value, or `void` when a callback is used.
   */
  triggerRollover(resourceGroupName: string, storageSyncServiceName: string, serverId: string, parameters: models.TriggerRolloverRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<void>;
  triggerRollover(resourceGroupName: string, storageSyncServiceName: string, serverId: string, parameters: models.TriggerRolloverRequest, callback: ServiceCallback<void>): void;
  triggerRollover(resourceGroupName: string, storageSyncServiceName: string, serverId: string, parameters: models.TriggerRolloverRequest, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
  /**
   * `begin*` counterpart of `createWithHttpOperationResponse` (see the
   * interface note on `begin*` semantics). Same parameters and result.
   */
  beginCreateWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, serverId: string, parameters: models.RegisteredServerCreateParameters, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.RegisteredServer>>;
  /**
   * `begin*` counterpart of `create` (see the interface note on `begin*`
   * semantics). Same parameters, overloads, and result.
   */
  beginCreate(resourceGroupName: string, storageSyncServiceName: string, serverId: string, parameters: models.RegisteredServerCreateParameters, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.RegisteredServer>;
  beginCreate(resourceGroupName: string, storageSyncServiceName: string, serverId: string, parameters: models.RegisteredServerCreateParameters, callback: ServiceCallback<models.RegisteredServer>): void;
  beginCreate(resourceGroupName: string, storageSyncServiceName: string, serverId: string, parameters: models.RegisteredServerCreateParameters, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.RegisteredServer>): void;
  /**
   * `begin*` counterpart of `deleteMethodWithHttpOperationResponse` (see the
   * interface note on `begin*` semantics). Same parameters and result.
   */
  beginDeleteMethodWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, serverId: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
  /**
   * `begin*` counterpart of `deleteMethod` (see the interface note on
   * `begin*` semantics). Same parameters, overloads, and result.
   */
  beginDeleteMethod(resourceGroupName: string, storageSyncServiceName: string, serverId: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<void>;
  beginDeleteMethod(resourceGroupName: string, storageSyncServiceName: string, serverId: string, callback: ServiceCallback<void>): void;
  beginDeleteMethod(resourceGroupName: string, storageSyncServiceName: string, serverId: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
  /**
   * `begin*` counterpart of `triggerRolloverWithHttpOperationResponse` (see
   * the interface note on `begin*` semantics). Same parameters and result.
   */
  beginTriggerRolloverWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, serverId: string, parameters: models.TriggerRolloverRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
  /**
   * `begin*` counterpart of `triggerRollover` (see the interface note on
   * `begin*` semantics). Same parameters, overloads, and result.
   */
  beginTriggerRollover(resourceGroupName: string, storageSyncServiceName: string, serverId: string, parameters: models.TriggerRolloverRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<void>;
  beginTriggerRollover(resourceGroupName: string, storageSyncServiceName: string, serverId: string, parameters: models.TriggerRolloverRequest, callback: ServiceCallback<void>): void;
  beginTriggerRollover(resourceGroupName: string, storageSyncServiceName: string, serverId: string, parameters: models.TriggerRolloverRequest, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
}
/**
 * Read and abort operations on the workflows of a Storage Sync Service.
 *
 * An instance of this interface is created automatically on the
 * StorageSyncManagementClient; do not construct it directly.
 *
 * Method naming conventions in this generated client:
 * - `*WithHttpOperationResponse` resolves with the raw HTTP response wrapper
 *   (`HttpOperationResponse<T>`); the plain-named variant resolves with only
 *   the deserialized body `T`.
 * - Each plain-named method is overloaded: promise form (optional `options`)
 *   or Node-style callback form `(err, result, request, response)`.
 */
export interface Workflows {
  /**
   * Get the workflow list for a Storage Sync Service, resolving with the full
   * HTTP response.
   *
   * @param resourceGroupName The name of the resource group (case insensitive).
   * @param storageSyncServiceName Name of the Storage Sync Service resource.
   * @param options Optional parameters; `customHeaders` are added to the request.
   * @returns Promise of the HTTP response whose body is a `WorkflowArray`.
   */
  listByStorageSyncServiceWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.WorkflowArray>>;
  /**
   * Get the workflow list for a Storage Sync Service.
   *
   * @param resourceGroupName The name of the resource group (case insensitive).
   * @param storageSyncServiceName Name of the Storage Sync Service resource.
   * @param options Optional parameters; `customHeaders` are added to the request.
   * @param callback Optional Node-style callback; when supplied the method
   *   returns `void` instead of a promise.
   * @returns Promise of the deserialized `WorkflowArray`, or `void` when a
   *   callback is used.
   */
  listByStorageSyncService(resourceGroupName: string, storageSyncServiceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.WorkflowArray>;
  listByStorageSyncService(resourceGroupName: string, storageSyncServiceName: string, callback: ServiceCallback<models.WorkflowArray>): void;
  listByStorageSyncService(resourceGroupName: string, storageSyncServiceName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.WorkflowArray>): void;
  /**
   * Get a single workflow resource, resolving with the full HTTP response.
   *
   * @param resourceGroupName The name of the resource group (case insensitive).
   * @param storageSyncServiceName Name of the Storage Sync Service resource.
   * @param workflowId Workflow Id.
   * @param options Optional parameters; `customHeaders` are added to the request.
   * @returns Promise of the HTTP response whose body is a `Workflow`.
   */
  getWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, workflowId: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.Workflow>>;
  /**
   * Get a single workflow resource.
   *
   * @param resourceGroupName The name of the resource group (case insensitive).
   * @param storageSyncServiceName Name of the Storage Sync Service resource.
   * @param workflowId Workflow Id.
   * @param options Optional parameters; `customHeaders` are added to the request.
   * @param callback Optional Node-style callback; when supplied the method
   *   returns `void` instead of a promise.
   * @returns Promise of the deserialized `Workflow`, or `void` when a callback
   *   is used.
   */
  get(resourceGroupName: string, storageSyncServiceName: string, workflowId: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.Workflow>;
  get(resourceGroupName: string, storageSyncServiceName: string, workflowId: string, callback: ServiceCallback<models.Workflow>): void;
  get(resourceGroupName: string, storageSyncServiceName: string, workflowId: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.Workflow>): void;
  /**
   * Abort the given workflow, resolving with the full HTTP response.
   *
   * @param resourceGroupName The name of the resource group (case insensitive).
   * @param storageSyncServiceName Name of the Storage Sync Service resource.
   * @param workflowId Workflow Id.
   * @param options Optional parameters; `customHeaders` are added to the request.
   * @returns Promise of the HTTP response; the body is empty (`void`).
   */
  abortWithHttpOperationResponse(resourceGroupName: string, storageSyncServiceName: string, workflowId: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
  /**
   * Abort the given workflow.
   *
   * @param resourceGroupName The name of the resource group (case insensitive).
   * @param storageSyncServiceName Name of the Storage Sync Service resource.
   * @param workflowId Workflow Id.
   * @param options Optional parameters; `customHeaders` are added to the request.
   * @param callback Optional Node-style callback; when supplied the method
   *   returns `void` instead of a promise.
   * @returns Promise resolving with no value, or `void` when a callback is used.
   */
  abort(resourceGroupName: string, storageSyncServiceName: string, workflowId: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<void>;
  abort(resourceGroupName: string, storageSyncServiceName: string, workflowId: string, callback: ServiceCallback<void>): void;
  abort(resourceGroupName: string, storageSyncServiceName: string, workflowId: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
}
import { DMMF } from '@prisma/generator-helper'
import dedent from 'dindist'
import * as OS from 'os'
import { LiteralUnion } from 'type-fest'
import { StandardGraphQLScalarType, StandardGraphQLScalarTypes } from '../../helpers/graphql'
import { PrismaScalarType } from '../../helpers/prisma'
import { allCasesHandled } from '../../helpers/utils'
import { PrismaDmmf } from '../../lib/prisma-dmmf'
import { Gentime } from '../gentime/settingsSingleton'
import { jsDocForEnum, jsDocForField, jsDocForModel } from '../helpers/JSDocTemplates'
import { ModuleSpec } from '../types'
/**
 * Build the module spec for the generated `index.d.ts` file: its file name plus
 * the rendered TypeScript declarations for every model/enum in the schema.
 */
export const createModuleSpec = (dmmf: DMMF.Document, settings: Gentime.Settings): ModuleSpec => {
  const declarations = renderTypeScriptDeclarationForDocumentModels(dmmf, settings)
  return {
    fileName: 'index.d.ts',
    content: dedent`
      ${declarations}
    `,
  }
}
// Placeholder emitted into the generated declarations when the Prisma schema
// declares no enums.
const NO_ENUMS_DEFINED_COMMENT = dedent`
  // N/A –– You have not defined any enums in your Prisma schema file.
`
// Placeholder emitted into the generated declarations when the Prisma schema
// declares no models.
const NO_MODELS_DEFINED_COMMENT = dedent`
  // N/A –– You have not defined any models in your Prisma schema file.
`
/**
 * Render the full text of the generated declaration file: interface types for
 * every model and enum in the Prisma schema, value (term) exports for each,
 * and the `$settings` runtime-settings export.
 *
 * NOTE: everything inside the template literal below — including its comment
 * banners and the JSDoc block — is emitted verbatim into the generated file.
 */
export function renderTypeScriptDeclarationForDocumentModels(
  dmmf: DMMF.Document,
  settings: Gentime.Settings
): string {
  const models = dmmf.datamodel.models
  const enums = dmmf.datamodel.enums
  return (
    dedent`
      import * as NexusCore from 'nexus/dist/core'
      //
      //
      // TYPES
      // TYPES
      // TYPES
      // TYPES
      //
      //
      // Models
      ${
        models.length === 0
          ? NO_MODELS_DEFINED_COMMENT
          : models.map((model) => renderTypeScriptDeclarationForModel(model, settings)).join('\n\n')
      }
      // Enums
      ${
        enums.length === 0
          ? NO_ENUMS_DEFINED_COMMENT
          : enums.map((enum_) => renderTypeScriptDeclarationForEnum(enum_, settings)).join('\n\n')
      }
      //
      //
      // TERMS
      // TERMS
      // TERMS
      // TERMS
      //
      //
      //
      //
      // EXPORTS: PRISMA MODELS
      // EXPORTS: PRISMA MODELS
      // EXPORTS: PRISMA MODELS
      // EXPORTS: PRISMA MODELS
      //
      //
      ${
        models.length === 0
          ? NO_MODELS_DEFINED_COMMENT
          : models
              .map((model) => {
                return dedent`
                  export const ${model.name}: ${model.name}
                `
              })
              .join('\n\n')
      }
      //
      //
      // EXPORTS: PRISMA ENUMS
      // EXPORTS: PRISMA ENUMS
      // EXPORTS: PRISMA ENUMS
      // EXPORTS: PRISMA ENUMS
      //
      //
      ${
        enums.length === 0
          ? NO_ENUMS_DEFINED_COMMENT
          : enums
              .map((enum_) => {
                return dedent`
                  export const ${enum_.name}: ${enum_.name}
                `
              })
              .join('\n\n')
      }
      //
      //
      // EXPORTS: OTHER
      // EXPORTS: OTHER
      // EXPORTS: OTHER
      // EXPORTS: OTHER
      //
      //
      import { Runtime } from ${
        settings.data.output.directory === 'default'
          ? `'../generator/runtime/settingsSingleton'`
          : `'nexus-prisma/dist-cjs/generator/runtime/settingsSingleton'`
      }
      /**
       * Adjust Nexus Prisma's [runtime settings](https://pris.ly/nexus-prisma/docs/settings/runtime).
       *
       * @example
       *
       *   import { PrismaClient } from '@prisma/client'
       *   import { ApolloServer } from 'apollo-server'
       *   import { makeSchema } from 'nexus'
       *   import { User, Post, $settings } from 'nexus-prisma'
       *
       *   new ApolloServer({
       *     schema: makeSchema({
       *       types: [],
       *     }),
       *     context() {
       *       return {
       *         db: new PrismaClient(), // <-- You put Prisma client on the "db" context property
       *       }
       *     },
       *   })
       *
       *   $settings({
       *     prismaClientContextField: 'db', // <-- Tell Nexus Prisma
       *   })
       *
       * @remarks This is _different_ than Nexus Prisma's [_gentime_ settings](https://pris.ly/nexus-prisma/docs/settings/gentime).
       */
      export const $settings: typeof Runtime.changeSettings
    ` + OS.EOL
  )
}
/**
 * Render the generated `interface` declaration for one Prisma enum, optionally
 * preceded by JSDoc propagated from the schema documentation.
 */
function renderTypeScriptDeclarationForEnum(enum_: DMMF.DatamodelEnum, settings: Gentime.Settings): string {
  let jsdoc = ''
  if (settings.data.docPropagation.JSDoc) {
    // Trailing newline keeps the JSDoc attached directly above the interface.
    jsdoc = jsDocForEnum({ enum: enum_, settings }) + '\n'
  }
  const description = renderPrismaNodeDocumentationToDescription({ settings, node: enum_ })
  const members = enum_.values.map((value) => `'${value.name}'`).join(', ')
  return dedent`
    ${jsdoc}export interface ${enum_.name} {
      name: '${enum_.name}'
      description: ${description}
      members: [${members}]
    }
  `
}
/**
 * Render the generated `interface` declaration for one Prisma model, including
 * all of its fields and (optionally) JSDoc propagated from the schema.
 */
function renderTypeScriptDeclarationForModel(model: DMMF.Model, settings: Gentime.Settings): string {
  let jsdoc = ''
  if (settings.data.docPropagation.JSDoc) {
    // Trailing newline keeps the JSDoc attached directly above the interface.
    jsdoc = jsDocForModel({ model, settings }) + '\n'
  }
  const description = renderPrismaNodeDocumentationToDescription({ settings, node: model })
  const fields = renderTypeScriptDeclarationForModelFields(model, settings)
  return dedent`
    ${jsdoc}export interface ${model.name} {
      $name: '${model.name}'
      $description: ${description}
      ${fields}
    }
  `
}
/**
 * Render the *type* of a node's generated `description`/`$description` field:
 * the literal source text `'string'` when the node has schema documentation and
 * GraphQL doc propagation is enabled, otherwise `'undefined'`.
 *
 * Note: the return value is TypeScript source text, not an actual description.
 */
const renderPrismaNodeDocumentationToDescription = (params: {
  settings: Gentime.Settings
  node: PrismaDmmf.DocumentableNode
}): string => {
  // Previously the ternary was wrapped in a redundant template literal; a plain
  // ternary over string literals is equivalent and clearer.
  return params.node.documentation && params.settings.data.docPropagation.GraphQLDocs
    ? 'string'
    : 'undefined'
}
/**
 * Render the declarations for every field of a model, one per line.
 */
function renderTypeScriptDeclarationForModelFields(model: DMMF.Model, settings: Gentime.Settings): string {
  const renderedFields: string[] = []
  for (const field of model.fields) {
    renderedFields.push(renderTypeScriptDeclarationForField({ field, model, settings }))
  }
  return renderedFields.join('\n')
}
/**
 * Render the declaration entry for a single model field: its name, its guarded
 * Nexus type expression, its description type, and its resolver type.
 *
 * NOTE: everything inside the template literal below (including the inner
 * JSDoc blocks) is emitted verbatim into the generated declaration file.
 */
function renderTypeScriptDeclarationForField({
  field,
  model,
  settings,
}: {
  field: DMMF.Field
  model: DMMF.Model
  settings: Gentime.Settings
}): string {
  // Optional JSDoc propagated from the Prisma schema; the trailing newline
  // keeps it attached directly above the field entry.
  const jsdoc = settings.data.docPropagation.JSDoc ? jsDocForField({ field, model, settings }) + '\n' : ''
  const description = renderPrismaNodeDocumentationToDescription({ settings, node: field })
  return dedent`
    ${jsdoc}${field.name}: {
      /**
       * The name of this field.
       */
      name: '${field.name}'
      /**
       * The type of this field.
       */
      type: ${renderNexusType(field, settings)}
      /**
       * The documentation of this field.
       */
      description: ${description}
      /**
       * The resolver of this field
       */
      resolve: NexusCore.FieldResolver<'${model.name}', '${field.name}'>
    }
  `
}
/**
 * Render the TypeScript type expression for a field's `type` property. The
 * expression is a conditional type guarded on the Nexus typegen, so that
 * referencing a GraphQL type absent from the user's API yields a readable
 * error-message literal type instead of an invalid type reference. The four
 * branches cover the list/required combinations of the field.
 */
function renderNexusType(field: DMMF.Field, settings: Gentime.Settings): string {
  const graphqlType = fieldTypeToGraphQLType(field, settings.data)
  /**
   * Relation fields can only work if the related field has been added to the API.
   * If it has not, then Nexus will not "know" about it meaning it won't be valid
   * within NexusCore.NexusNonNullDef<'...'> etc.
   *
   * Example:
   *
   *     model Foo {
   *       bar Bar
   *     }
   *
   *     model Bar {
   *       ...
   *     }
   *
   * `nexus-prisma` Foo.bar would not work unless the developer has put `Bar` into their API as well.
   *
   * Meanwhile, in the generated `nexus-prisma` types, to avoid static type errors, we must guard against the
   * generated types to not _assume_ that `Bar` etc. has been put into the API.
   *
   * Thus, we use TS conditional types. We look to see if Nexus typegen has this type.
   */
  // NOTE(review): "amoung" below is a typo, but it is part of an emitted string
  // literal (it surfaces in generated code users may grep for) — left untouched.
  const typeLiteralMissingNexusOutputTypeErrorMessage = `'Warning/Error: The type \\'${graphqlType}\\' is not amoung the union of GetGen<\\'allNamedTypes\\', string>. This means that either: 1) You need to run nexus typegen reflection. 2) You need to add the type \\'${graphqlType}\\' to your GraphQL API.'`
  const conditionalTypeCheck = `'${graphqlType}' extends NexusCore.GetGen<'allNamedTypes', string>`
  if (field.isList && field.isRequired) {
    // Required list: list-of or non-null-of the type (parenthesized union).
    return dedent`
      ${conditionalTypeCheck}
      ? (NexusCore.NexusListDef<'${graphqlType}'> | NexusCore.NexusNonNullDef<'${graphqlType}'>)
      : ${typeLiteralMissingNexusOutputTypeErrorMessage}
    `
  } else if (field.isList && !field.isRequired) {
    // Optional list: list-of or nullable-of the type.
    return dedent`
      ${conditionalTypeCheck}
      ? NexusCore.NexusListDef<'${graphqlType}'> | NexusCore.NexusNullDef<'${graphqlType}'>
      : ${typeLiteralMissingNexusOutputTypeErrorMessage}
    `
  } else if (field.isRequired) {
    // Required scalar/relation.
    return dedent`
      ${conditionalTypeCheck}
      ? NexusCore.NexusNonNullDef<'${graphqlType}'>
      : ${typeLiteralMissingNexusOutputTypeErrorMessage}
    `
  } else {
    // Optional scalar/relation.
    return dedent`
      ${conditionalTypeCheck}
      ? NexusCore.NexusNullDef<'${graphqlType}'>
      : ${typeLiteralMissingNexusOutputTypeErrorMessage}
    `
  }
}
/**
* Map the fields type to a GraphQL type.
*
* @remarks The `settings` param type uses settings data instead of Setset instance because this helper
* is used at runtime too where we don't have a Setset instance for gentime.
*/
export function fieldTypeToGraphQLType(
field: DMMF.Field,
settings: Gentime.SettingsData
): LiteralUnion<StandardGraphQLScalarType, string> {
// TODO remove once PC is fixed https://prisma-company.slack.com/archives/C016KUHB1R6/p1638816683155000?thread_ts=1638563060.145800&cid=C016KUHB1R6
if (typeof field.type !== 'string') {
throw new TypeError(`field.type is supposed to always be a string.`)
}
switch (field.kind) {
case 'scalar': {
if (field.isId) {
if (field.type === 'String' || (field.type === 'Int' && settings.projectIdIntToGraphQL === 'ID')) {
return StandardGraphQLScalarTypes.ID
}
}
const fieldType = field.type as PrismaScalarType
switch (fieldType) {
case 'String': {
return StandardGraphQLScalarTypes.String
}
case 'Int': {
return StandardGraphQLScalarTypes.Int
}
case 'Boolean': {
return StandardGraphQLScalarTypes.Boolean
}
case 'Float': {
return StandardGraphQLScalarTypes.Float
}
case 'BigInt': {
return 'BigInt'
}
case 'DateTime': {
return 'DateTime'
}
case 'Json': {
return 'Json'
}
case 'Bytes': {
return 'Bytes'
}
case 'Decimal': {
return 'Decimal'
}
default: {
return allCasesHandled(fieldType)
}
}
}
case 'enum': {
return field.type
}
case 'object': {
return field.type
}
case 'unsupported': {
return field.type
}
default:
allCasesHandled(field.kind)
}
} | the_stack |
import { connect } from 'react-redux'
import { getTranslate } from 'react-localize-redux'
import { push } from 'connected-react-router'
import { withRouter } from 'react-router-dom'
import { withStyles } from '@material-ui/core/styles'
import CloseIcon from '@material-ui/icons/Close'
import IconButton from '@material-ui/core/IconButton'
import Link from '@material-ui/core/Link'
import React, { Component } from 'react'
import TextField from '@material-ui/core/TextField'
import Typography from '@material-ui/core/Typography'
import { UserRegisterModel } from 'src/models/users/userRegisterModel'
import AppFooter from 'src/components/appFooter'
import AppForm from 'containers/appForm'
import FormButton from 'src/components/formButton'
import StringAPI from 'src/api/StringAPI'
import TopAppBar from 'src/components/topAppBar'
import * as authorizeActions from 'src/store/actions/authorizeActions'
import * as globalActions from 'src/store/actions/globalActions'
import { ISignupComponentProps } from './ISignupComponentProps'
import { ISignupComponentState } from './ISignupComponentState'
/**
 * JSS style rules for the signup page, derived from the Material-UI theme.
 *
 * Fixes applied:
 * - `formLabel.color` was `'6c6c6c'` (missing `#`, an invalid CSS color —
 *   sibling rule `closeButton` already uses `'#6c6c6c'`).
 * - `theme.spacing.unit` (Material-UI v3 API) replaced with `theme.spacing(1)`
 *   for consistency with the `theme.spacing(n)` calls used everywhere else in
 *   this object.
 */
const styles = (theme: any) => ({
  root: {
    padding: 0,
    'label + &': {
      marginTop: theme.spacing(3),
    },
  },
  margin: {
    margin: theme.spacing(1),
  },
  cssLabel: {
    '&$cssFocused': {
      color: '#f62f5e',
    },
  },
  cssFocused: {},
  cssUnderline: {
    '&:after': {
      borderBottomColor: '#f62f5e',
    },
  },
  cssOutlinedInput: {
    '&$cssFocused $notchedOutline': {
      borderColor: '#f62f5e',
    },
  },
  notchedOutline: {},
  input: {
    minWidth: theme.spacing(6),
    backgroundColor: theme.palette.common.white,
    '&$disabled': {
      backgroundColor: theme.palette.divider,
    },
  },
  inputBorder: {
    border: '1px solid #e9ddd0',
    '&:focus': {
      borderColor: theme.palette.secondary.main,
    },
  },
  disabled: {},
  inputSizeSmall: {
    fontSize: 14,
    padding: theme.spacing(1),
    width: `calc(100% - ${theme.spacing(2)}px)`,
  },
  inputSizeMedium: {
    fontSize: 16,
    padding: theme.spacing(2),
    width: `calc(100% - ${theme.spacing(4)}px)`,
  },
  inputSizeLarge: {
    fontSize: 18,
    padding: 22,
    width: `calc(100% - ${22 * 2}px)`,
  },
  inputSizeXlarge: {
    fontSize: 20,
    padding: 25,
    width: `calc(100% - ${25 * 2}px)`,
  },
  formLabel: {
    fontSize: 16,
    textAlign: 'center',
    textTransform: 'capitalize',
    position: 'relative',
    fontWeight: '400',
    color: '#6c6c6c',
    top: 20,
    // NOTE(review): the trailing ':' in this selector looks like a typo, which
    // would make the nested rule dead. Confirm the intended selector before
    // changing it, since "fixing" it would activate a currently-inert rule.
    '&$cssFocused $notchedOutline:': {
      position: 'absolute',
      top: 0,
      textAlign: 'left',
    },
  },
  select: {
    height: 'auto',
    borderRadius: 0,
  },
  selectIcon: {
    top: '50%',
    marginTop: -12,
  },
  form: {
    marginTop: theme.spacing(6),
  },
  contain: {
    margin: '0 auto'
  },
  paper: {
    minHeight: 370,
    maxWidth: 450,
    minWidth: 337,
    textAlign: 'center',
    display: 'block',
    margin: 'auto'
  },
  button: {
    marginTop: theme.spacing(3),
    marginBottom: theme.spacing(2),
    borderRadius: 35,
    padding: `${theme.spacing(3) - 3}px ${theme.spacing(15)}px`,
  },
  closeButton: {
    position: 'absolute',
    right: theme.spacing(1),
    top: theme.spacing(1),
    color: '#6c6c6c'
  },
  formTitle: {
    fontFamily: '"Montserrat", sans-serif',
    fontSize: 24,
    color: '#2e2e2e',
    lineHeight: '150%',
    textTransform: 'capitalize',
    paddingLeft: '3rem'
  },
  formlinksright: {
    flex: 1,
    display: 'inline-flex',
    flexDirection: 'row',
    justifyContent: 'flex-start',
    color: '#f62f5e'
  },
  formlinksleft: {
    flex: 1,
    display: 'inline-flex',
    flexDirection: 'row',
    marginRight: '4rem',
    justifyContent: 'flex-end',
  },
  formlinkswrapper: {
    textAlign: 'center'
  }
})
// - Create Signup component class
/**
 * Registration form: collects full name, email, password, and confirmation,
 * validates them locally, then dispatches the signup action via the `register`
 * prop. Validation errors are shown as per-field helper text.
 */
export class SignupComponent extends Component<ISignupComponentProps, ISignupComponentState> {
  /**
   * Component constructor
   * @param {object} props is an object properties of component
   */
  constructor(props: ISignupComponentProps) {
    super(props)
    // All form fields start empty with no validation errors.
    this.state = {
      fullNameInput: '',
      fullNameInputError: '',
      emailInput: '',
      emailInputError: '',
      passwordInput: '',
      passwordInputError: '',
      confirmInput: '',
      confirmInputError: ''
    }
    // Binding function to `this`
    // NOTE(review): handleForm is declared as an arrow-function class property
    // below, so this bind is redundant (though harmless).
    this.handleForm = this.handleForm.bind(this)
  }
  /**
   * Handle data on input change: store the field's value under its input name
   * and clear the validation error(s) tied to the edited field.
   * @param {event} evt is an event of inputs of element on change
   */
  handleInputChange = (event: any) => {
    const target = event.target
    const value = target.type === 'checkbox' ? target.checked : target.value
    const name = target.name
    // Computed property key: each input's `name` attribute matches a state key.
    this.setState({
      [name]: value
    })
    switch (name) {
      case 'fullNameInput':
        this.setState({
          fullNameInputError: ''
        })
        break
      case 'emailInput':
        this.setState({
          emailInputError: ''
        })
        break
      // Password and confirmation are validated against each other, so editing
      // either one clears both errors.
      case 'passwordInput':
        this.setState({
          confirmInputError: '',
          passwordInputError: ''
        })
        break
      case 'confirmInput':
        this.setState({
          confirmInputError: '',
          passwordInputError: ''
        })
        break
      case 'checkInput':
        // NOTE(review): `checkInputError` is not part of the initial state set
        // in the constructor — confirm it exists on ISignupComponentState.
        this.setState({
          checkInputError: ''
        })
        break
      default:
    }
  }
  /**
   * Handle register form: validate every field, record per-field error
   * messages, and dispatch the signup action only when all checks pass.
   */
  handleForm = () => {
    const { fullNameInput, emailInput, passwordInput, confirmInput } = this.state
    const { register, translate } = this.props
    let error = false
    /* Validate email*/
    if (!StringAPI.isValidEmail(emailInput)) {
      this.setState({
        emailInputError: translate!('signup.validEmailError')
      })
      error = true
    }
    /* Check password */
    if (passwordInput === '') {
      this.setState({
        passwordInputError: translate!('signup.passwordRequiredError')
      })
      error = true
    }
    if (fullNameInput === '') {
      this.setState({
        // NOTE(review): this reuses the *password* required-error translation
        // key for the full-name field — looks like a copy/paste slip. Confirm
        // the intended key against the locale files before changing it.
        fullNameInputError: translate!('signup.passwordRequiredError')
      })
      error = true
    }
    if (confirmInput === '') {
      this.setState({
        confirmInputError: translate!('signup.confirmRequiredError')
      })
      error = true
    } else if (confirmInput !== passwordInput) {
      // Both fields get an error so the mismatch is visible at either input.
      this.setState({
        passwordInputError: translate!('signup.passwordEqualConfirmError'),
        confirmInputError: translate!('signup.confirmEqualPasswordError')
      })
      error = true
    }
    if (!error) {
      register!({
        email: emailInput,
        password: passwordInput,
        fullName: fullNameInput
      })
    }
  }
  /**
   * Render component DOM
   * @return {react element} return the DOM which rendered by component
   */
  render() {
    const { classes, translate, goTo } = this.props
    return (
      <React.Fragment>
        <TopAppBar />
        <AppForm>
          <React.Fragment>
            <IconButton aria-label='Close' className={classes.closeButton} onClick={evt => {goTo!(`/`)}} >
              <CloseIcon />
            </IconButton>
            <Typography variant='h3' gutterBottom align='center' className={classes.formTitle}>
              Sign Up
            </Typography>
          </React.Fragment>
          <Typography variant='h3' gutterBottom align='center'>
            <TextField fullWidth
              className={classes.margin}
              InputLabelProps={{
                classes: {
                  root: classes.cssLabel,
                  focused: classes.cssFocused,
                },
                className: classes.formLabel,
              }}
              InputProps={{
                classes: {
                  root: classes.cssOutlinedInput,
                  focused: classes.cssFocused,
                  notchedOutline: classes.notchedOutline,
                  input: classes.inputSizeLarge,
                },
              }}
              variant='outlined'
              margin='normal'
              required
              onChange={this.handleInputChange}
              helperText={this.state.fullNameInputError}
              error={this.state.fullNameInputError.trim() !== ''}
              name='fullNameInput'
              label={translate!('signup.fullNameLabel')}
              type='name'
            />
            <TextField fullWidth
              className={classes.margin}
              InputLabelProps={{
                classes: {
                  root: classes.cssLabel,
                  focused: classes.cssFocused,
                },
                className: classes.formLabel,
              }}
              InputProps={{
                classes: {
                  root: classes.cssOutlinedInput,
                  focused: classes.cssFocused,
                  notchedOutline: classes.notchedOutline,
                  input: classes.inputSizeLarge,
                },
              }}
              variant='outlined'
              margin='normal'
              required
              onChange={this.handleInputChange}
              helperText={this.state.emailInputError}
              error={this.state.emailInputError.trim() !== ''}
              name='emailInput'
              label={translate!('signup.emailLabel')}
              type='email'
            />
            <TextField fullWidth
              className={classes.margin}
              InputLabelProps={{
                classes: {
                  root: classes.cssLabel,
                  focused: classes.cssFocused,
                },
                className: classes.formLabel,
              }}
              InputProps={{
                classes: {
                  root: classes.cssOutlinedInput,
                  focused: classes.cssFocused,
                  notchedOutline: classes.notchedOutline,
                  input: classes.inputSizeLarge,
                },
              }}
              variant='outlined'
              margin='normal'
              required
              onChange={this.handleInputChange}
              helperText={this.state.passwordInputError}
              error={this.state.passwordInputError.trim() !== ''}
              name='passwordInput'
              label={translate!('signup.passwordLabel')}
              type='password'
            />
            <TextField fullWidth
              className={classes.margin}
              InputLabelProps={{
                classes: {
                  root: classes.cssLabel,
                  focused: classes.cssFocused,
                },
                className: classes.formLabel,
              }}
              InputProps={{
                classes: {
                  root: classes.cssOutlinedInput,
                  focused: classes.cssFocused,
                  notchedOutline: classes.notchedOutline,
                  input: classes.inputSizeLarge,
                },
              }}
              variant='outlined'
              margin='normal'
              required
              onChange={this.handleInputChange}
              helperText={this.state.confirmInputError}
              error={this.state.confirmInputError.trim() !== ''}
              name='confirmInput'
              label={translate!('signup.confirmPasswordLabel')}
              type='password'
            />
            <FormButton
              className={classes.button}
              size='large'
              color='secondary'
              onClick={this.handleForm}
            >
              {translate!('signup.createButton')}
            </FormButton>
          </Typography>
          <div className={classes.formlinkswrapper}>
            <Typography variant='body2' align='center' className={classes.formlinksleft}>
              Already a member?
            </Typography>
            <Link href='/login' className={classes.formlinksright}>
              Sign In
            </Link>
          </div>
        </AppForm>
        <AppFooter />
      </React.Fragment>
    )
  }
}
/**
 * Bind redux action dispatchers to component props.
 * @param dispatch the store dispatch function
 * @param ownProps props the component already owns
 * @return the action-creator props injected into SignupComponent
 */
const mapDispatchToProps = (dispatch: any, ownProps: ISignupComponentProps) => {
  // Show a global message (e.g. signup failure).
  const showError = (message: string) => {
    dispatch(globalActions.showMessage(message))
  }
  // Kick off the signup flow with the collected form values.
  const register = (userRegister: UserRegisterModel) => {
    dispatch(authorizeActions.dbSignup(userRegister))
  }
  // Navigate to the login page.
  const loginPage = () => {
    dispatch(push('/login'))
  }
  // Navigate to an arbitrary route.
  const goTo = (url: string) => dispatch(push(url))
  return { showError, register, loginPage, goTo }
}
/**
 * Map redux store state to component props.
 * @param state the (immutable) redux store state
 * @param ownProps props the component already owns
 * @return props derived from the store (the localization helper)
 */
const mapStateToProps = (state: any, ownProps: ISignupComponentProps) => {
  const localeState = state.get('locale')
  return {
    translate: getTranslate(localeState),
  }
}
// - Connect component to redux store
// NOTE(review): the chain of `as any` casts silences type checking across
// withRouter/connect/withStyles, leaving the exported component's props
// effectively untyped for consumers.
export default withRouter(connect(mapStateToProps, mapDispatchToProps)(withStyles(styles as any)(SignupComponent as any) as any) as any)
/// <reference path="../../../dist/kurve-global.d.ts" />
// Kurve is loaded as a browser global (via a <script> tag); pull it off
// `window`, typed by the reference directive above.
const kurve = window["Kurve"] as typeof Kurve;
// Copyright (c) Microsoft. All rights reserved. Licensed under the MIT license. See full license at the bottom of this file.
// Entry point: the hosting page calls init() once the DOM is ready.
const init = () => new App();
/**
 * Kurve VanillaJS sample application.
 *
 * Reads the endpoint version and optional App ID from the page, signs the user
 * in via Kurve.Identity, then wires each "scenario" button to a Microsoft
 * Graph call whose results are written into the #results element.
 */
class App {
    /** Handles login/logout and token acquisition. */
    private identity: Kurve.Identity;
    /** Microsoft Graph client, linked to the identity after login. */
    private graph: Kurve.Graph;
    constructor() {
        // Setup: read the endpoint version from the page's selector.
        const endpointVersion = (<HTMLSelectElement>document.getElementById("model")).value == 'v2' ? kurve.EndpointVersion.v2 : kurve.EndpointVersion.v1;
        // FIX: the fallback ternary must be parenthesized. `||` binds tighter
        // than `?:`, so the original `value || cond ? a : b` parsed as
        // `(value || cond) ? a : b` and always discarded a user-supplied App ID.
        const clientId = (<HTMLInputElement>document.getElementById("AppID")).value ||
            (endpointVersion == kurve.EndpointVersion.v2 ? "13c5e4af-5ea6-4b48-8989-ca25c96ba1c4" : "636e98ea-3024-4810-a66e-cda4bfa0a489");
        const loc = document.URL;
        const redirectUri = loc.substr(0, loc.indexOf("/Samples/Client/VanillaJS")) + "/dist/login.html";
        // Create identity object
        this.identity = new kurve.Identity(clientId, redirectUri, { endpointVersion: endpointVersion});
        // The v2 endpoint requires explicit scopes; v1 permissions are configured on the app registration.
        const scopes = endpointVersion == kurve.EndpointVersion.v2 ? { scopes: [kurve.Scopes.Mail.Read, kurve.Scopes.General.OpenId] } : {};
        // Login
        this.identity.loginAsync(scopes).then(_ => {
            ////Option 1: Manually passing the access token
            //// or... this.identity.getAccessToken("https://graph.microsoft.com", ((token) => {
            //this.identity.getAccessTokenAsync("https://graph.microsoft.com").then(((token) => {
            //    this.graph = new Kurve.Graph({ defaultAccessToken: token });
            //}));
            //Option 2: Automatically linking to the Identity object
            this.graph = new kurve.Graph(this.identity);
            //Update UI: hide the setup form and wire up the scenario buttons.
            document.getElementById("initDiv").style.display = "none";
            document.getElementById("scenarios").style.display = "";
            document.getElementById("logoutBtn").addEventListener("click", (() => { this.logout(); }));
            document.getElementById("usersWithPaging").addEventListener("click", (() => { this.loadUsersWithPaging(); }));
            document.getElementById("usersWithCustomODATA").addEventListener("click", (() => { this.loadUsersWithOdataQuery((<HTMLInputElement>document.getElementById('odataquery')).value); }));
            document.getElementById("meUser").addEventListener("click", (() => { this.loadUserMe(); }));
            document.getElementById("userById").addEventListener("click", (() => { this.userById(); }));
            document.getElementById("userMessages").addEventListener("click", (() => { this.loadUserMessages(); }));
            document.getElementById("userEvents").addEventListener("click", (() => { this.loadUserEvents(); }));
            document.getElementById("userGroups").addEventListener("click", (() => { this.loadUserGroups(); }));
            document.getElementById("userManager").addEventListener("click", (() => { this.loadUserManager(); }));
            document.getElementById("groupsWithPaging").addEventListener("click", (() => { this.loadGroupsWithPaging(); }));
            document.getElementById("groupById").addEventListener("click", (() => { this.groupById(); }));
            document.getElementById("userPhoto").addEventListener("click", (() => { this.loadUserPhoto(); }));
            document.getElementById("loggedIn").addEventListener("click", (() => { this.isLoggedIn(); }));
            document.getElementById("whoAmI").addEventListener("click", (() => { this.whoAmI(); }));
        });
    }
    //-----------------------------------------------Scenarios---------------------------------------------
    //Scenario 1: Logout
    private logout(): void {
        this.identity.logOut();
    }
    //Scenario 2: Load users with paging
    private loadUsersWithPaging(): void {
        document.getElementById("results").innerHTML = "";
        this.graph.users.GetUsers().then(users =>
            this.showUsers(users)
        );
    }
    //Scenario 3: Load users with custom odata query
    private loadUsersWithOdataQuery(query: string): void {
        document.getElementById("results").innerHTML = "";
        this.graph.users.GetUsers(query).then(users =>
            this.showUsers(users)
        );
    }
    //Scenario 4: Load user "me"
    private loadUserMe(): void {
        document.getElementById("results").innerHTML = "";
        this.graph.me.GetUser().then(user =>
            document.getElementById("results").innerHTML += user.displayName + "</br>"
        ).fail(error =>
            document.getElementById("results").innerText = JSON.stringify(error)
        );
    }
    //Scenario 5: Load user by ID
    private userById(): void {
        document.getElementById("results").innerHTML = "";
        this.graph.users.$((<HTMLInputElement>document.getElementById("userId")).value).GetUser().then((user) => {
            document.getElementById("results").innerHTML += user.displayName + "</br>";
        }).fail((error) => {
            window.alert(error.status);
        });
    }
    //Scenario 6: Load user "me" and then its messages
    private loadUserMessages(): void {
        document.getElementById("results").innerHTML = "";
        this.graph.me.GetUser().then(user => {
            document.getElementById("results").innerHTML += "User:" + user.displayName + "</br>";
            document.getElementById("results").innerHTML += "Messages:" + "</br>";
            user._context.messages.GetMessages().then(messages =>
                this.showMessages(messages)
            );
        });
    }
    //Scenario 7: Load user "me" and then its events
    private loadUserEvents(): void {
        document.getElementById("results").innerHTML = "";
        this.graph.me.GetUser().then(user => {
            document.getElementById("results").innerHTML += "User:" + user.displayName + "</br>";
            document.getElementById("results").innerHTML += "Events:" + "</br>";
            user._context.events.GetEvents().then(events =>
                this.showEvents(events)
            );
        });
    }
    //Scenario 8: Load user "me" and then its groups
    private loadUserGroups(): void {
        document.getElementById("results").innerHTML = "";
        this.graph.me.GetUser().then(user => {
            document.getElementById("results").innerHTML += "User:" + user.displayName + "</br>";
            document.getElementById("results").innerHTML += "Groups:" + "</br>";
            user._context.memberOf.GetGroups("$top=5").then(groups =>
                this.showGroups(groups)
            );
        });
    }
    //Scenario 9: Load user "me" and then its manager
    private loadUserManager(): void {
        document.getElementById("results").innerHTML = "";
        this.graph.me.GetUser().then((user) => {
            document.getElementById("results").innerHTML += "User:" + user.displayName + "</br>";
            document.getElementById("results").innerHTML += "Manager:" + "</br>";
            user._context.manager.GetUser().then(manager => {
                document.getElementById("results").innerHTML += manager.displayName + "</br>";
            });
        });
    }
    //Scenario 10: Load groups with paging
    private loadGroupsWithPaging(): void {
        document.getElementById("results").innerHTML = "";
        this.graph.groups.GetGroups().then(groups =>
            this.showGroups(groups, 5)
        );
    }
    //Scenario 11: Load group by ID
    private groupById(): void {
        document.getElementById("results").innerHTML = "";
        this.graph.groups.$((<HTMLInputElement>document.getElementById("groupId")).value).GetGroup().then((group) => {
            document.getElementById("results").innerHTML += group.displayName + "</br>";
        });
    }
    //Scenario 12: Load user "me" and then its photo
    private loadUserPhoto(): void {
        document.getElementById("results").innerHTML = "";
        this.graph.me.GetUser().then((user) => {
            document.getElementById("results").innerHTML += "User:" + user.displayName + "</br>";
            document.getElementById("results").innerHTML += "Photo:" + "</br>";
            user._context.photo.GetPhotoProperties().then((photo) => {
                // Photo metadata (content type, dimensions, ...) is available
                // on `photo` here; this sample only renders the image below.
            });
            user._context.photo.GetPhotoImage().then((photoValue) => {
                var img = document.createElement("img");
                var reader = new FileReader();
                reader.onloadend = () => {
                    // readAsDataURL always produces a data: URL string.
                    img.src = reader.result as string;
                }
                reader.readAsDataURL(photoValue.raw);
                document.getElementById("results").appendChild(img);
            });
        });
    }
    //Scenario 13: Is logged in?
    private isLoggedIn(): void {
        document.getElementById("results").innerText = this.identity.isLoggedIn() ? "True" : "False";
    }
    //Scenario 14: Who am I?
    private whoAmI(): void {
        document.getElementById("results").innerText = JSON.stringify(this.identity.getIdToken());
    }
    //--------------------------------Callbacks---------------------------------------------
    // NOTE: `limit--` post-decrements *after* rendering, so each callback
    // renders up to limit + 1 entries before stopping (behavior preserved).
    private showUsers(users: Kurve.GraphCollection<Kurve.UserDataModel, Kurve.Users, Kurve.User>, limit:number = 5): void {
        for (let user of users.value) {
            document.getElementById("results").innerHTML += user.displayName + "</br>";
            if (limit-- <= 0)
                return;
        }
        users._next && users._next().then(nextUsers =>
            this.showUsers(nextUsers, limit)
        ).fail(error =>
            document.getElementById("results").innerText = JSON.stringify(error)
        );
    }
    private showGroups<GraphNode extends Kurve.CollectionNode>(groups: Kurve.GraphCollection<Kurve.GroupDataModel, GraphNode, Kurve.Group>, limit:number = 5): void {
        for (let group of groups.value) {
            document.getElementById("results").innerHTML += group.displayName + "</br>"
            if (limit-- <= 0)
                return;
        }
        groups._next && groups._next().then(nextGroups =>
            // FIX: propagate the remaining limit; previously it reset to the
            // default 5 on every page, unlike showUsers/showMessages.
            this.showGroups(nextGroups, limit)
        ).fail(error =>
            document.getElementById("results").innerText = JSON.stringify(error)
        );
    }
    private showMessages(messages: Kurve.GraphCollection<Kurve.MessageDataModel, Kurve.Messages, Kurve.Message>, limit:number = 5): void {
        for (let message of messages.value) {
            document.getElementById("results").innerHTML += message.subject + "</br>"
            if (limit-- <= 0)
                return;
        }
        messages._next && messages._next().then(nextMessages =>
            this.showMessages(nextMessages, limit)
        ).fail(error =>
            document.getElementById("results").innerText = error.statusText
        );
    }
    private showEvents(events: Kurve.GraphCollection<Kurve.EventDataModel, Kurve.Events, Kurve.Event>, limit:number = 5): void {
        for (let event of events.value) {
            document.getElementById("results").innerHTML += event.subject + "</br>"
            if (limit-- <= 0)
                return;
        }
        events._next && events._next().then(nextEvents =>
            // FIX: propagate the remaining limit (see showGroups).
            this.showEvents(nextEvents, limit)
        ).fail(error =>
            document.getElementById("results").innerText = error.statusText
        );
    }
}
//*********************************************************
//
//Kurve js, https://github.com/microsoftdx/kurvejs
//
//Copyright (c) Microsoft Corporation
//All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// ""Software""), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//********************************************************* | the_stack |
import '../prefs/prefs.js';
import {assert} from '//resources/js/assert.m.js';
import {PromiseResolver} from '//resources/js/promise_resolver.m.js';
import {PolymerElement} from '//resources/polymer/v3_0/polymer/polymer_bundled.min.js';
import {PrefsMixin} from '../prefs/prefs_mixin.js';
import {CrSettingsPrefs} from '../prefs/prefs_types.js';
import {LanguagesBrowserProxy, LanguagesBrowserProxyImpl} from './languages_browser_proxy.js';
import {LanguageHelper, LanguagesModel, LanguageState, SpellCheckLanguageState} from './languages_types.js';
// Spell-check languages partitioned by whether spell check is currently
// enabled ("on") or disabled ("off") for them.
type SpellCheckLanguages = {
  on: Array<SpellCheckLanguageState>,
  off: Array<SpellCheckLanguageState>,
};
// Shorthand for the languageSettingsPrivate enum used when reordering the
// enabled-languages list.
const MoveType = chrome.languageSettingsPrivate.MoveType;
// Translate server treats some language codes the same.
// See also: components/translate/core/common/translate_util.cc.
const kLanguageCodeToTranslateCode: {[key: string]: string} = {
  'nb': 'no',
  'fil': 'tl',
  'zh-HK': 'zh-TW',
  'zh-MO': 'zh-TW',
  'zh-SG': 'zh-CN',
};
// Some ISO 639 language codes have been renamed, e.g. "he" to "iw", but
// Translate still uses the old versions. TODO(michaelpg): Chrome does too.
// Follow up with Translate owners to understand the right thing to do.
const kTranslateLanguageSynonyms: {[key: string]: string} = {
  'he': 'iw',
  'jv': 'jw',
};
// The fake language name used for ARC IMEs. The value must be in sync with the
// one in ui/base/ime/ash/extension_ime_util.h.
const kArcImeLanguage: string = '_arc_ime_language_';
// Aggregate of the values gathered asynchronously at startup that are needed
// to construct the initial LanguagesModel (input-method fields are
// ChromeOS-only and therefore optional).
type ModelArgs = {
  supportedLanguages: Array<chrome.languageSettingsPrivate.Language>,
  translateTarget: string,
  alwaysTranslateCodes: Array<string>,
  neverTranslateCodes: Array<string>,
  startingUILanguage: string,
  supportedInputMethods?: Array<chrome.languageSettingsPrivate.InputMethod>,
  currentInputMethodId?: string,
};
/**
* Singleton element that generates the languages model on start-up and
* updates it whenever Chrome's pref store and other settings change.
*/
const SettingsLanguagesElementBase = PrefsMixin(PolymerElement);
class SettingsLanguagesElement extends SettingsLanguagesElementBase implements
    LanguageHelper {
  static get is() {
    return 'settings-languages';
  }
  static get properties() {
    return {
      languages: {
        type: Object,
        notify: true,
      },
      /**
       * This element, as a LanguageHelper instance for API usage.
       */
      languageHelper: {
        type: Object,
        notify: true,
        readOnly: true,
        value() {
          return this;
        },
      },
      /**
       * PromiseResolver to be resolved when the singleton has been initialized.
       */
      resolver_: {
        type: Object,
        value: () => new PromiseResolver(),
      },
      /**
       * Hash map of supported languages by language codes for fast lookup.
       */
      supportedLanguageMap_: {
        type: Object,
        value: () => new Map(),
      },
      /**
       * Hash set of enabled language codes for membership testing.
       */
      enabledLanguageSet_: {
        type: Object,
        value: () => new Set(),
      },
      // <if expr="is_win">
      /** Prospective UI language when the page was loaded. */
      originalProspectiveUILanguage_: String,
      // </if>
    };
  }
  static get observers() {
    return [
      // All observers wait for the model to be populated by including the
      // |languages| property.
      'alwaysTranslateLanguagesPrefChanged_(' +
          'prefs.translate_allowlists.value.*, languages)',
      'neverTranslateLanguagesPrefChanged_(' +
          'prefs.translate_blocked_languages.value.*, languages)',
      // <if expr="is_win">
      'prospectiveUILanguageChanged_(prefs.intl.app_locale.value, languages)',
      // </if>
      'preferredLanguagesPrefChanged_(' +
          'prefs.intl.accept_languages.value, languages)',
      'preferredLanguagesPrefChanged_(' +
          'prefs.intl.forced_languages.value.*, languages)',
      'spellCheckDictionariesPrefChanged_(' +
          'prefs.spellcheck.dictionaries.value.*, ' +
          'prefs.spellcheck.forced_dictionaries.value.*, ' +
          'prefs.spellcheck.blocked_dictionaries.value.*, languages)',
      'translateLanguagesPrefChanged_(' +
          'prefs.translate_blocked_languages.value.*, languages)',
      'translateTargetPrefChanged_(' +
          'prefs.translate_recent_target.value, languages)',
      'updateRemovableLanguages_(' +
          'prefs.intl.app_locale.value, languages.enabled)',
      'updateRemovableLanguages_(' +
          'prefs.translate_blocked_languages.value.*)',
    ];
  }
  languages?: LanguagesModel|undefined;
  languageHelper: LanguageHelper;
  private resolver_: PromiseResolver<void>;
  private supportedLanguageMap_:
      Map<string, chrome.languageSettingsPrivate.Language>;
  private enabledLanguageSet_: Set<string>;
  // <if expr="is_win">
  private originalProspectiveUILanguage_: string;
  // </if>
  // <if expr="not is_macosx">
  // Bound listener kept so it can be removed again in disconnectedCallback().
  private boundOnSpellcheckDictionariesChanged_:
      ((statuses:
            Array<chrome.languageSettingsPrivate.SpellcheckDictionaryStatus>) =>
           void)|null = null;
  // </if>
  private browserProxy_: LanguagesBrowserProxy =
      LanguagesBrowserProxyImpl.getInstance();
  private languageSettingsPrivate_: typeof chrome.languageSettingsPrivate;
  constructor() {
    super();
    this.languageSettingsPrivate_ =
        this.browserProxy_.getLanguageSettingsPrivate();
  }
  /**
   * Kicks off the async fetches needed to build the languages model and
   * resolves |resolver_| once the model has been created.
   */
  connectedCallback() {
    super.connectedCallback();
    const promises: Array<Promise<any>> = [];
    /**
     * An object passed into createModel to keep track of platform-specific
     * arguments, populated by the "promises" array.
     */
    const args: ModelArgs = {
      supportedLanguages: [],
      translateTarget: '',
      alwaysTranslateCodes: [],
      neverTranslateCodes: [],
      startingUILanguage: '',
      // Only used by ChromeOS
      supportedInputMethods: [],
      currentInputMethodId: '',
    };
    // Wait until prefs are initialized before creating the model, so we can
    // include information about enabled languages.
    promises.push(CrSettingsPrefs.initialized);
    // Get the language list.
    promises.push(
        new Promise<Array<chrome.languageSettingsPrivate.Language>>(resolve => {
          this.languageSettingsPrivate_.getLanguageList(resolve);
        }).then(result => {
          args.supportedLanguages = result;
        }));
    // Get the translate target language.
    promises.push(new Promise<string>(resolve => {
      this.languageSettingsPrivate_.getTranslateTargetLanguage(
          resolve);
    }).then(result => args.translateTarget = result));
    // Get the list of language-codes to always translate.
    promises.push(new Promise<Array<string>>(resolve => {
      this.languageSettingsPrivate_.getAlwaysTranslateLanguages(
          resolve);
    }).then(result => {
      args.alwaysTranslateCodes = result;
    }));
    // Get the list of language-codes to never translate.
    promises.push(new Promise<Array<string>>(resolve => {
      this.languageSettingsPrivate_.getNeverTranslateLanguages(
          resolve);
    }).then(result => {
      args.neverTranslateCodes = result;
    }));
    // <if expr="is_win">
    // Fetch the starting UI language, which affects which actions should be
    // enabled.
    promises.push(this.browserProxy_.getProspectiveUILanguage().then(
        prospectiveUILanguage => {
          this.originalProspectiveUILanguage_ =
              prospectiveUILanguage || window.navigator.language;
        }));
    // </if>
    Promise.all(promises).then(() => {
      if (!this.isConnected) {
        // Return early if this element was detached from the DOM before
        // this async callback executes (can happen during testing).
        return;
      }
      this.createModel_(args);
      // <if expr="not is_macosx">
      this.boundOnSpellcheckDictionariesChanged_ =
          this.onSpellcheckDictionariesChanged_.bind(this);
      this.languageSettingsPrivate_.onSpellcheckDictionariesChanged.addListener(
          this.boundOnSpellcheckDictionariesChanged_);
      this.languageSettingsPrivate_.getSpellcheckDictionaryStatuses(
          this.boundOnSpellcheckDictionariesChanged_);
      // </if>
      this.resolver_.resolve();
    });
  }
  /**
   * Unregisters the spellcheck dictionaries listener added in
   * connectedCallback(), if any.
   */
  disconnectedCallback() {
    super.disconnectedCallback();
    // <if expr="not is_macosx">
    if (this.boundOnSpellcheckDictionariesChanged_) {
      this.languageSettingsPrivate_.onSpellcheckDictionariesChanged
          .removeListener(this.boundOnSpellcheckDictionariesChanged_);
      this.boundOnSpellcheckDictionariesChanged_ = null;
    }
    // </if>
  }
  // <if expr="is_win">
  /**
   * Updates the prospective UI language based on the new pref value.
   */
  private prospectiveUILanguageChanged_(prospectiveUILanguage: string) {
    this.set(
        'languages.prospectiveUILanguage',
        prospectiveUILanguage || this.originalProspectiveUILanguage_);
  }
  // </if>
  /**
   * Updates the list of enabled languages from the preferred languages pref.
   */
  private preferredLanguagesPrefChanged_() {
    if (this.prefs === undefined || this.languages === undefined) {
      return;
    }
    const enabledLanguageStates = this.getEnabledLanguageStates_(
        this.languages.translateTarget, this.languages.prospectiveUILanguage);
    // Recreate the enabled language set before updating languages.enabled.
    this.enabledLanguageSet_.clear();
    for (let i = 0; i < enabledLanguageStates.length; i++) {
      this.enabledLanguageSet_.add(enabledLanguageStates[i].language.code);
    }
    this.set('languages.enabled', enabledLanguageStates);
    // <if expr="not is_macosx">
    if (this.boundOnSpellcheckDictionariesChanged_) {
      this.languageSettingsPrivate_.getSpellcheckDictionaryStatuses(
          this.boundOnSpellcheckDictionariesChanged_);
    }
    // </if>
    // Update translate target language.
    new Promise(resolve => {
      this.languageSettingsPrivate_.getTranslateTargetLanguage(resolve);
    }).then(result => {
      this.set('languages.translateTarget', result);
    });
  }
  /**
   * Updates the spellCheckEnabled state of each enabled language.
   */
  private spellCheckDictionariesPrefChanged_() {
    if (this.prefs === undefined || this.languages === undefined) {
      return;
    }
    const spellCheckSet =
        this.makeSetFromArray_(this.getPref('spellcheck.dictionaries').value);
    const spellCheckForcedSet = this.makeSetFromArray_(
        this.getPref('spellcheck.forced_dictionaries').value);
    const spellCheckBlockedSet = this.makeSetFromArray_(
        this.getPref('spellcheck.blocked_dictionaries').value);
    for (let i = 0; i < this.languages.enabled.length; i++) {
      const languageState = this.languages.enabled[i];
      const isUser = spellCheckSet.has(languageState.language.code);
      const isForced = spellCheckForcedSet.has(languageState.language.code);
      const isBlocked = spellCheckBlockedSet.has(languageState.language.code);
      this.set(
          `languages.enabled.${i}.spellCheckEnabled`,
          (isUser && !isBlocked) || isForced);
      this.set(`languages.enabled.${i}.isManaged`, isForced || isBlocked);
    }
    const {on: spellCheckOnLanguages, off: spellCheckOffLanguages} =
        this.getSpellCheckLanguages_(this.languages.supported);
    this.set('languages.spellCheckOnLanguages', spellCheckOnLanguages);
    this.set('languages.spellCheckOffLanguages', spellCheckOffLanguages);
  }
  /**
   * Returns two arrays of SpellCheckLanguageStates for spell check languages:
   * one for spell check on, one for spell check off.
   * @param supportedLanguages The list of supported languages, normally
   *     this.languages.supported.
   */
  private getSpellCheckLanguages_(
      supportedLanguages: Array<chrome.languageSettingsPrivate.Language>):
      SpellCheckLanguages {
    // The spell check preferences are prioritised in this order:
    // forced_dictionaries, blocked_dictionaries, dictionaries.
    // The set of all language codes seen thus far.
    const seenCodes = new Set<string>();
    /**
     * Gets the list of language codes indicated by the preference name, and
     * de-duplicates it with all other language codes.
     */
    const getPrefAndDedupe = (prefName: string): Array<string> => {
      const result =
          this.getPref(prefName).value.filter((x: string) => !seenCodes.has(x));
      result.forEach((code: string) => seenCodes.add(code));
      return result;
    };
    const forcedCodes = getPrefAndDedupe('spellcheck.forced_dictionaries');
    const forcedCodesSet = new Set(forcedCodes);
    const blockedCodes = getPrefAndDedupe('spellcheck.blocked_dictionaries');
    const blockedCodesSet = new Set(blockedCodes);
    const enabledCodes = getPrefAndDedupe('spellcheck.dictionaries');
    const /** !Array<SpellCheckLanguageState> */ on = [];
    // We want to add newly enabled languages to the end of the "on" list, so we
    // should explicitly move the forced languages to the front of the list.
    for (const code of [...forcedCodes, ...enabledCodes]) {
      const language = this.supportedLanguageMap_.get(code);
      // language could be undefined if code is not in supportedLanguageMap_.
      // This should be rare, but could happen if supportedLanguageMap_ is
      // missing languages or the prefs are manually modified. We want to fail
      // gracefully if this happens - throwing an error here would cause
      // language settings to not load.
      if (language) {
        on.push({
          language,
          isManaged: forcedCodesSet.has(code),
          spellCheckEnabled: true,
          downloadDictionaryStatus: null,
          downloadDictionaryFailureCount: 0,
        });
      }
    }
    // Because the list of "spell check supported" languages is only exposed
    // through "supported languages", we need to filter that list along with
    // whether we've seen the language before.
    // We don't want to split this list in "forced" / "not-forced" like the
    // spell check on list above, as we don't want to explicitly surface / hide
    // blocked languages to the user.
    const /** !Array<SpellCheckLanguageState> */ off = [];
    for (const language of supportedLanguages) {
      // If spell check is off for this language, it must either not be in any
      // spell check pref, or be in the blocked dictionaries pref.
      if (language.supportsSpellcheck &&
          (!seenCodes.has(language.code) ||
           blockedCodesSet.has(language.code))) {
        off.push({
          language,
          isManaged: blockedCodesSet.has(language.code),
          spellCheckEnabled: false,
          downloadDictionaryStatus: null,
          downloadDictionaryFailureCount: 0
        });
      }
    }
    return {
      on,
      off,
    };
  }
  /**
   * Updates the list of always translate languages from translate prefs.
   */
  private alwaysTranslateLanguagesPrefChanged_() {
    if (this.prefs === undefined || this.languages === undefined) {
      return;
    }
    const alwaysTranslateCodes =
        Object.keys(this.getPref('translate_allowlists').value);
    const alwaysTranslateLanguages =
        alwaysTranslateCodes.map(code => this.getLanguage(code));
    this.set('languages.alwaysTranslate', alwaysTranslateLanguages);
  }
  /**
   * Updates the list of never translate languages from translate prefs.
   */
  private neverTranslateLanguagesPrefChanged_() {
    if (this.prefs === undefined || this.languages === undefined) {
      return;
    }
    const neverTranslateCodes =
        this.getPref('translate_blocked_languages').value;
    const neverTranslateLanguages =
        neverTranslateCodes.map((code: string) => this.getLanguage(code));
    this.set('languages.neverTranslate', neverTranslateLanguages);
  }
  /**
   * Recomputes the translateEnabled state of each enabled language when the
   * translate blocked-languages pref changes.
   */
  private translateLanguagesPrefChanged_() {
    if (this.prefs === undefined || this.languages === undefined) {
      return;
    }
    const translateBlockedPrefValue =
        this.getPref('translate_blocked_languages').value as Array<string>;
    const translateBlockedSet =
        this.makeSetFromArray_(translateBlockedPrefValue);
    for (let i = 0; i < this.languages.enabled.length; i++) {
      const language = this.languages.enabled[i].language;
      const translateEnabled = this.isTranslateEnabled_(
          language.code, !!language.supportsTranslate, translateBlockedSet,
          this.languages.translateTarget, this.languages.prospectiveUILanguage);
      this.set(
          'languages.enabled.' + i + '.translateEnabled', translateEnabled);
    }
  }
  /**
   * Mirrors the translate_recent_target pref into the languages model.
   */
  private translateTargetPrefChanged_() {
    if (this.prefs === undefined || this.languages === undefined) {
      return;
    }
    this.set(
        'languages.translateTarget',
        this.getPref('translate_recent_target').value);
  }
  /**
   * Constructs the languages model.
   * @param args used to populate the model above.
   */
  private createModel_(args: ModelArgs) {
    // Populate the hash map of supported languages.
    for (let i = 0; i < args.supportedLanguages.length; i++) {
      const language = args.supportedLanguages[i];
      language.supportsUI = !!language.supportsUI;
      language.supportsTranslate = !!language.supportsTranslate;
      language.supportsSpellcheck = !!language.supportsSpellcheck;
      language.isProhibitedLanguage = !!language.isProhibitedLanguage;
      this.supportedLanguageMap_.set(language.code, language);
    }
    let prospectiveUILanguage;
    // <if expr="is_win">
    // eslint-disable-next-line prefer-const
    prospectiveUILanguage = this.getPref('intl.app_locale').value ||
        this.originalProspectiveUILanguage_;
    // </if>
    // Create a list of enabled languages from the supported languages.
    const enabledLanguageStates = this.getEnabledLanguageStates_(
        args.translateTarget, prospectiveUILanguage);
    // Populate the hash set of enabled languages.
    for (let l = 0; l < enabledLanguageStates.length; l++) {
      this.enabledLanguageSet_.add(enabledLanguageStates[l].language.code);
    }
    const {on: spellCheckOnLanguages, off: spellCheckOffLanguages} =
        this.getSpellCheckLanguages_(args.supportedLanguages);
    const alwaysTranslateLanguages =
        args.alwaysTranslateCodes.map(code => this.getLanguage(code)!);
    const neverTranslateLanguages =
        args.neverTranslateCodes.map(code => this.getLanguage(code)!);
    const model = {
      supported: args.supportedLanguages,
      enabled: enabledLanguageStates,
      translateTarget: args.translateTarget,
      alwaysTranslate: alwaysTranslateLanguages,
      neverTranslate: neverTranslateLanguages,
      spellCheckOnLanguages,
      spellCheckOffLanguages,
      // <if expr="is_win">
      prospectiveUILanguage: prospectiveUILanguage,
      // </if>
    };
    // Initialize the Polymer languages model.
    this.languages = model;
  }
  /**
   * Returns a list of LanguageStates for each enabled language in the supported
   * languages list.
   * @param translateTarget Language code of the default translate
   *     target language.
   * @param prospectiveUILanguage Prospective UI display language. Only defined
   *     on Windows and Chrome OS.
   */
  private getEnabledLanguageStates_(
      translateTarget: string,
      prospectiveUILanguage: string|undefined): Array<LanguageState> {
    assert(CrSettingsPrefs.isInitialized);
    const pref = this.getPref('intl.accept_languages');
    const enabledLanguageCodes = pref.value.split(',');
    const languagesForcedPref = this.getPref('intl.forced_languages');
    const spellCheckPref = this.getPref('spellcheck.dictionaries');
    const spellCheckForcedPref = this.getPref('spellcheck.forced_dictionaries');
    const spellCheckBlockedPref =
        this.getPref('spellcheck.blocked_dictionaries');
    const languageForcedSet = this.makeSetFromArray_(languagesForcedPref.value);
    const spellCheckSet = this.makeSetFromArray_(
        spellCheckPref.value.concat(spellCheckForcedPref.value));
    const spellCheckForcedSet =
        this.makeSetFromArray_(spellCheckForcedPref.value);
    const spellCheckBlockedSet =
        this.makeSetFromArray_(spellCheckBlockedPref.value);
    const translateBlockedPrefValue =
        this.getPref('translate_blocked_languages').value as Array<string>;
    const translateBlockedSet =
        this.makeSetFromArray_(translateBlockedPrefValue);
    const enabledLanguageStates: Array<LanguageState> = [];
    for (let i = 0; i < enabledLanguageCodes.length; i++) {
      const code = enabledLanguageCodes[i];
      const language = this.supportedLanguageMap_.get(code);
      // Skip unsupported languages.
      if (!language) {
        continue;
      }
      const languageState: LanguageState = {
        language: language,
        spellCheckEnabled:
            spellCheckSet.has(code) && !spellCheckBlockedSet.has(code) ||
            spellCheckForcedSet.has(code),
        translateEnabled: this.isTranslateEnabled_(
            code, !!language.supportsTranslate, translateBlockedSet,
            translateTarget, prospectiveUILanguage),
        isManaged:
            spellCheckForcedSet.has(code) || spellCheckBlockedSet.has(code),
        isForced: languageForcedSet.has(code),
        downloadDictionaryFailureCount: 0,
        removable: false,
        downloadDictionaryStatus: null,
      };
      enabledLanguageStates.push(languageState);
    }
    return enabledLanguageStates;
  }
  /**
   * True iff we translate pages that are in the given language.
   * @param code Language code.
   * @param supportsTranslate If translation supports the given language.
   * @param translateBlockedSet Set of languages for which translation is
   *     blocked.
   * @param translateTarget Language code of the default translate target
   *     language.
   * @param prospectiveUILanguage Prospective UI display language. Only define
   *     on Windows and Chrome OS.
   */
  private isTranslateEnabled_(
      code: string, supportsTranslate: boolean,
      translateBlockedSet: Set<string>, translateTarget: string,
      prospectiveUILanguage: string|undefined): boolean {
    const translateCode = this.convertLanguageCodeForTranslate(code);
    return supportsTranslate && !translateBlockedSet.has(translateCode) &&
        translateCode !== translateTarget &&
        (!prospectiveUILanguage || code !== prospectiveUILanguage);
  }
  // <if expr="not is_macosx">
  /**
   * Updates the dictionary download status for spell check languages in order
   * to track the number of times a spell check dictionary download has failed.
   */
  private onSpellcheckDictionariesChanged_(
      statuses:
          Array<chrome.languageSettingsPrivate.SpellcheckDictionaryStatus>) {
    const statusMap = new Map();
    statuses.forEach(status => {
      statusMap.set(status.languageCode, status);
    });
    const collectionNames =
        ['enabled', 'spellCheckOnLanguages', 'spellCheckOffLanguages'];
    const languages = this.languages as unknown as
        {[k: string]: Array<LanguageState|SpellCheckLanguageState>};
    collectionNames.forEach(collectionName => {
      languages[collectionName].forEach((languageState, index) => {
        const status = statusMap.get(languageState.language.code);
        if (!status) {
          return;
        }
        const previousStatus = languageState.downloadDictionaryStatus;
        const keyPrefix = `languages.${collectionName}.${index}`;
        this.set(`${keyPrefix}.downloadDictionaryStatus`, status);
        const failureCountKey = `${keyPrefix}.downloadDictionaryFailureCount`;
        if (status.downloadFailed &&
            !(previousStatus && previousStatus.downloadFailed)) {
          const failureCount = languageState.downloadDictionaryFailureCount + 1;
          this.set(failureCountKey, failureCount);
        } else if (
            status.isReady && !(previousStatus && previousStatus.isReady)) {
          this.set(failureCountKey, 0);
        }
      });
    });
  }
  // </if>
  /**
   * Updates the |removable| property of the enabled language states based
   * on what other languages and input methods are enabled.
   */
  private updateRemovableLanguages_() {
    if (this.prefs === undefined || this.languages === undefined) {
      return;
    }
    for (let i = 0; i < this.languages.enabled.length; i++) {
      const languageState = this.languages.enabled[i];
      this.set(
          'languages.enabled.' + i + '.removable',
          this.canDisableLanguage(languageState));
    }
  }
  /**
   * Creates a Set from the elements of the array.
   */
  private makeSetFromArray_<T>(list: Array<T>): Set<T> {
    return new Set(list);
  }
  // LanguageHelper implementation.
  /** @return Promise resolved once the languages model has been created. */
  whenReady(): Promise<void> {
    return this.resolver_.promise;
  }
  // <if expr="is_win">
  /**
   * Sets the prospective UI language to the chosen language. This won't affect
   * the actual UI language until a restart.
   */
  setProspectiveUILanguage(languageCode: string) {
    this.browserProxy_.setProspectiveUILanguage(languageCode);
  }
  /**
   * True if the prospective UI language was changed from its starting value.
   */
  requiresRestart(): boolean {
    return this.originalProspectiveUILanguage_ !==
        this.languages!.prospectiveUILanguage;
  }
  // </if>
  /**
   * @return The language code for ARC IMEs.
   */
  getArcImeLanguageCode(): string {
    return kArcImeLanguage;
  }
  /**
   * @return True if the language is for ARC IMEs.
   */
  isLanguageCodeForArcIme(languageCode: string): boolean {
    return languageCode === kArcImeLanguage;
  }
  /**
   * @return True if the language can be translated by Chrome.
   */
  isLanguageTranslatable(language: chrome.languageSettingsPrivate.Language):
      boolean {
    if (language.code === 'zh-CN' || language.code === 'zh-TW') {
      // In Translate, general Chinese is not used, and the sub code is
      // necessary as a language code for the Translate server.
      return true;
    }
    if (language.code === this.getLanguageCodeWithoutRegion(language.code) &&
        language.supportsTranslate) {
      return true;
    }
    return false;
  }
  /**
   * @return True if the language is enabled.
   */
  isLanguageEnabled(languageCode: string): boolean {
    return this.enabledLanguageSet_.has(languageCode);
  }
  /**
   * Enables the language, making it available for spell check and input.
   */
  enableLanguage(languageCode: string) {
    if (!CrSettingsPrefs.isInitialized) {
      return;
    }
    this.languageSettingsPrivate_.enableLanguage(languageCode);
  }
  /**
   * Disables the language.
   */
  disableLanguage(languageCode: string) {
    if (!CrSettingsPrefs.isInitialized) {
      return;
    }
    // Remove the language from spell check.
    this.deletePrefListItem('spellcheck.dictionaries', languageCode);
    // Remove the language from preferred languages.
    this.languageSettingsPrivate_.disableLanguage(languageCode);
  }
  /**
   * @return True iff the given language is the only enabled language for which
   *     translate is disabled.
   */
  isOnlyTranslateBlockedLanguage(languageState: LanguageState): boolean {
    return !languageState.translateEnabled &&
        this.languages!.enabled.filter(lang => !lang.translateEnabled)
            .length === 1;
  }
  /**
   * @return True if the language may be removed from the enabled list.
   */
  canDisableLanguage(languageState: LanguageState): boolean {
    // Cannot disable the prospective UI language.
    if (languageState.language.code === this.languages!.prospectiveUILanguage) {
      return false;
    }
    // Cannot disable the only enabled language.
    if (this.languages!.enabled.length === 1) {
      return false;
    }
    // Cannot disable the last translate blocked language.
    if (this.isOnlyTranslateBlockedLanguage(languageState)) {
      return false;
    }
    return true;
  }
  /**
   * @return True if the language may be added to the enabled list.
   */
  canEnableLanguage(language: chrome.languageSettingsPrivate.Language):
      boolean {
    return !(
        this.isLanguageEnabled(language.code) ||
        language.isProhibitedLanguage ||
        this.isLanguageCodeForArcIme(language.code) /* internal use only */);
  }
  /**
   * Sets whether a given language should always be automatically translated.
   */
  setLanguageAlwaysTranslateState(
      languageCode: string, alwaysTranslate: boolean) {
    this.languageSettingsPrivate_.setLanguageAlwaysTranslateState(
        languageCode, alwaysTranslate);
  }
  /**
   * Moves the language in the list of enabled languages either up (toward the
   * front of the list) or down (toward the back).
   * @param upDirection True if we need to move up, false if we need to move
   *     down
   */
  moveLanguage(languageCode: string, upDirection: boolean) {
    if (!CrSettingsPrefs.isInitialized) {
      return;
    }
    if (upDirection) {
      this.languageSettingsPrivate_.moveLanguage(languageCode, MoveType.UP);
    } else {
      this.languageSettingsPrivate_.moveLanguage(languageCode, MoveType.DOWN);
    }
  }
  /**
   * Moves the language directly to the front of the list of enabled languages.
   */
  moveLanguageToFront(languageCode: string) {
    if (!CrSettingsPrefs.isInitialized) {
      return;
    }
    this.languageSettingsPrivate_.moveLanguage(languageCode, MoveType.TOP);
  }
  /**
   * Enables translate for the given language by removing the translate
   * language from the blocked languages preference.
   */
  enableTranslateLanguage(languageCode: string) {
    this.languageSettingsPrivate_.setEnableTranslationForLanguage(
        languageCode, true);
  }
  /**
   * Disables translate for the given language by adding the translate
   * language to the blocked languages preference.
   */
  disableTranslateLanguage(languageCode: string) {
    this.languageSettingsPrivate_.setEnableTranslationForLanguage(
        languageCode, false);
  }
  /**
   * Sets the translate target language and adds it to the content languages if
   * not already there.
   */
  setTranslateTargetLanguage(languageCode: string) {
    this.languageSettingsPrivate_.setTranslateTargetLanguage(languageCode);
  }
  /**
   * Enables or disables spell check for the given language.
   */
  toggleSpellCheck(languageCode: string, enable: boolean) {
    if (!this.languages) {
      return;
    }
    if (enable) {
      this.appendPrefListItem('spellcheck.dictionaries', languageCode);
    } else {
      this.deletePrefListItem('spellcheck.dictionaries', languageCode);
    }
  }
  /**
   * Converts the language code for translate. There are some differences
   * between the language set the Translate server uses and that for
   * Accept-Language.
   */
  convertLanguageCodeForTranslate(languageCode: string): string {
    if (languageCode in kLanguageCodeToTranslateCode) {
      return kLanguageCodeToTranslateCode[languageCode];
    }
    const main = languageCode.split('-')[0];
    if (main === 'zh') {
      // In Translate, general Chinese is not used, and the sub code is
      // necessary as a language code for the Translate server.
      return languageCode;
    }
    if (main in kTranslateLanguageSynonyms) {
      return kTranslateLanguageSynonyms[main];
    }
    return main;
  }
  /**
   * Given a language code, returns just the base language. E.g., converts
   * 'en-GB' to 'en'.
   */
  getLanguageCodeWithoutRegion(languageCode: string): string {
    // The Norwegian languages fall under the 'no' macrolanguage.
    if (languageCode === 'nb' || languageCode === 'nn') {
      return 'no';
    }
    // The installer still uses the old language code "iw", instead of "he",
    // for Hebrew. It needs to be converted to "he", otherwise it will not be
    // found in supportedLanguageMap_.
    //
    // Note that this value is saved in the user's local state. Even
    // if the installer is changed to use "he", because the installer does not
    // overwrite this value, the conversion is still needed for old users.
    if (languageCode === 'iw') {
      return 'he';
    }
    // Match the characters before the hyphen.
    const result = languageCode.match(/^([^-]+)-?/)!;
    assert(result.length === 2);
    return result[1];
  }
  /**
   * Looks up a supported language by code, falling back to the code without
   * its region suffix when the exact code is unknown.
   */
  getLanguage(languageCode: string): chrome.languageSettingsPrivate.Language
      |undefined {
    // If a languageCode is not found, try language without location.
    return this.supportedLanguageMap_.get(languageCode) ||
        this.supportedLanguageMap_.get(
            this.getLanguageCodeWithoutRegion(languageCode));
  }
  /**
   * Retries downloading the dictionary for |languageCode|.
   */
  retryDownloadDictionary(languageCode: string) {
    this.languageSettingsPrivate_.retryDownloadDictionary(languageCode);
  }
}
// Register the singleton element under its 'settings-languages' tag.
customElements.define(SettingsLanguagesElement.is, SettingsLanguagesElement);
import {BigNumber} from 'bignumber.js';
import {TransactionConfig} from 'web3-core';
import {UserSigner, UserSecretKey, Transaction, Nonce, Balance, ChainID, GasLimit, GasPrice, TransactionPayload, TransactionVersion, Address} from '@elrondnetwork/erdjs';
import {egldBroadcast, egldGetTransactionsCount} from '../blockchain';
import {axios, validateBody} from '../connector/tatum';
import {ESDT_SYSTEM_SMART_CONTRACT_ADDRESS, TATUM_API_URL} from '../constants';
import {
CreateRecord,
Currency,
EgldEsdtTransaction,
EgldBasicTransaction,
EgldSendTransaction,
EsdtAddOrBurnNftQuantity,
EsdtControlChanges,
EsdtCreateNftOrSft,
EsdtFreezeOrWipeNft,
EsdtFreezeOrWipeOrOwnership,
EsdtIssue,
EsdtIssueNftOrSft,
EsdtMint,
EsdtSpecialRole,
EsdtToken,
EsdtTransfer,
EsdtTransferNft,
EsdtTransferNftCreateRole,
TransactionKMS,
} from '../model';
import {generateAddressFromPrivatekey} from '../wallet/address';
// Base URL of the EGLD node proxy; the TATUM_API_URL env var overrides the
// compiled-in default.
const ELROND_V3_ENDPOINT = () => `${process.env.TATUM_API_URL || TATUM_API_URL}/v3/egld/node`;
/**
 * Get Elrond network config from the EGLD node proxy.
 * @returns the raw response body, or null when the request fails
 */
export const egldGetConfig = async () => {
  const baseUrl = await getEgldClient()
  try {
    const response = await axios.get(`${baseUrl}/${process.env.TATUM_API_KEY}/network/config`)
    return response.data
  } catch (err) {
    // Best-effort: log and fall through to the null return below.
    console.error(err.toString())
  }
  return null
}
/**
 * Estimate Gas price for the transaction.
 * @returns the network's minimum gas price
 * @throws Error when the config cannot be fetched or contains no price
 */
export const egldGetGasPrice = async (): Promise<number> => {
  // egldGetConfig() resolves to null on transport errors; guard before
  // destructuring so callers get the domain error below instead of a
  // TypeError from destructuring null.
  const response = await egldGetConfig();
  const data = response?.data;
  const price = data?.config?.erd_min_gas_price;
  if (price) {
    return price;
  }
  throw Error(data?.data?.returnMessage || 'egld.gasPrice.error')
}
/**
 * Estimate Gas limit for the transaction.
 * @param tx basic transaction whose cost should be estimated
 * @throws Error when the node does not return a gas-unit estimate
 */
export const egldGetGasLimit = async (tx: EgldBasicTransaction): Promise<number> => {
  const baseUrl = await getEgldClient()
  const response = await axios.post(`${baseUrl}/${process.env.TATUM_API_KEY}/transaction/cost`, tx)
  const units = response.data?.data?.txGasUnits
  if (units) {
    return units
  }
  throw Error(response.data?.data?.returnMessage || 'egld.gasLimit.error')
}
/**
 * Sign transaction locally with the supplied private key.
 * @param tx transaction to sign (mutated in place by the signer)
 * @param fromPrivateKey sender's private key
 * @returns JSON string of the signed, sendable transaction
 */
export const signEgldTransaction = async (tx: Transaction, fromPrivateKey: string): Promise<string> => {
  const signer = new UserSigner(UserSecretKey.fromString(fromPrivateKey))
  signer.sign(tx)
  return JSON.stringify(tx.toSendable())
}
/**
 * Returns EGLD server to connect to.
 * @param provider url of the EGLD Server to connect to. If not set, default
 *     public server will be used.
 */
export const getEgldClient = (provider?: string) => provider || ELROND_V3_ENDPOINT()
/**
 * Sign EGLD pending transaction from Tatum KMS
 * @param tx pending transaction from KMS
 * @param fromPrivateKey private key to sign transaction with.
 * @param provider url of the EGLD Server to connect to. If not set, default
 *     public server will be used.
 * @returns transaction data to be broadcast to blockchain.
 */
export const signEgldKMSTransaction = async (tx: TransactionKMS, fromPrivateKey: string, provider?: string) => {
  // Only EGLD transactions may be signed here.
  if (tx.chain !== Currency.EGLD) {
    throw Error('Unsupported chain.')
  }
  const node = getEgldClient(provider)
  const parsedTx = JSON.parse(tx.serializedTransaction)
  return await prepareSignedTransactionAbstraction(node, parsedTx, undefined, fromPrivateKey)
}
/**
 * Sign EGLD Store data transaction with private keys locally. Nothing is
 * broadcast to the blockchain.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default
 *     public server will be used.
 * @returns transaction data to be broadcast to blockchain.
 */
export const prepareEgldStoreDataTransaction = async (body: CreateRecord, provider?: string) => {
  await validateBody(body, CreateRecord)
  const {fromPrivateKey, signatureId, from, data} = body
  const client = getEgldClient(provider)
  // Derive the sender address from the private key when not given explicitly.
  const senderAddress = from || await generateAddressFromPrivatekey(Currency.EGLD, false, fromPrivateKey as string)
  if (!senderAddress) {
    throw new Error('Recipient must be provided.')
  }
  const tx: TransactionConfig = {
    from: from || 0,
    to: senderAddress,
    value: '0',
    data,
  }
  return await prepareSignedTransactionAbstraction(client, tx, signatureId, fromPrivateKey)
}
/**
 * Encode number for ESDT transaction
 * @param n number or BigNumber
 * @returns n as hex encoded string with an even number of characters, or ''
 *     when n is not a number
 */
const encodeNumber = (n: number | BigNumber): string => {
  const bn = new BigNumber(n)
  if (bn.isNaN()) {
    return ''
  }
  const result = bn.toString(16).toLowerCase()
  // ESDT payloads require an even number of hex digits, so left-pad with a
  // single '0' when the representation has odd length. (The previous
  // condition was inverted and padded even-length strings instead.)
  return result.length % 2 ? `0${result}` : result
}
/**
 * Prepare properties for ESDT Issue transaction.
 * Each key becomes `@<hex(key)>@<flag>` where the flag is the literal string
 * '0x01' for truthy values and '0x' for falsy ones.
 * @param props content of the data transaction
 * @returns props as encoded string ('' for a missing props object)
 */
const prepareProperties = (props: any): string => {
  if (!props) {
    return ''
  }
  const truthyFlag = '0x01'
  const falsyFlag = '0x'
  return Object.keys(props)
      .map(key => `@${Buffer.from(key).toString('hex')}@${props[key] ? truthyFlag : falsyFlag}`)
      .join('')
}
/**
 * Prepare data payload for an ESDT issuance transaction.
 * Layout: service@name@ticker@supply@decimals followed by encoded properties.
 * @param data content of the data
 * @returns data as string
 */
const prepareEgldEsdtIssuanceData = async (data: EsdtIssue): Promise<string> => {
  await validateBody(data, EsdtIssue)
  const segments = [
    data.service,
    Buffer.from(data.name).toString('hex'),
    Buffer.from(data.symbol).toString('hex'),
    encodeNumber(data.supply),
    encodeNumber(data.digits),
  ]
  return segments.join('@') + prepareProperties(data.properties)
}
/**
 * Build the encoded data payload for an ESDT transfer, optionally including a
 * smart-contract method invocation with its arguments.
 */
const prepareEgldEsdtTransferData = async (data: EsdtTransfer): Promise<string> => {
    await validateBody(data, EsdtTransfer)
    let payload = `${data.service}@${Buffer.from(data.tokenId as string).toString('hex')}@${encodeNumber(data.value)}`
    if (data.methodName) {
        payload += `@${Buffer.from(data.methodName).toString('hex')}`
        for (const arg of data.arguments || []) {
            // Numeric arguments are number-encoded, everything else hex-encoded.
            payload += new BigNumber(arg).isNaN()
                ? `@${Buffer.from(arg as string).toString('hex')}`
                : `@${encodeNumber(new BigNumber(arg))}`
        }
    }
    return payload
}
/** Build the encoded data payload for an ESDT mint/burn: service@tokenId@supply. */
const prepareEgldEsdtMintOrBurnData = async (data: EsdtMint): Promise<string> => {
    await validateBody(data, EsdtMint)
    return [
        data.service,
        Buffer.from(data.tokenId as string).toString('hex'),
        encodeNumber(data.supply),
    ].join('@')
}
/** Build the encoded data payload for an ESDT pause/unpause: service@tokenId. */
const prepareEgldEsdtPauseData = async (data: EsdtToken): Promise<string> => {
    await validateBody(data, EsdtToken)
    return `${data.service}@${Buffer.from(data.tokenId as string).toString('hex')}`
}
/** Build the encoded payload for freeze/wipe/ownership transfer: service@tokenId@account. */
const prepareEgldEsdtFreezeOrWipeOrOwnershipData = async (data: EsdtFreezeOrWipeOrOwnership): Promise<string> => {
    await validateBody(data, EsdtFreezeOrWipeOrOwnership)
    return [
        data.service,
        Buffer.from(data.tokenId as string).toString('hex'),
        Buffer.from(data.account).toString('hex'),
    ].join('@')
}
/** Build the encoded payload for a special-role assignment: service@tokenId@account[@role...]. */
const prepareEgldEsdtSpecialRoleData = async (data: EsdtSpecialRole): Promise<string> => {
    await validateBody(data, EsdtSpecialRole)
    const base = `${data.service}@${Buffer.from(data.tokenId as string).toString('hex')}@${Buffer.from(data.account).toString('hex')}`
    // Each role is appended as a further @hex(role) argument.
    let roleArgs = ''
    for (const role of data.role) {
        roleArgs += `@${Buffer.from(role).toString('hex')}`
    }
    return base + roleArgs
}
/** Build the encoded payload for a control-changes (property upgrade) call: service@tokenId[@properties]. */
const prepareEgldEsdtControlChangesData = async (data: EsdtControlChanges): Promise<string> => {
    await validateBody(data, EsdtControlChanges)
    const hexTokenId = Buffer.from(data.tokenId as string).toString('hex')
    return `${data.service}@${hexTokenId}${prepareProperties(data.properties)}`
}
/** Build the encoded payload for an NFT/SFT collection issuance: service@name@ticker[@properties]. */
const prepareEgldIssuanceNftOrSftData = async (data: EsdtIssueNftOrSft): Promise<string> => {
    await validateBody(data, EsdtIssueNftOrSft)
    const hexName = Buffer.from(data.name).toString('hex')
    const hexTicker = Buffer.from(data.symbol).toString('hex')
    return `${data.service}@${hexName}@${hexTicker}` + prepareProperties(data.properties)
}
/**
 * Build the encoded payload for NFT/SFT creation:
 * service@tokenId@quantity@nftName@royalties@hash@attributes[@uri...]
 */
const prepareEgldCreateNftOrSftData = async (data: EsdtCreateNftOrSft): Promise<string> => {
    await validateBody(data, EsdtCreateNftOrSft)
    // Royalties are expressed as percentage * 100 before number-encoding.
    const royalties = encodeNumber(new BigNumber(data.royalties).multipliedBy(100))
    const head = [
        data.service,
        Buffer.from(data.tokenId as string).toString('hex'),
        encodeNumber(data.quantity),
        Buffer.from(data.nftName).toString('hex'),
        royalties,
        data.hash,
        Buffer.from(data.attributes).toString('hex'),
    ].join('@')
    let uriArgs = ''
    for (const uri of data.uri) {
        uriArgs += `@${Buffer.from(uri).toString('hex')}`
    }
    return head + uriArgs
}
/** Build the encoded payload for transferring the NFT-create role: service@tokenId@from@to. */
const prepareEgldTransferNftCreateRoleData = async (data: EsdtTransferNftCreateRole): Promise<string> => {
    await validateBody(data, EsdtTransferNftCreateRole)
    return [
        data.service,
        Buffer.from(data.tokenId as string).toString('hex'),
        Buffer.from(data.from).toString('hex'),
        Buffer.from(data.to).toString('hex'),
    ].join('@')
}
/** Build the encoded payload for stopping NFT creation: service@tokenId. */
const prepareEgldStopNftCreateData = async (data: EsdtToken): Promise<string> => {
    await validateBody(data, EsdtToken)
    const hexTokenId = Buffer.from(data.tokenId as string).toString('hex')
    return `${data.service}@${hexTokenId}`
}
/** Build the encoded payload for adding/burning SFT quantity: service@tokenId@nonce@quantity. */
const prepareEgldAddOrBurnNftQuantityData = async (data: EsdtAddOrBurnNftQuantity): Promise<string> => {
    await validateBody(data, EsdtAddOrBurnNftQuantity)
    return [
        data.service,
        Buffer.from(data.tokenId as string).toString('hex'),
        encodeNumber(data.nonce),
        encodeNumber(data.quantity),
    ].join('@')
}
/** Build the encoded payload for freezing/wiping a single NFT: service@tokenId@nonce@account. */
const prepareEgldFreezeOrWipeNftData = async (data: EsdtFreezeOrWipeNft): Promise<string> => {
    await validateBody(data, EsdtFreezeOrWipeNft)
    return [
        data.service,
        Buffer.from(data.tokenId as string).toString('hex'),
        encodeNumber(data.nonce),
        Buffer.from(data.account).toString('hex'),
    ].join('@')
}
/**
 * Build the encoded payload for an NFT transfer:
 * service@tokenId@nonce@quantity@to, optionally followed by a method name and
 * its arguments for smart-contract calls.
 */
const prepareEgldTransferNftData = async (data: EsdtTransferNft): Promise<string> => {
    await validateBody(data, EsdtTransferNft)
    let payload = [
        data.service,
        Buffer.from(data.tokenId as string).toString('hex'),
        encodeNumber(data.nonce),
        encodeNumber(data.quantity),
        Buffer.from(data.to).toString('hex'),
    ].join('@')
    if (data.methodName) {
        payload += `@${Buffer.from(data.methodName).toString('hex')}`
        for (const arg of data.arguments || []) {
            // Numeric arguments are number-encoded, everything else hex-encoded.
            payload += new BigNumber(arg).isNaN()
                ? `@${Buffer.from(arg as string).toString('hex')}`
                : `@${encodeNumber(new BigNumber(arg))}`
        }
    }
    return payload
}
/**
 * Sign transaction abstraction. Nothing is broadcast to the blockchain.
 * Resolves the sender, fetches chain config, estimates the gas limit, then
 * either returns a KMS-ready JSON payload (when signatureId is set) or a
 * locally signed erdjs transaction.
 * @param client Web3 client of the EGLD Server to connect to. NOTE(review): currently unused in this function — confirm whether it should be.
 * @param transaction content of the transaction to broadcast
 * @param signatureId signature ID; when present, signing is deferred to KMS
 * @param fromPrivateKey private key used to derive the sender and sign locally
 * @returns transaction data to be broadcast to blockchain.
 */
const prepareSignedTransactionAbstraction = async (
    client: string, transaction: TransactionConfig, signatureId: string | undefined, fromPrivateKey: string | undefined
): Promise<string> => {
    // Fall back to deriving the sender address from the private key.
    const sender = transaction.from as string || await generateAddressFromPrivatekey(Currency.EGLD, false, fromPrivateKey as string);
    const { data } = await egldGetConfig();
    const { config } = data;
    const gasPrice = config?.erd_min_gas_price || 1000000000;
    const nonce = await egldGetTransactionsCount(sender as string);
    const egldTx: EgldSendTransaction = {
        nonce,
        // Negative values are clamped to '0'; otherwise EGLD is converted to the
        // chain's smallest denomination (value * 1e18).
        value: new BigNumber(transaction.value as string).isLessThan(0) ? '0' : new BigNumber(transaction.value as string).multipliedBy(1e18).toString(),
        receiver: transaction.to as string,
        sender,
        gasPrice,
        gasLimit: 0,
        // The node expects the data field base64 encoded.
        data: transaction.data ? Buffer.from(transaction.data as string).toString('base64') : undefined,
        chainID: config.erd_chain_id,
        version: config.erd_min_transaction_version,
    };
    // Estimate gas with a zero gasLimit placeholder, then fill it in.
    const gasLimit = await egldGetGasLimit(egldTx);
    egldTx.gasLimit = gasLimit;
    if (signatureId) {
        // KMS flow: return the unsigned transaction for external signing.
        // Note: value/data are returned in their original (unconverted) form here.
        return JSON.stringify({
            from: sender,
            to: transaction.to as string,
            value: transaction.value as string,
            data: transaction.data,
            gasPrice,
            gasLimit,
        });
    }
    // Local flow: build an erdjs Transaction and sign it with the private key.
    const erdjsTransaction = new Transaction({
        nonce: new Nonce(egldTx.nonce),
        value: Balance.fromString(egldTx.value),
        receiver: new Address(egldTx.receiver),
        sender: new Address(egldTx.sender),
        gasPrice: new GasPrice(egldTx.gasPrice),
        gasLimit: new GasLimit(egldTx.gasLimit),
        data: transaction.data ? new TransactionPayload(transaction.data) : undefined,
        chainID: new ChainID(egldTx.chainID),
        version: new TransactionVersion(egldTx.version),
    });
    return await signEgldTransaction(erdjsTransaction, fromPrivateKey as string);
}
/**
 * Sign an ESDT issue (deploy) transaction locally. Nothing is broadcast to the blockchain.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction data to be broadcast to blockchain.
 */
export const prepareEgldDeployEsdtSignedTransaction = async (body: EgldEsdtTransaction, provider?: string) => {
    await validateBody(body, EgldEsdtTransaction)
    const client = getEgldClient(provider)
    // Issuance carries a fee; default to 0.05 EGLD when no amount is given.
    const value = body.amount ? new BigNumber(body.amount).toNumber() : 0.05
    const sender = body.from || await generateAddressFromPrivatekey(Currency.EGLD, false, body.fromPrivateKey as string)
    const tx: TransactionConfig = {
        from: sender,
        to: ESDT_SYSTEM_SMART_CONTRACT_ADDRESS,
        value,
        data: await prepareEgldEsdtIssuanceData({ ...body.data, service: body.data.service || 'issue' }),
    }
    return prepareSignedTransactionAbstraction(client, tx, body.signatureId, body.fromPrivateKey)
}
/**
 * Sign an ESDT transfer transaction locally. Nothing is broadcast to the blockchain.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction data to be broadcast to blockchain.
 */
export const prepareEgldTransferEsdtSignedTransaction = async (body: EgldEsdtTransaction, provider?: string) => {
    await validateBody(body, EgldEsdtTransaction)
    const client = getEgldClient(provider)
    const sender = body.from || await generateAddressFromPrivatekey(Currency.EGLD, false, body.fromPrivateKey as string)
    const tx: TransactionConfig = {
        from: sender,
        to: body.to,
        data: await prepareEgldEsdtTransferData({ ...body.data, service: body.data.service || 'ESDTTransfer' }),
    }
    return prepareSignedTransactionAbstraction(client, tx, body.signatureId, body.fromPrivateKey)
}
/**
 * Sign an ESDT mint transaction locally. Nothing is broadcast to the blockchain.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction data to be broadcast to blockchain.
 */
export const prepareEgldMintEsdtSignedTransaction = async (body: EgldEsdtTransaction, provider?: string) => {
    await validateBody(body, EgldEsdtTransaction)
    const client = getEgldClient(provider)
    const sender = body.from || await generateAddressFromPrivatekey(Currency.EGLD, false, body.fromPrivateKey as string)
    const tx: TransactionConfig = {
        from: sender,
        to: ESDT_SYSTEM_SMART_CONTRACT_ADDRESS,
        data: await prepareEgldEsdtMintOrBurnData({ ...body.data, service: body.data.service || 'mint' }),
    }
    return prepareSignedTransactionAbstraction(client, tx, body.signatureId, body.fromPrivateKey)
}
/**
 * Sign an ESDT burn transaction locally. Nothing is broadcast to the blockchain.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction data to be broadcast to blockchain.
 */
export const prepareEgldBurnEsdtSignedTransaction = async (body: EgldEsdtTransaction, provider?: string) => {
    await validateBody(body, EgldEsdtTransaction)
    const client = getEgldClient(provider)
    const sender = body.from || await generateAddressFromPrivatekey(Currency.EGLD, false, body.fromPrivateKey as string)
    const tx: TransactionConfig = {
        from: sender,
        to: ESDT_SYSTEM_SMART_CONTRACT_ADDRESS,
        data: await prepareEgldEsdtMintOrBurnData({ ...body.data, service: body.data.service || 'ESDTBurn' }),
    }
    return prepareSignedTransactionAbstraction(client, tx, body.signatureId, body.fromPrivateKey)
}
/**
 * Sign an ESDT pause transaction locally. Nothing is broadcast to the blockchain.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction data to be broadcast to blockchain.
 */
export const prepareEgldPauseEsdtSignedTransaction = async (body: EgldEsdtTransaction, provider?: string) => {
    await validateBody(body, EgldEsdtTransaction)
    const client = getEgldClient(provider)
    const sender = body.from || await generateAddressFromPrivatekey(Currency.EGLD, false, body.fromPrivateKey as string)
    const tx: TransactionConfig = {
        from: sender,
        to: ESDT_SYSTEM_SMART_CONTRACT_ADDRESS,
        data: await prepareEgldEsdtPauseData({ ...body.data, service: body.data.service || 'pause' }),
    }
    return prepareSignedTransactionAbstraction(client, tx, body.signatureId, body.fromPrivateKey)
}
/**
 * Sign an ESDT/NFT special-role transaction locally. Nothing is broadcast to the blockchain.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction data to be broadcast to blockchain.
 */
export const prepareEgldSpecialRoleEsdtOrNftSignedTransaction = async (body: EgldEsdtTransaction, provider?: string) => {
    await validateBody(body, EgldEsdtTransaction)
    const client = getEgldClient(provider)
    const sender = body.from || await generateAddressFromPrivatekey(Currency.EGLD, false, body.fromPrivateKey as string)
    const tx: TransactionConfig = {
        from: sender,
        to: ESDT_SYSTEM_SMART_CONTRACT_ADDRESS,
        data: await prepareEgldEsdtSpecialRoleData({ ...body.data, service: body.data.service || 'setSpecialRole' }),
    }
    return prepareSignedTransactionAbstraction(client, tx, body.signatureId, body.fromPrivateKey)
}
/**
 * Sign an ESDT freeze / wipe / transfer-ownership transaction locally.
 * Nothing is broadcast to the blockchain.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction data to be broadcast to blockchain.
 */
export const prepareEgldFreezeOrWipeOrOwvershipEsdtSignedTransaction = async (body: EgldEsdtTransaction, provider?: string) => {
    await validateBody(body, EgldEsdtTransaction)
    const client = getEgldClient(provider)
    const sender = body.from || await generateAddressFromPrivatekey(Currency.EGLD, false, body.fromPrivateKey as string)
    const tx: TransactionConfig = {
        from: sender,
        to: ESDT_SYSTEM_SMART_CONTRACT_ADDRESS,
        data: await prepareEgldEsdtFreezeOrWipeOrOwnershipData({ ...body.data, service: body.data.service || 'transferOwnership' }),
    }
    return prepareSignedTransactionAbstraction(client, tx, body.signatureId, body.fromPrivateKey)
}
/**
 * Sign an ESDT control-changes (property upgrade) transaction locally.
 * Nothing is broadcast to the blockchain.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction data to be broadcast to blockchain.
 */
export const prepareEgldControlChangesEsdtSignedTransaction = async (body: EgldEsdtTransaction, provider?: string) => {
    await validateBody(body, EgldEsdtTransaction)
    const client = getEgldClient(provider)
    const sender = body.from || await generateAddressFromPrivatekey(Currency.EGLD, false, body.fromPrivateKey as string)
    const tx: TransactionConfig = {
        from: sender,
        to: ESDT_SYSTEM_SMART_CONTRACT_ADDRESS,
        data: await prepareEgldEsdtControlChangesData({ ...body.data, service: body.data.service || 'controlChanges' }),
    }
    return prepareSignedTransactionAbstraction(client, tx, body.signatureId, body.fromPrivateKey)
}
/**
 * Sign an NFT/SFT collection issuance transaction locally. Nothing is broadcast to the blockchain.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction data to be broadcast to blockchain.
 */
export const prepareEgldDeployNftOrSftSignedTransaction = async (body: EgldEsdtTransaction, provider?: string) => {
    await validateBody(body, EgldEsdtTransaction)
    const client = getEgldClient(provider)
    // Issuance carries a fee; default to 0.05 EGLD when no amount is given.
    const value = body.amount ? new BigNumber(body.amount).toNumber() : 0.05
    const sender = body.from || await generateAddressFromPrivatekey(Currency.EGLD, false, body.fromPrivateKey as string)
    // @ts-ignore
    const tx: TransactionConfig = {
        from: sender,
        to: ESDT_SYSTEM_SMART_CONTRACT_ADDRESS,
        value,
        data: await prepareEgldIssuanceNftOrSftData({ ...body.data, service: body.data.service || 'issueNonFungible' }),
    }
    return prepareSignedTransactionAbstraction(client, tx, body.signatureId, body.fromPrivateKey)
}
/**
 * Sign an NFT/SFT creation transaction locally. Nothing is broadcast to the blockchain.
 * The transaction is sent to the sender's own address.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction data to be broadcast to blockchain.
 */
export const prepareEgldCreateNftOrSftSignedTransaction = async (body: EgldEsdtTransaction, provider?: string) => {
    await validateBody(body, EgldEsdtTransaction)
    const client = getEgldClient(provider)
    const value = body.amount ? new BigNumber(body.amount).toNumber() : 0
    const sender = body.from || await generateAddressFromPrivatekey(Currency.EGLD, false, body.fromPrivateKey as string)
    const tx: TransactionConfig = {
        from: sender,
        to: sender,
        value,
        data: await prepareEgldCreateNftOrSftData({ ...body.data, service: body.data.service || 'ESDTNFTCreate' }),
    }
    // Gas estimation is delegated to the abstraction; the protocol formula is
    // 60000000 + (1500 * data.length) + (50000 * NFT size).
    return prepareSignedTransactionAbstraction(client, tx, body.signatureId, body.fromPrivateKey)
}
/**
 * Sign a transfer-NFT-create-role transaction locally. Nothing is broadcast to the blockchain.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction data to be broadcast to blockchain.
 */
export const prepareEgldTransferNftCreateRoleSignedTransaction = async (body: EgldEsdtTransaction, provider?: string) => {
    await validateBody(body, EgldEsdtTransaction)
    const client = getEgldClient(provider)
    const value = body.amount ? new BigNumber(body.amount).toNumber() : 0
    const sender = body.from || await generateAddressFromPrivatekey(Currency.EGLD, false, body.fromPrivateKey as string)
    const tx: TransactionConfig = {
        from: sender,
        to: ESDT_SYSTEM_SMART_CONTRACT_ADDRESS,
        value,
        data: await prepareEgldTransferNftCreateRoleData({ ...body.data, service: body.data.service || 'transferNFTCreateRole' }),
    }
    // Gas estimation is delegated to the abstraction; the protocol formula is
    // 60000000 + (1500 * data.length).
    return prepareSignedTransactionAbstraction(client, tx, body.signatureId, body.fromPrivateKey)
}
/**
 * Sign a stop-NFT-create transaction locally. Nothing is broadcast to the blockchain.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction data to be broadcast to blockchain.
 */
export const prepareEgldStopNftCreateSignedTransaction = async (body: EgldEsdtTransaction, provider?: string) => {
    await validateBody(body, EgldEsdtTransaction)
    const client = getEgldClient(provider)
    const value = body.amount ? new BigNumber(body.amount).toNumber() : 0
    const sender = body.from || await generateAddressFromPrivatekey(Currency.EGLD, false, body.fromPrivateKey as string)
    const tx: TransactionConfig = {
        from: sender,
        to: ESDT_SYSTEM_SMART_CONTRACT_ADDRESS,
        value,
        data: await prepareEgldStopNftCreateData({ ...body.data, service: body.data.service || 'stopNFTCreate' }),
    }
    return prepareSignedTransactionAbstraction(client, tx, body.signatureId, body.fromPrivateKey)
}
/**
 * Sign an SFT add-quantity / burn-quantity transaction locally.
 * Nothing is broadcast to the blockchain. The transaction is sent to the
 * sender's own address.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction data to be broadcast to blockchain.
 */
export const prepareEgldAddOrBurnNftQuantitySignedTransaction = async (body: EgldEsdtTransaction, provider?: string) => {
    await validateBody(body, EgldEsdtTransaction)
    const client = getEgldClient(provider)
    const value = body.amount ? new BigNumber(body.amount).toNumber() : 0
    const sender = body.from || await generateAddressFromPrivatekey(Currency.EGLD, false, body.fromPrivateKey as string)
    const tx: TransactionConfig = {
        from: sender,
        to: sender,
        value,
        data: await prepareEgldAddOrBurnNftQuantityData({ ...body.data, service: body.data.service || 'ESDTNFTBurn' }),
    }
    return prepareSignedTransactionAbstraction(client, tx, body.signatureId, body.fromPrivateKey)
}
/**
 * Sign a freeze-single-NFT transaction locally. Nothing is broadcast to the blockchain.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction data to be broadcast to blockchain.
 */
export const prepareEgldFreezeNftSignedTransaction = async (body: EgldEsdtTransaction, provider?: string) => {
    await validateBody(body, EgldEsdtTransaction)
    const client = getEgldClient(provider)
    const value = body.amount ? new BigNumber(body.amount).toNumber() : 0
    const sender = body.from || await generateAddressFromPrivatekey(Currency.EGLD, false, body.fromPrivateKey as string)
    const tx: TransactionConfig = {
        from: sender,
        to: ESDT_SYSTEM_SMART_CONTRACT_ADDRESS,
        value,
        data: await prepareEgldFreezeOrWipeNftData({ ...body.data, service: body.data.service || 'freezeSingleNFT' }),
    }
    return prepareSignedTransactionAbstraction(client, tx, body.signatureId, body.fromPrivateKey)
}
/**
 * Sign a wipe-single-NFT transaction locally. Nothing is broadcast to the blockchain.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction data to be broadcast to blockchain.
 */
export const prepareEgldWipeNftSignedTransaction = async (body: EgldEsdtTransaction, provider?: string) => {
    await validateBody(body, EgldEsdtTransaction)
    const client = getEgldClient(provider)
    const value = body.amount ? new BigNumber(body.amount).toNumber() : 0
    const sender = body.from || await generateAddressFromPrivatekey(Currency.EGLD, false, body.fromPrivateKey as string)
    const tx: TransactionConfig = {
        from: sender,
        to: ESDT_SYSTEM_SMART_CONTRACT_ADDRESS,
        value,
        data: await prepareEgldFreezeOrWipeNftData({ ...body.data, service: body.data.service || 'wipeSingleNFT' }),
    }
    return prepareSignedTransactionAbstraction(client, tx, body.signatureId, body.fromPrivateKey)
}
/**
 * Sign an NFT transfer transaction locally. Nothing is broadcast to the blockchain.
 * The transaction is sent to the sender's own address; the recipient is
 * embedded in the encoded data payload.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction data to be broadcast to blockchain.
 */
export const prepareEgldTransferNftSignedTransaction = async (body: EgldEsdtTransaction, provider?: string) => {
    await validateBody(body, EgldEsdtTransaction)
    const client = getEgldClient(provider)
    const value = body.amount ? new BigNumber(body.amount).toNumber() : 0
    const sender = body.from || await generateAddressFromPrivatekey(Currency.EGLD, false, body.fromPrivateKey as string)
    const tx: TransactionConfig = {
        from: sender,
        to: sender,
        value,
        data: await prepareEgldTransferNftData({ ...body.data as EsdtTransferNft, service: body.data.service || 'ESDTNFTTransfer' }),
    }
    // Gas estimation is delegated to the abstraction; the protocol formula is
    // 1000000 + (1500 * data.length), plus extra for smart-contract calls.
    return prepareSignedTransactionAbstraction(client, tx, body.signatureId, body.fromPrivateKey)
}
/**
 * Sign a plain EGLD value transaction locally. Nothing is broadcast to the blockchain.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction data to be broadcast to blockchain.
 */
export const prepareEgldSignedTransaction = async (body: EgldEsdtTransaction, provider?: string) => {
    await validateBody(body, EgldEsdtTransaction)
    const client = getEgldClient(provider)
    const tx: TransactionConfig = {
        from: body.from || 0,
        to: body.to,
        value: body.amount,
        data: body.data,
    }
    return prepareSignedTransactionAbstraction(client, tx, body.signatureId, body.fromPrivateKey)
}
/**
 * Send an EGLD store-data transaction to the blockchain. This broadcasts a
 * signed transaction; the operation is irreversible.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction id of the transaction in the blockchain
 */
export const sendEgldStoreDataTransaction = async (body: CreateRecord, provider?: string) => {
    const signedTx = await prepareEgldStoreDataTransaction(body, provider);
    return egldBroadcast(signedTx, body.signatureId);
};
/**
 * Send an EGLD value transaction to the blockchain. This broadcasts a signed
 * transaction; the operation is irreversible.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction id of the transaction in the blockchain
 */
export const sendEgldTransaction = async (body: EgldEsdtTransaction, provider?: string) => {
    const signedTx = await prepareEgldSignedTransaction(body, provider)
    return egldBroadcast(signedTx, body.signatureId)
}
/**
 * Send an ESDT deploy (issue) transaction to the blockchain. This broadcasts a
 * signed transaction; the operation is irreversible.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction id of the transaction in the blockchain
 */
export const sendEgldDeployEsdtTransaction = async (body: EgldEsdtTransaction, provider?: string) => {
    const signedTx = await prepareEgldDeployEsdtSignedTransaction(body, provider)
    return egldBroadcast(signedTx, body.signatureId)
}
/**
 * Send an EGLD smart-contract method invocation to the blockchain. Reuses the
 * ESDT transfer preparation (which supports method name + arguments). This
 * broadcasts a signed transaction; the operation is irreversible.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction id of the transaction in the blockchain
 */
export const sendEgldSmartContractMethodInvocationTransaction = async (body: EgldEsdtTransaction, provider?: string) => {
    const signedTx = await prepareEgldTransferEsdtSignedTransaction(body, provider)
    return egldBroadcast(signedTx, body.signatureId)
}
/**
 * Send an EGLD NFT transfer transaction to the blockchain. This broadcasts a
 * signed transaction; the operation is irreversible.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction id of the transaction in the blockchain
 */
export const sendEgldTransferNftTransaction = async (body: EgldEsdtTransaction, provider?: string) => {
    const signedTx = await prepareEgldTransferNftSignedTransaction(body, provider)
    return egldBroadcast(signedTx, body.signatureId)
}
/**
 * Send an EGLD NFT/SFT collection deploy transaction to the blockchain. This
 * broadcasts a signed transaction; the operation is irreversible.
 * @param body content of the transaction to broadcast
 * @param provider url of the EGLD Server to connect to. If not set, default public server will be used.
 * @returns transaction id of the transaction in the blockchain
 */
export const sendEgldDeployNftTransaction = async (body: EgldEsdtTransaction, provider?: string) => {
    const signedTx = await prepareEgldDeployNftOrSftSignedTransaction(body, provider)
    return egldBroadcast(signedTx, body.signatureId)
}
// TODO: add ERC-1155 support | the_stack |
import { ReallySmallEvents } from "really-small-events";
import {
assign,
allAsync,
cast,
cleanArgs,
chainAsync,
uuid as uuidFN,
noop,
throwErr,
setFast,
resolvePath,
isSafari,
deepGet,
buildQuery,
_nanoSQLQueue,
adapterFilters,
cleanArgs2
} from "./utilities";
import {
InanoSQLConfig,
InanoSQLFunction,
InanoSQLActionOrView,
InanoSQLQuery,
disconnectFilter,
InanoSQLDatabaseEvent,
extendFilter,
queryFilter,
eventFilter,
configFilter,
actionViewFilter,
InanoSQLAdapter,
willConnectFilter,
readyFilter,
InanoSQLTableColumn,
InanoSQLTableConfig,
InanoSQLTable,
InanoSQLInstance,
InanoSQLQueryBuilder,
customEventFilter,
VERSION,
TableQueryResult,
postConnectFilter,
onEventFilter,
offEventFilter,
InanoSQLV1ConfigFn,
InanoSQLFKActions,
uuid,
InanoSQLDBConfig
} from "./interfaces";
import { attachDefaultFns } from "./functions";
import { _nanoSQLQuery } from "./query";
import { _nanoSQLQueryBuilder } from "./query-builder";
import * as utils from "./utilities";
import { resolveMode } from "./adapter-detect";
import { SyncStorage } from "./adapters/syncStorage";
export {
InanoSQLInstance
}
// tslint:disable-next-line
export class nanoSQL implements InanoSQLInstance {
    // All databases managed by this instance, keyed by database id.
    public dbs: {
        [id: string]: InanoSQLDBConfig;
    } = {};
    // Id of the database that queries run against by default.
    public selectedDB: string = "nSQL_DB";
    // Value normalizers keyed by index type name; populated in the constructor.
    public indexTypes: {
        [type: string]: (value: any) => any;
    };
    // Library version, mirrored from the VERSION constant.
    public version: number = VERSION;
    // Registered query functions by name (the constructor calls attachDefaultFns).
    public functions: {
        [fnName: string]: InanoSQLFunction;
    };
    // Event emitters per database id; "Core" holds the built-in event scope.
    public events: {
        [id: string]: {
            Core: { [path: string]: ReallySmallEvents };
            [eventName: string]: { [path: string]: ReallySmallEvents };
        };
    } = {};
    // Earth radius in km — presumably used by geo query functions; confirm at call sites.
    public planetRadius: number = 6371;
    // Table (or table-like target) the next query will run against.
    public selectedTable: any;
    // Pending transaction operation batches, keyed by transaction id.
    public txs: {
        [id: string]: {
            table: string,
            type: "put"|"del"|"idx-put"|"idx-del";
            data: any;
        }[]
    } = {};
    constructor() {
        // Stringify objects (JSON) and String() everything else — shared by
        // the string-like index types below.
        const str = (value: any) => {
            return typeof value === "object" ? JSON.stringify(value) : String(value);
        };
        // Wrap a numeric parser so NaN/null inputs normalize to 0.
        const num = (parseFn: (string: any) => number) => {
            return (value: any) => {
                return isNaN(value) || value === null ? 0 : parseFn(value);
            }
        }
        // Default index-value normalizers, keyed by index type.
        this.indexTypes = {
            string: str,
            // Geo values yield undefined here; handling appears to live elsewhere.
            geo: (value: any) => {
                return undefined;
            },
            float: num(parseFloat),
            int: num(parseInt),
            number: num(parseFloat),
            date: num(parseInt),
            uuid: str,
            timeId: str,
            timeIdms: str
        };
        // Bind so _checkTTL can be handed directly to timers/callbacks.
        this._checkTTL = this._checkTTL.bind(this);
        attachDefaultFns(this);
    }
public getDB(id?: string): InanoSQLDBConfig {
const useID = id || this.selectedDB;
if (!this.dbs[useID]) {
throw new Error(`Database ${useID} doesn't exist!`);
}
return this.dbs[useID];
}
    /**
     * Rebuild the foreign-key relationship map for the selected database by
     * scanning every table index that declares a `foreignKey` prop. The map is
     * keyed by the *remote* (target) table so deletes there can cascade to
     * child rows.
     */
    public _rebuildFKs() {
        // bust memoized caches
        this.getDB().state.cacheId = uuidFN();
        this.getDB()._fkRels = {};
        Object.keys(this.getDB()._tables).forEach((tableName) => {
            const table = this.getDB()._tables[tableName];
            Object.keys(table.indexes).forEach((indexName) => {
                const index = table.indexes[indexName];
                if (index.props && index.props.foreignKey) {
                    // Target looks like "remoteTable.path.to.column"; the first
                    // segment is the remote table, the rest the column path.
                    const path = resolvePath(index.props.foreignKey.target);
                    const remoteTable = path.shift() as string;
                    if (!this.getDB()._fkRels[remoteTable]) {
                        this.getDB()._fkRels[remoteTable] = [];
                    }
                    this.getDB()._fkRels[remoteTable].push({
                        // Strip "[]" array markers from the path segments.
                        selfPath: path.map(s => s.replace(/\[\]/gmi, "")),
                        selfIsArray: index.props.foreignKey.target.indexOf("[]") !== -1,
                        childTable: tableName,
                        childPath: index.path,
                        childIsArray: index.isArray,
                        childIndex: indexName,
                        onDelete: index.props.foreignKey.onDelete || InanoSQLFKActions.NONE
                    });
                }
            })
        });
    }
public doFilter<T>(databaseID: string|undefined, filterName: string, args: T, complete: (result: T) => void, cancelled: (abortInfo: any) => void): void {
if (!databaseID) {
complete(args);
return;
}
if (this.dbs[databaseID] && this.getDB(databaseID).filters[filterName]) {
chainAsync(this.getDB(databaseID).filters[filterName], (item, i, nextFilter) => {
this.getDB(databaseID).filters[filterName][i](args, (newArgs) => {
args = newArgs;
nextFilter();
}, cancelled);
}).then(() => {
complete(args);
});
} else {
complete(args);
}
}
public getCache(id: string, args?: { offset: number, limit: number }): any[] {
if (!this.getDB()._queryCache[id]) {
throw new Error(`Cache "${id}" not found!`);
}
if (args) {
return this.getDB()._queryCache[id].slice(args.offset, args.offset + args.limit);
} else {
return this.getDB()._queryCache[id].slice();
}
}
public clearCache(id: string): boolean {
const exists = this.getDB()._queryCache[id] !== undefined;
delete this.getDB()._queryCache[id];
return exists;
}
public clearTTL(primaryKey: any): Promise<any> {
const k = this.selectedTable + "." + primaryKey;
return new Promise((res, rej) => {
this.triggerQuery(this.selectedDB, {
...buildQuery(this.selectedDB, this, "_ttl", "delete"),
where: ["key", "=", k]
}, noop, res, rej);
});
}
public expires(primaryKey: any): Promise<any> {
return new Promise((res, rej) => {
const k = this.selectedTable + "." + primaryKey;
let rows: any[] = [];
this.triggerQuery(this.selectedDB, {
...buildQuery(this.selectedDB, this, "_ttl", "select"),
where: ["key", "=", k]
}, (row) => {
rows.push(row);
}, () => {
if (!rows.length) {
res({ time: -1, cols: [] });
} else {
res({ time: (rows[0].date - Date.now()) / 1000, cols: rows[0].cols });
}
}, rej);
});
}
    // Sweep the hidden _ttl table in pages of 20: expired rows either get the
    // listed columns nulled (upsert) or are deleted outright, and their TTL
    // entry removed. The nearest future expiration schedules the next sweep.
    public _checkTTL() {
        if (this.getDB().config.disableTTL) return;
        if (this.getDB()._ttlTimer) {
            clearTimeout(this.getDB()._ttlTimer);
        }
        let page = 0;
        let nextTTL = 0;
        const getPage = () => {
            let rows: any[] = [];
            this.triggerQuery(this.selectedDB, {
                ...buildQuery(this.selectedDB, this, "_ttl", "select"),
                limit: 20,
                offset: 20 * page
            }, (row) => {
                rows.push(row);
            }, () => {
                // empty page: done sweeping; schedule the next run if anything expires later
                if (!rows.length) {
                    if (nextTTL) {
                        this.getDB()._ttlTimer = setTimeout(this._checkTTL, nextTTL - Date.now());
                    }
                    return;
                }
                chainAsync(rows, (row, i, next) => {
                    if (row.date < Date.now()) {
                        // expired: after handling the row, drop its _ttl entry
                        const clearTTL = () => {
                            this.triggerQuery(this.selectedDB, {
                                ...buildQuery(this.selectedDB, this, "_ttl", "delete"),
                                where: ["key", "=", row.key]
                            }, noop, next, throwErr);
                        };
                        // key format is "table.primaryKey"
                        const rowData = row.key.split(".");
                        const table = rowData[0];
                        const key = ["float", "int", "number"].indexOf(this.getDB()._tables[table].pkType) === -1 ? rowData[1] : parseFloat(rowData[1]);
                        if (row.cols.length) {
                            // column TTL: null out just the listed columns
                            let upsertObj = {};
                            row.cols.forEach((col) => {
                                upsertObj[col] = null;
                            });
                            this.triggerQuery(this.selectedDB, {
                                ...buildQuery(this.selectedDB, this, table, "upsert"),
                                actionArgs: upsertObj,
                                where: [this.getDB()._tables[table].pkCol, "=", key]
                            }, noop, clearTTL, throwErr);
                        } else {
                            // row TTL: delete the whole row
                            this.triggerQuery(this.selectedDB, {
                                ...buildQuery(this.selectedDB, this, table, "delete"),
                                where: [this.getDB()._tables[table].pkCol, "=", key]
                            }, noop, clearTTL, throwErr);
                        }
                    } else {
                        // not expired yet; remember the latest date seen for rescheduling
                        nextTTL = Math.max(nextTTL, row.date);
                        next();
                    }
                }).then(() => {
                    // NOTE(review): page advances even though expired rows were just
                    // deleted from _ttl, which can shift offsets and skip rows until
                    // the next sweep — confirm whether that's acceptable here.
                    page++;
                    getPage();
                });
            }, throwErr);
        };
        getPage();
    }
    /**
     * Select the table (or row array / query function) that subsequent
     * queries run against. With no argument the current selection is kept.
     * Returns `this` for chaining.
     */
    public selectTable(table?: string | any[] | ((where?: any[] | ((row: { [key: string]: any }, i?: number) => boolean)) => Promise<TableQueryResult>)): InanoSQLInstance {
        if (table) {
            this.selectedTable = table;
        }
        return this;
    }
public getPeers() {
return JSON.parse(localStorage.getItem("nsql-peers-" + this.getDB().state.id) || "[]");
}
public _initPlugins(config: InanoSQLConfig): Promise<any> {
return new Promise((res, rej) => {
// Build plugin filters
let filterObj: { [filterName: string]: any[] } = {};
(config.plugins || []).forEach((plugin) => {
(plugin.filters || []).forEach((filter) => {
if (!filterObj[filter.name]) {
filterObj[filter.name] = [];
}
// prevent priority conflicts
let priority = filter.priority;
while (filterObj[filter.name][priority]) {
priority++;
}
// set callback
filterObj[filter.name][priority] = filter.call;
});
});
Object.keys(filterObj).forEach((filterName) => {
this.getDB().filters[filterName] = [];
filterObj[filterName].forEach((callback) => {
if (callback) {
this.getDB().filters[filterName].unshift(callback);
}
});
});
const checkVersionRange = (version: number, range: number[]): boolean => {
if (!range || !range.length) return true;
if (range.length === 1) {
return version >= range[0];
} else {
return version >= range[0] && version < range[1];
}
};
let hasError = false;
// check that dependencies are satisfied
(config.plugins || []).forEach((plugin) => {
if (plugin.dependencies) {
const dependencies = plugin.dependencies || {};
Object.keys(plugin.dependencies).forEach((pluginName: string, i, next) => {
if (pluginName === "core") {
if (!checkVersionRange(VERSION, dependencies[pluginName])) {
hasError = true;
rej(`Plugin "${plugin.name}" requires a different core version of nano-sql!`);
}
} else {
const dependency = (config.plugins || []).reduce((p, c) => c.name === pluginName ? c : p);
if (!dependency) {
hasError = true;
rej(`Plugin "${plugin.name}" requires plugin "${pluginName}" but it isn't installed!`);
}
if (!checkVersionRange(dependency.version, dependencies[pluginName])) {
hasError = true;
rej(`Plugin "${plugin.name}" requires a different version of "${pluginName}"!`);
}
}
});
}
});
if (!hasError) {
res();
}
});
}
public _saveTableIds(databaseID: string): Promise<any> {
return new Promise((res, rej) => {
this.triggerQuery(databaseID, {
...buildQuery(databaseID, this, "_util", "upsert"),
actionArgs: assign({
key: "tableIds",
value: this.getDB(databaseID)._tableIds
})
}, noop, res, rej);
})
}
public presetQuery(fn: string, args?: any): InanoSQLQueryBuilder {
if (typeof this.selectedTable !== "string") {
throw new Error(`Can't get table queries without selecting a table!`);
}
let found = Object.keys(this.getDB()._tables[this.selectedTable as string].queries).indexOf(fn) !== -1;
if (!found) {
throw new Error(`Can't find preset query ${fn}!`);
}
const fnArgs = this.getDB()._tables[this.selectedTable as string].queries[fn].args;
let filteredArgs: any = {};
if (fnArgs) {
filteredArgs = cleanArgs2(this.selectedDB, args, fnArgs, this);
}
const q = this.getDB()._tables[this.selectedTable as string].queries[fn].call(this, filteredArgs);
const queryBuilder = this.query("");
queryBuilder._query = q;
return queryBuilder;
}
    /** Switch the instance's currently selected database; chainable. */
    public useDatabase(id: string): InanoSQLInstance {
        this.selectedDB = id;
        return this;
    }
    /** Alias for connect(): create and connect a new database. */
    public createDatabase(config?: InanoSQLConfig): Promise<any> {
        return this.connect(config);
    }
    /** List the ids of every database created on this instance. */
    public listDatabases(): string[] {
        return Object.keys(this.dbs);
    }
public dropDatabase(id: string): Promise<any> {
return new Promise((res, rej) => {
// drop all tables
const tables = Object.keys(this.getDB(id)._tables);
chainAsync(tables, (tableName, i, next, err) => {
const table = this.getDB(id)._tables[tableName];
this.triggerQuery(id, {
...buildQuery(id, this, table.name, "drop")
}, noop, () => {
next();
}, err);
}).then(() => {
// delete config data
delete this.dbs[id];
// done
res();
}).catch(rej);
});
}
public maybeCreateEventObject(id: string) {
if (!this.events[id]) {
this.events[id] = {
Core: {
"*": new ReallySmallEvents()
},
"*": { "*": new ReallySmallEvents() }
}
}
}
    /**
     * Create and connect a database. Phases, in order: register default
     * state, run plugin filters over the config, resolve and connect the
     * storage adapter, create the internal (_util, _ttl) and user tables,
     * run nano-sql and user-schema version migrations, then fire "ready".
     * Throws synchronously if a database with the same id already exists.
     */
    public connect(config: InanoSQLConfig = {}): Promise<any> {
        let t = this;
        const newDatabaseID = config.id ? String(config.id) : "nSQL_DB";
        if (this.dbs[newDatabaseID]) {
            throw new Error(`nSQL: ${newDatabaseID} database has already been created!`);
        }
        this.maybeCreateEventObject(newDatabaseID);
        // Seed the database record with defaults; the adapter is replaced below.
        this.dbs[newDatabaseID] = {
            adapter: new SyncStorage(),
            _ttlTimer: 0,
            _Q: new _nanoSQLQueue(),
            state: {
                activeAV: "",
                hasAnyEvents: false,
                peers: [],
                pid: uuidFN(),
                id: uuidFN(),
                cacheId: uuidFN(),
                peerEvents: [],
                focused: true,
                peerMode: false,
                connected: false,
                ready: false,
                exportQueryObj: false
            },
            config: {
                id: newDatabaseID,
            },
            _tables: {},
            _fkRels: {},
            _tableIds: { "_util": "_util", "_ttl": "_ttl" },
            _queryCache: {},
            filters: {}
        }
        this.selectedDB = newDatabaseID;
        this._refreshEventChecker();
        // Phase 1: let plugins rewrite the config via the "config" filter.
        return this._initPlugins(config).then(() => {
            return new Promise((res, rej) => {
                this.doFilter<configFilter>(newDatabaseID, "config", { res: config }, (r) => {
                    res(r.res);
                }, rej);
            });
        }).then((conf: InanoSQLConfig) => {
            this.getDB(newDatabaseID).state.id = newDatabaseID;
            this.getDB(newDatabaseID).config = {
                plugins: [],
                ...conf
            };
            // peer mode only makes sense in a browser (needs localStorage/window)
            if (typeof window !== "undefined" && conf && conf.peer) {
                this.getDB(newDatabaseID).state.peerMode = true;
            }
            return new Promise((res, rej) => {
                this.doFilter<willConnectFilter>(newDatabaseID, "willConnect", { res: this }, () => { res() }, rej);
            });
        }).then(() => {
            // setup and connect adapter
            return new Promise((res, rej) => {
                this.getDB(newDatabaseID).adapter = resolveMode(this.getDB(newDatabaseID).config.mode || "TEMP", this.getDB(newDatabaseID).config);
                // adapters may ship their own plugin; re-init plugins if so
                if (this.getDB(newDatabaseID).adapter.plugin) {
                    (this.getDB(newDatabaseID).config.plugins || []).push(this.getDB(newDatabaseID).adapter.plugin);
                }
                this._initPlugins(this.getDB(newDatabaseID).config).then(() => {
                    this.getDB(newDatabaseID).adapter.nSQL = this;
                    adapterFilters(newDatabaseID, this).connect(this.getDB(newDatabaseID).state.id, () => {
                        this.doFilter<postConnectFilter>(newDatabaseID, "postConnect", { res: this.getDB(newDatabaseID).config }, (config) => {
                            this.getDB(newDatabaseID).config = config.res;
                            res();
                        }, rej)
                    }, rej);
                }).catch(rej);
                if (this.getDB(newDatabaseID).config.planetRadius) {
                    this.planetRadius = this.getDB(newDatabaseID).config.planetRadius as number;
                }
            });
        }).then(() => {
            // Phase 2: connected — announce it, then create tables sequentially.
            this.triggerEvent(newDatabaseID, {
                target: "Core",
                targetId: this.getDB(newDatabaseID).state.id,
                path: "*",
                events: ["connect"],
                time: Date.now()
            });
            this.getDB(newDatabaseID).state.connected = true;
            const tables = ["_util", "_ttl"].concat((this.getDB(newDatabaseID).config.tables || []).map(t => t.name));
            return chainAsync(tables, (j, i, next, err) => {
                switch (j) {
                    case "_util":
                        // _util holds key/value bookkeeping (table ids, versions, counts)
                        this.triggerQuery(newDatabaseID, {
                            ...buildQuery(newDatabaseID, this, "_util", "create table"),
                            actionArgs: {
                                name: "_util",
                                model: {
                                    "key:string": { pk: true },
                                    "value:any": {}
                                },
                                _internal: true
                            }
                        }, noop, () => {
                            // restore any previously persisted table id map
                            this.triggerQuery(newDatabaseID, {
                                ...buildQuery(newDatabaseID, this, "_util", "select"),
                                where: ["key", "=", "tableIds"]
                            }, (row) => {
                                this.getDB(newDatabaseID)._tableIds = {
                                    ...this.getDB(newDatabaseID)._tableIds,
                                    ...row.value
                                }
                            }, () => {
                                next();
                            }, err);
                        }, err);
                        break;
                    case "_ttl":
                        // _ttl tracks row/column expirations (see _checkTTL)
                        this.triggerQuery(newDatabaseID, {
                            ...buildQuery(newDatabaseID, this, "_ttl", "create table"),
                            actionArgs: {
                                name: "_ttl",
                                model: {
                                    "key:string": { pk: true },
                                    "table:string": {},
                                    "cols:string[]": {},
                                    "date:number": {}
                                },
                                _internal: true
                            }
                        }, noop, next, err);
                        break;
                    default:
                        // user-declared table from config.tables
                        const model = (this.getDB(newDatabaseID).config.tables || []).filter(t => t.name === j)[0];
                        if (!model) {
                            err("Table not found!");
                            return;
                        }
                        this.triggerQuery(newDatabaseID, {
                            ...buildQuery(newDatabaseID, this, j, "create table"),
                            actionArgs: model
                        }, noop, next as any, err);
                }
            });
        }).then(() => {
            // migrate nanosql version as needed
            return new Promise((res, rej) => {
                let currentVersion: number;
                this.triggerQuery(newDatabaseID, {
                    ...buildQuery(newDatabaseID, this, "_util", "select"),
                    where: ["key", "=", "version"]
                }, (row) => {
                    if (row) currentVersion = row.value;
                }, () => {
                    if (!currentVersion || currentVersion < 2.0) {
                        this.triggerQuery(newDatabaseID, {
                            ...buildQuery(newDatabaseID, this, "_util", "upsert"),
                            actionArgs: { key: "version", value: VERSION }
                        }, noop, res, rej);
                    } else {
                        // no migration code right now
                        res();
                    }
                }, rej);
            });
        }).then(() => {
            // migrate user database version as needed
            return new Promise((res, rej) => {
                if (!this.getDB(newDatabaseID).config.version) {
                    res();
                    return;
                }
                let currentVersion: number;
                this.triggerQuery(newDatabaseID,{
                    ...buildQuery(newDatabaseID, this, "_util", "select"),
                    where: ["key", "=", "db-version"]
                }, (row) => {
                    if (row) currentVersion = row.value;
                }, () => {
                    const saveVersion = (version: number, complete, err) => {
                        this.triggerQuery(newDatabaseID, {
                            ...buildQuery(newDatabaseID, this, "_util", "upsert"),
                            actionArgs: { key: "db-version", value: version }
                        }, noop, complete, err);
                    };
                    // nothing to migrate, just set version
                    if (!currentVersion) {
                        saveVersion(this.getDB(newDatabaseID).config.version || 0, res, rej);
                    } else {
                        // step onVersionUpdate repeatedly until stored version matches config
                        const upgrade = () => {
                            if (currentVersion === this.getDB(newDatabaseID).config.version) {
                                saveVersion(this.getDB(newDatabaseID).config.version || 0, res, rej);
                            } else {
                                const updateVersion = this.getDB(newDatabaseID).config.onVersionUpdate;
                                if (!updateVersion) {
                                    saveVersion(this.getDB(newDatabaseID).config.version || 0, res, rej);
                                    return;
                                }
                                updateVersion(currentVersion).then((newVersion) => {
                                    currentVersion = newVersion;
                                    saveVersion(currentVersion, () => {
                                        setFast(upgrade);
                                    }, rej);
                                }).catch(rej);
                            }
                        };
                        upgrade();
                    }
                }, rej);
            });
        }).then(() => {
            // Phase 3: everything is up — run the "ready" filter, start TTL
            // sweeping and (in browsers) the peer network, then resolve.
            return new Promise((res, rej) => {
                const event: InanoSQLDatabaseEvent = {
                    target: "Core",
                    path: "*",
                    targetId: this.getDB(newDatabaseID).state.id,
                    events: ["ready"],
                    time: Date.now()
                };
                this.doFilter<readyFilter>(newDatabaseID, "ready", { res: event }, (evnt) => {
                    this.triggerEvent(newDatabaseID, evnt.res);
                    this.getDB(newDatabaseID).state.ready = true;
                    if (!this.getDB(newDatabaseID).config.disableTTL) {
                        this._checkTTL();
                    }
                    if (this.getDB(newDatabaseID).config.peer) {
                        this._initPeers();
                    }
                    res();
                }, rej);
            });
        });
    }
    // Join the cross-tab "peer network": peers coordinate through localStorage
    // (the peer list under "nsql-peers-<id>", events under "<pid>.<queryID>")
    // and react to each other via the window "storage" event. The peer at
    // index 0 acts as master and garbage-collects abandoned event keys.
    public _initPeers() {
        let counter = 0;
        this.getDB().state.pid = uuidFN();
        // Append this peer to the network
        this.getDB().state.peers = this.getPeers();
        this.getDB().state.peers.unshift(this.getDB().state.pid);
        localStorage.setItem("nsql-peers-" + this.getDB().state.id, JSON.stringify(this.getDB().state.peers));
        // When localstorage changes we may need to possibly update the peer list
        // or possibly respond to an event from another peer
        window.addEventListener("storage", (e) => {
            // peer list updated
            if (e.key === "nsql-peers-" + this.getDB().state.id) {
                this.getDB().state.peers = this.getPeers();
            }
            // recieved event from another peer
            if (e.key && e.key.indexOf(this.getDB().state.pid + ".") === 0) {
                // consume the event key so the sender knows we handled it
                localStorage.removeItem(e.key);
                const ev: InanoSQLDatabaseEvent = JSON.parse(e.newValue || "{}");
                // remember the query id so our own "change" handler below skips it
                this.getDB().state.peerEvents.push(ev.query.queryID || "");
                this.triggerEvent(this.selectedDB, {
                    ...ev,
                    types: ["peer change"]
                });
                setFast(() => {
                    this.triggerEvent(this.selectedDB, ev);
                });
            }
            // the "master" peer checks to make sure all peers have been
            // cleaning up their mess every 50 requests, if they aren't they
            // are removed. Keeps localStorage from filling up accidentally.
            counter++;
            if (counter > 50 && this.getDB().state.peers[0] === this.getDB().state.pid) {
                counter = 0;
                let len = localStorage.length;
                let peerKeys: { [id: string]: string[] } = {};
                while (len--) {
                    const key = localStorage.key(len);
                    // only grab events
                    const keyMatch = key ? key.match(/\w{8}-\w{4}-\w{4}-\w{4}-\w{8}/gmi) : null;
                    if (key && keyMatch) {
                        const peerID = (keyMatch || [""])[0];
                        if (!peerKeys[peerID]) {
                            peerKeys[peerID] = [];
                        }
                        peerKeys[peerID].push(key);
                    }
                }
                Object.keys(peerKeys).forEach((peerID) => {
                    // purge peers that aren't cleaning up their mess (and thus probably gone)
                    if (peerKeys[peerID].length > 10) {
                        this.getDB().state.peers = this.getDB().state.peers.filter(p => p !== peerID);
                        peerKeys[peerID].forEach((key) => {
                            localStorage.removeItem(key);
                        });
                        localStorage.setItem("nsql-peers-" + this.getDB().state.id, JSON.stringify(this.getDB().state.peers));
                    }
                });
            }
        });
        window.onblur = () => {
            this.getDB().state.focused = false;
        };
        // on focus we set this nsql to focused and move it's peer position
        // to the front
        window.onfocus = () => {
            // set this peer to master on focus
            this.getDB().state.peers = this.getDB().state.peers.filter((p) => p !== this.getDB().state.pid);
            this.getDB().state.peers.unshift(this.getDB().state.pid);
            localStorage.setItem("nsql-peers-" + this.getDB().state.id, JSON.stringify(this.getDB().state.peers));
            this.getDB().state.focused = true;
        };
        // send events to the peer network
        nSQL("*").on("change", (ev) => {
            // skip events we received FROM a peer (tracked in peerEvents above)
            const idxOf = this.getDB().state.peerEvents.indexOf(ev.query.queryID || "");
            if (idxOf !== -1) {
                this.getDB().state.peerEvents.splice(idxOf, 1);
                return;
            }
            this.getDB().state.peers.filter(p => p !== this.getDB().state.pid).forEach((p) => {
                localStorage.setItem(p + "." + ev.query.queryID, JSON.stringify(ev));
            });
        });
        // Remove self from peer network
        window.addEventListener("beforeunload", () => {
            this.getDB().state.peers = this.getDB().state.peers.filter((p) => p !== this.getDB().state.pid);
            localStorage.setItem("nsql-peers-" + this.getDB().state.id, JSON.stringify(this.getDB().state.peers));
            return false;
        });
    }
public every(args: { length: number, every?: number, offset?: number }): number[] {
let i = 0;
let arr: number[] = [];
while (i <= args.length) {
if (args.every) {
if (i % args.every === 0) {
arr.push(i + (args.offset || 0));
}
} else {
arr.push(i + (args.offset || 0));
}
i++;
}
return arr;
}
    /**
     * Subscribe to an event. Core events (connect/ready/etc) attach to the
     * "Core" bucket; row events (select/change/...) attach under the selected
     * table and nested path; anything else goes through the "customEvent"
     * filter so plugins can claim a namespace, otherwise it throws.
     */
    public on(action: string, callBack: (event: InanoSQLDatabaseEvent) => void, selectTable?: string): void {
        // event scope: explicit table arg, else selected table when it's a string
        let l: string = selectTable || (typeof this.selectedTable !== "string" ? "" : this.selectedTable) as string;
        const selDB = this.selectedDB;
        this.maybeCreateEventObject(selDB);
        this.doFilter<onEventFilter>(selDB, "onEvent", { res: { action, callback: callBack } }, (newEvent) => {
            switch (newEvent.res.action) {
                case "connect":
                case "ready":
                case "disconnect":
                case "peer change":
                case "slow query":
                    this.events[selDB].Core["*"].on(newEvent.res.action, newEvent.res.callback);
                    break;
                case "select":
                case "change":
                case "delete":
                case "upsert":
                case "*":
                    // first path segment is the table bucket, the rest is a nested path
                    const table = resolvePath(l);
                    if (!this.events[selDB][table[0]]) {
                        this.events[selDB][table[0]] = {
                            "*": new ReallySmallEvents()
                        };
                    }
                    const nestedPath = table.filter((v, i) => i > 0).join(".") || "*";
                    if (!this.events[selDB][table[0]][nestedPath]) {
                        this.events[selDB][table[0]][nestedPath] = new ReallySmallEvents();
                    }
                    this.events[selDB][table[0]][nestedPath].on(newEvent.res.action, newEvent.res.callback);
                    break;
                default:
                    // unknown action: let plugins claim it via the customEvent filter
                    new Promise((res, rej) => {
                        this.doFilter<customEventFilter>(selDB, "customEvent", { res: { nameSpace: "", path: "*" }, selectedTable: l, action: action, on: true }, res, rej);
                    }).then((evData: customEventFilter) => {
                        if (evData.res.nameSpace) {
                            if (!this.events[selDB][evData.res.nameSpace]) {
                                this.events[selDB][evData.res.nameSpace] = {
                                    "*": new ReallySmallEvents()
                                };
                            }
                            if (!this.events[selDB][evData.res.nameSpace][evData.res.path]) {
                                this.events[selDB][evData.res.nameSpace][evData.res.path] = new ReallySmallEvents();
                            }
                            this.events[selDB][evData.res.nameSpace][evData.res.path].on(newEvent.res.action, newEvent.res.callback);
                        } else {
                            throw new Error(`Invalid event "${action}"!`);
                        }
                        this._refreshEventChecker();
                    });
            }
            this._refreshEventChecker();
        }, noop);
    }
    /**
     * Unsubscribe from an event. Mirrors on(): Core events detach from the
     * "Core" bucket, row events from the table/path bucket, and custom events
     * are resolved through the "customEvent" filter.
     */
    public off(action: string, callBack: (event: InanoSQLDatabaseEvent) => void, selectTable?: string): void {
        // event scope: explicit table arg, else selected table when it's a string
        let l: string = selectTable || (typeof this.selectedTable !== "string" ? "" : this.selectedTable) as string;
        const selDB = this.selectedDB;
        this.maybeCreateEventObject(selDB);
        this.doFilter<offEventFilter>(selDB, "offEvent", { res: { action, callback: callBack } }, (newEvent) => {
            switch (newEvent.res.action) {
                case "connect":
                case "ready":
                case "disconnect":
                case "peer change":
                case "slow query":
                    this.events[selDB].Core["*"].off(newEvent.res.action, newEvent.res.callback);
                    break;
                case "select":
                case "change":
                case "delete":
                case "upsert":
                case "*":
                    // first path segment is the table bucket, the rest is a nested path
                    const table = resolvePath(l);
                    if (!this.events[selDB][table[0]]) {
                        this.events[selDB][table[0]] = {
                            "*": new ReallySmallEvents()
                        };
                    }
                    const nestedPath = table.filter((v, i) => i > 0).join(".") || "*";
                    if (!this.events[selDB][table[0]][nestedPath]) {
                        this.events[selDB][table[0]][nestedPath] = new ReallySmallEvents();
                    }
                    this.events[selDB][table[0]][nestedPath].off(newEvent.res.action, newEvent.res.callback);
                    break;
                default:
                    // unknown action: let plugins resolve it via the customEvent filter
                    new Promise((res, rej) => {
                        this.doFilter<customEventFilter>(selDB, "customEvent", { res: { nameSpace: "", path: "*" }, selectedTable: l, action: action, on: true }, res, rej);
                    }).then((evData: customEventFilter) => {
                        if (evData.res.nameSpace) {
                            if (!this.events[selDB][evData.res.nameSpace]) {
                                this.events[selDB][evData.res.nameSpace] = {
                                    "*": new ReallySmallEvents()
                                };
                            }
                            if (!this.events[selDB][evData.res.nameSpace][evData.res.path]) {
                                this.events[selDB][evData.res.nameSpace][evData.res.path] = new ReallySmallEvents();
                            }
                            this.events[selDB][evData.res.nameSpace][evData.res.path].off(newEvent.res.action, newEvent.res.callback);
                        } else {
                            throw new Error(`Invalid event "${action}"!`);
                        }
                        this._refreshEventChecker();
                    });
            }
            this._refreshEventChecker();
        }, noop);
    }
public _refreshEventChecker(): InanoSQLInstance {
if (!this.dbs[this.selectedDB]) return this;
this.getDB().state.hasAnyEvents = Object.keys(this.events[this.selectedDB]).reduce((prev, cur) => {
if (prev === true) return true;
const length = Object.keys(this.events[this.selectedDB][cur]).reduce((p, key) => {
return Object.keys(this.events[this.selectedDB][cur][key].eventListeners).length + p;
}, 0);
return length > 0 ? true : prev;
}, false as boolean);
return this;
}
    /** Run a named View ("v") declared on the selected table. */
    public getView(viewName: string, viewArgs: any): Promise<any> {
        return this._doAV("v", this.selectedTable as any, viewName, viewArgs);
    }
    /** Run a named Action ("a") declared on the selected table. */
    public doAction(actionName: string, actionArgs: any): Promise<any> {
        return this._doAV("a", this.selectedTable as any, actionName, actionArgs);
    }
    /**
     * Shared implementation for Actions and Views: run the "actionView"
     * filter, locate the named entry on the table (the reduce keeps the LAST
     * declaration when names collide), clean the caller args against its
     * declared spec, and invoke it. Rejects when no string table is selected
     * or the name isn't found.
     */
    public _doAV(AVType: "a" | "v", table: string, AVName: string, AVArgs: any): Promise<any> {
        if (typeof this.selectedTable !== "string") return Promise.reject("Can't do Action/View with selected table!");
        return new Promise((res, rej) => {
            this.doFilter<actionViewFilter>(this.selectedDB, "actionView", {
                res: {
                    AVType,
                    table,
                    AVName,
                    AVArgs
                }
            }, res, rej);
        }).then((actionOrView: actionViewFilter) => {
            const key = actionOrView.res.AVType === "a" ? "actions" : "views";
            // last matching declaration wins
            const selAV: InanoSQLActionOrView | null = this.getDB()._tables[actionOrView.res.table][key].reduce((prev, cur) => {
                if (cur.name === actionOrView.res.AVName) return cur;
                return prev;
            }, null as any);
            if (!selAV) {
                return new Promise((res, rej) => rej(`${actionOrView.res.AVType} "${actionOrView.res.AVName}" Not Found!`));
            }
            return selAV.call(selAV.args ? cleanArgs(this.selectedDB, selAV.args, actionOrView.res.AVArgs, this) : {}, this);
        });
    }
public query(action: string | ((nSQL: InanoSQLInstance) => InanoSQLQuery), args?: any): InanoSQLQueryBuilder {
if (this.selectedDB && typeof this.selectTable === "string") {
const av = this.getDB().state.activeAV;
this.getDB().state.activeAV = "";
return new _nanoSQLQueryBuilder(this.selectedDB, this, this.selectedTable, action, args, av);
} else {
return new _nanoSQLQueryBuilder(this.selectedDB, this, this.selectedTable, action, args, "");
}
}
    /**
     * Execute a query object. String-table queries require a connected
     * database and pass through the "query" filter first; temp-table queries
     * (arrays / functions) execute directly. Rows stream to `onRow`, then
     * `complete` or `error` fires once.
     */
    public triggerQuery(databaseID: string|undefined, query: InanoSQLQuery, onRow: (row: any) => void, complete: () => void, error: (err: string) => void): void {
        const execQuery = (setQuery) => {
            new _nanoSQLQuery(databaseID, this, setQuery.res, (row) => {
                onRow(row);
            }, complete, error);
        };
        if (typeof query.table === "string") {
            if (!this.getDB(databaseID).state.connected) {
                error("nSQL: Can't do a query before the database is connected!");
                return;
            }
            this.doFilter<queryFilter>(databaseID, "query", { res: query }, execQuery, error);
        } else {
            // temp tables (arrays, functions) skip the query filter
            execQuery({ res: query });
        }
    }
    /**
     * Fan an event out to registered listeners: the wildcard "*" namespace
     * (unless suppressed), then the event's target namespace, matching either
     * every path ("_all_") or the event's specific path. Skipped entirely
     * when no listeners exist (state.hasAnyEvents fast path).
     */
    public triggerEvent(databaseID: string|undefined, eventData: InanoSQLDatabaseEvent, ignoreStarTable?: boolean): InanoSQLInstance {
        if (!databaseID) return this;
        if (!this.events[databaseID]) return this;
        this.doFilter<eventFilter>(databaseID, "event", { res: eventData }, (event) => {
            if (this.getDB(databaseID).state.hasAnyEvents) {
                // defer the actual dispatch off the current call stack
                setFast(() => {
                    event.res.events.forEach((evnt) => {
                        if (!ignoreStarTable) {
                            Object.keys(this.events[databaseID]["*"]).forEach((path) => {
                                this.events[databaseID]["*"][path].trigger(evnt, event.res);
                            });
                        }
                        if (!this.events[databaseID][event.res.target]) return;
                        if (event.res.path === "_all_") {
                            Object.keys(this.events[databaseID][event.res.target]).forEach((path) => {
                                this.events[databaseID][event.res.target][path].trigger(evnt, event.res);
                            });
                        } else {
                            if (!this.events[databaseID][event.res.target][event.res.path]) return;
                            this.events[databaseID][event.res.target][event.res.path].trigger(evnt, event.res);
                        }
                    });
                });
            }
        }, (err) => {
            // a filter vetoed the event; log and move on
            console.log("Event suppressed", err);
        });
        return this;
    }
    // Throttled per-(database, table) row-count writers used by saveCount(),
    // keyed by databaseID + tableName.
    private _countTimers: {
        [key: string]: (nSQL: InanoSQLInstance ,dbId: string, tableName: string) => void;
    } = {};
    /**
     * Persist a table's row count into _util (key "total_<tableId>").
     * Internal tables (leading "_") are skipped. With a `complete` callback
     * the write happens immediately; without one it's throttled to at most
     * once per second per database+table.
     */
    public saveCount(databaseID: string, tableName: string, complete?: (err?: any) => void) {
        if (tableName.indexOf("_") === 0) {
            if (complete) complete();
            return;
        }
        // the actual upsert of the count into _util
        const doUpdate = (parent: InanoSQLInstance, dbID: string, table: string, done?: (err?: any) => void) => {
            const total = parent.getDB(dbID)._tables[table].count;
            const id = parent.getDB(dbID)._tables[table].id;
            parent.triggerQuery(dbID, {
                ...buildQuery(dbID, parent, "_util", "upsert"),
                actionArgs: {key: "total_" + id, value: total},
            }, noop, () => {
                if (done) done();
            }, (err) => {
                if (done) done(err);
                console.error("nSQL: Error updating table total.", err);
            });
        }
        // do now
        if (complete) {
            doUpdate(this, databaseID, tableName, complete);
            return;
        }
        // do later
        if (!this._countTimers[databaseID + tableName]) {
            this._countTimers[databaseID + tableName] = utils.throttle(undefined, doUpdate, 1000);
        }
        this._countTimers[databaseID + tableName](this, databaseID, tableName);
    }
public default(databaseID: string|undefined, replaceObj?: any, table?: string): { [key: string]: any } | Error {
if (!databaseID) return replaceObj;
replaceObj = replaceObj || {};
if (!table && typeof this.selectedTable !== "string") {
throw new Error("Must select table to generate defualts!");
}
table = (table || this.selectedTable as any) as string;
if (!this.getDB(databaseID)._tables[table]) {
throw new Error(`nSQL: Table "${table}" not found in database ${databaseID} for generating default object!`);
}
let error = "";
const resolveModel = (cols: InanoSQLTableColumn[], useObj?: any, nestedModel?: string): any => {
let newObj = {};
useObj = useObj || {};
if (nestedModel && nestedModel.length) {
if (nestedModel.indexOf("[]") !== -1) {
if (Array.isArray(useObj)) {
return useObj.map(a => resolveModel(cols, a, nestedModel.slice(0, nestedModel.lastIndexOf("[]"))));
} else {
return [];
}
}
}
let hasWildCard: boolean = false;
cols.forEach((m) => {
if (m.key === "*") {
hasWildCard = true;
return;
}
if (m.model) {
if (m.type.indexOf("[]") !== -1) {
const arr = typeof useObj !== "undefined" ? useObj[m.key] : [];
if (!Array.isArray(arr)) {
newObj[m.key] = [];
} else {
newObj[m.key] = arr.map(a => resolveModel(m.model as any[], a, m.type.slice(0, m.type.lastIndexOf("[]"))));
}
} else {
newObj[m.key] = resolveModel(m.model, typeof useObj !== "undefined" ? useObj[m.key] : undefined);
}
} else {
let value = typeof useObj[m.key] !== "undefined" ? cast(databaseID, m.type, useObj[m.key], false, this) : (typeof m.default === "function" ? m.default(replaceObj) : m.default);
if (typeof m.max !== "undefined" && value > m.max) {
error = `Data error, column ${m.key} can't be greater than ${m.max}!`
}
if (typeof m.min !== "undefined" && value < m.min) {
error = `Data error, column ${m.key} can't be less than ${m.min}!`
}
newObj[m.key] = value;
}
if (m.notNull && (newObj[m.key] === null || newObj[m.key] === undefined)) {
error = `Data error, ${m.key} cannot be null!`;
}
if (newObj[m.key] === null) {
newObj[m.key] = undefined;
}
});
if (error.length) {
throw new Error(error);
}
if (hasWildCard && useObj) {
const keys = cols.map(c => c.key);
Object.keys(useObj).filter(c => keys.indexOf(c) === -1).forEach((key) => {
newObj[key] = useObj[key];
});
}
return newObj;
};
return resolveModel(this.getDB(databaseID)._tables[table].columns, replaceObj);
}
/*
public batch(tables: {[table: string]: {[place: string]: {type: "put"|"del", data: any}}}, complete: () => void, error: (message) => void) {
}
*/
    /**
     * Stream raw table contents (or, with indexes=true, raw index entries)
     * out through `onRow`, bypassing the query layer. With indexes, each
     * entry in `tables` may be "table" or "table:index" and rows are emitted
     * under the name "table.index" as { indexId, rowId } pairs. An empty
     * `tables` list (non-index mode) means every table.
     */
    public rawDump(tables: string[], indexes: boolean, onRow: (table: string, row: { [key: string]: any }) => void): Promise<any> {
        const exportTables = indexes ? tables : Object.keys(this.getDB()._tables).filter(t => tables.length ? tables.indexOf(t) !== -1 : true);
        return chainAsync(exportTables, (table: string, i, nextTable, err) => {
            if (indexes) {
                // single index via "table:index", otherwise every index on the table
                const tableName = table.indexOf(":") !== -1 ? table.split(":")[0] : table;
                const tableIndexes = table.indexOf(":") !== -1 ? [table.split(":")[1]] : Object.keys(this.getDB()._tables[table].indexes);
                chainAsync(tableIndexes, (index, i, nextIdx, errIdx) => {
                    adapterFilters(this.selectedDB, this).readIndexKeys(tableName, index, "all", undefined, undefined, false, (key, id) => {
                        onRow(tableName + "." + index, { indexId: id, rowId: key });
                    }, nextIdx, errIdx);
                }).then(nextTable).catch(err);
            } else {
                adapterFilters(this.selectedDB, this).readMulti(table, "all", undefined, undefined, false, (row) => {
                    onRow(table, row);
                }, nextTable, err || noop);
            }
        });
    }
    /**
     * Write raw rows (or, with indexes=true, raw index entries keyed
     * "table.index") directly through the adapter, bypassing the query layer.
     * Uses the adapter's batch API when available, otherwise writes row by
     * row (requiring each row to carry its primary key). `onProgress`
     * receives a percentage across the combined total.
     */
    public rawImport(tables: { [table: string]: { [key: string]: any }[] }, indexes: boolean, onProgress?: (percent: number) => void): Promise<any> {
        let progress = 0;
        const totalLength = Object.keys(tables).reduce((p, c) => {
            return p += tables[c].length, p;
        }, 0);
        const selectedDB = this.selectedDB;
        const usableTables = Object.keys(this.getDB()._tables);
        // in row mode, silently drop tables the database doesn't know about
        const importTables: string[] = indexes ? Object.keys(tables) : Object.keys(tables).filter(t => usableTables.indexOf(t) !== -1);
        return chainAsync(importTables, (table, i, next, err) => {
            if (indexes) {
                // tableName:IndexName
                const tableName = table.split(".")[0];
                const indexName = table.split(".")[1];
                chainAsync(tables[table], (indexRow, ii, nextIdx, errIdx) => {
                    adapterFilters(selectedDB, this).addIndexValue(tableName, indexName, indexRow.rowId, indexRow.indexId, nextIdx, errIdx);
                }).then(next).catch(err);
            } else {
                const pk = this.getDB()._tables[table].pkCol;
                // this.getDB()._tables[table].count = tables[table].length;
                const batchFN = this.getDB().adapter.batch;
                if (batchFN) { // batch writes supported
                    const tableId = this.getDB()._tableIds[table];
                    batchFN.apply(this.getDB().adapter, [tableId, tables[table].map((r) => {
                        progress++;
                        if (onProgress) onProgress(Math.round((progress / totalLength) * 10000) / 100);
                        return {type: "put", data: r}
                    }), () => {
                        next();
                    }, err]);
                } else { // not supported
                    console.warn("Batch import not using transaction, transactions not supported by adapter!");
                    chainAsync(tables[table], (row, ii, nextRow, rowErr) => {
                        // rows without a primary key can't be written individually
                        if (!deepGet(pk, row) && rowErr) {
                            rowErr("No primary key found, can't import: " + JSON.stringify(row));
                            return;
                        }
                        adapterFilters(selectedDB, this).write(table, deepGet(pk, row), row, (newRow) => {
                            nextRow();
                            progress++;
                            if (onProgress) onProgress(Math.round((progress / totalLength) * 10000) / 100);
                        }, rowErr || noop);
                    }).then(() => {
                        // row-by-row path updates the persisted count when done
                        this.saveCount(selectedDB, table);
                        next();
                    }).catch(err);
                }
            }
        });
    }
public disconnect(dbID?: string) {
return new Promise((res, rej) => {
const Databases = dbID ? [dbID] : Object.keys(this.dbs);
chainAsync(Databases, (dbID, i, next, err) => {
this.doFilter<disconnectFilter>(dbID, "disconnect", {}, () => {
adapterFilters(dbID, this).disconnect(() => {
delete this.dbs[dbID];
next();
}, err);
}, err);
}).then(() => {
res();
}).catch(rej);
});
}
    /**
     * Invoke plugin-provided extensions via the "extend" filter; resolves
     * with whatever the handling plugin puts in `res`.
     */
    public extend(scope: string, ...args: any[]): any | nanoSQL {
        return new Promise((res, rej) => {
            this.doFilter<extendFilter>(this.selectedDB, "extend", { scope: scope, args: args, res: null }, res, rej);
        });
    }
/**
 * Bulk-import an array of plain row objects into the currently selected
 * table by running an "upsert" query per row.
 *
 * @param rows       rows to import
 * @param onProgress optional callback receiving percent complete (0-100,
 *                   two decimals — matches loadCSV's reporting)
 * @param parallel   when true, rows are imported concurrently instead of
 *                   sequentially
 * @returns promise resolving when every row has been upserted
 */
public loadJS(rows: { [key: string]: any }[], onProgress?: (percent: number) => void, parallel?: boolean): Promise<any[]> {
    const table = this.selectedTable;
    if (typeof table !== "string") {
        return Promise.reject("nSQL: Can't load JS into temporary table!");
    }
    const total = rows.length;
    let count = 0;
    const async = parallel ? allAsync : chainAsync;
    return async(rows, (row, i, next, err) => {
        this.triggerQuery(this.selectedDB, {
            ...buildQuery(this.selectedDB, this, table, "upsert"),
            actionArgs: row
        }, noop, () => {
            count++;
            // Round to two decimals (consistent with loadCSV) and guard the
            // empty-array case, which previously reported NaN.
            if (onProgress) onProgress(total ? Math.round((count / total) * 10000) / 100 : 100);
            next();
        }, err as any);
    });
}
/**
 * Serialize an array of row objects to CSV text.
 *
 * Strings and serialized objects are quoted with embedded quotes doubled,
 * booleans become "true"/"false", null/undefined become empty cells, and
 * numbers pass through unquoted.
 *
 * @param json         rows to serialize
 * @param printHeaders when true, emit a quoted header line first
 * @param useHeaders   explicit column list (skips auto-detection)
 * @returns CSV text, or "" for an empty input array
 */
public JSONtoCSV(json: any[], printHeaders?: boolean, useHeaders?: string[]): string {
    if (!json.length) {
        return "";
    }
    // Resolve column headers: either the caller-supplied list (fast path)
    // or the de-duplicated union of keys seen across every row.
    let columnHeaders: string[];
    if (useHeaders) {
        columnHeaders = useHeaders;
    } else {
        columnHeaders = [];
        for (const rowObj of json) {
            columnHeaders = Object.keys(rowObj).concat(columnHeaders);
        }
        columnHeaders = columnHeaders.filter((v, i, s) => s.indexOf(v) === i);
    }
    const lines: string[] = [];
    if (printHeaders) {
        lines.push(columnHeaders.map(c => `"${c}"`).join(","));
    }
    for (const row of json) {
        const cells = columnHeaders.map((k) => {
            const value = row[k];
            if (value === null || value === undefined) {
                return "";
            }
            if (typeof value === "string") {
                // Quote the cell, escaping embedded quotes by doubling them.
                return '"' + value.replace(/"/g, '""') + '"';
            }
            if (typeof value === "boolean") {
                return value === true ? "true" : "false";
            }
            // Objects are JSON-serialized and quoted; anything else (numbers)
            // passes through as-is.
            return typeof value === "object" ? '"' + JSON.stringify(value).replace(/"/g, '""') + '"' : value;
        });
        lines.push(cells.join(","));
    }
    return lines.join("\n");
}
/**
 * Minimal CSV record parser (character-by-character state machine).
 *
 * Honors quoted cells: doubled quotes ("") inside a quoted cell emit a
 * literal quote, commas split cells only while outside quotes, and a
 * newline (with optional preceding \r) starts a new row.  Only the FIRST
 * parsed row (ret[0]) is returned — callers feed this one line at a time.
 *
 * @param text a single CSV record (line) to parse
 * @returns array of cell strings for the first row found in `text`
 */
public csvToArray(text: string): any[] {
    // p = previous char, row = current row's cells, ret = all rows,
    // i = cell index, r = row index, s = "outside quotes" flag (!0 === true),
    // l = current char
    // tslint:disable-next-line
    let p = '', row = [''], ret = [row], i = 0, r = 0, s = !0, l;
    for (l of text) {
        // tslint:disable-next-line
        if ('"' === l) {
            // A quote directly following a quote is the escaped form ("");
            // emit one literal quote character.
            if (s && l === p) row[i] += l;
            s = !s;
            // tslint:disable-next-line
        } else if (',' === l && s) l = row[++i] = '';
        // tslint:disable-next-line
        else if ('\n' === l && s) {
            // Strip a trailing \r left in the cell by CRLF line endings.
            // tslint:disable-next-line
            if ('\r' === p) row[i] = row[i].slice(0, -1);
            // tslint:disable-next-line
            row = ret[++r] = [l = '']; i = 0;
        } else row[i] += l;
        p = l;
    }
    return ret[0];
}
/**
 * Parse a CSV string into an array of record objects.
 *
 * The first line is treated as a quoted header row providing the field
 * names; each subsequent non-blank line becomes one record.  Cell values
 * are coerced: "true"/"false" become booleans, cells starting with "{" or
 * "[" are JSON.parse'd (falling back to the raw string on error), quoted
 * cells are unquoted with doubled quotes collapsed, and anything else is
 * kept verbatim.  Empty cells are omitted from the record entirely.
 *
 * NOTE(review): the line splitter also splits on tab characters
 * (/\r?\n|\r|\t/), which breaks rows containing literal tabs inside
 * values — presumably intended to accept TSV-ish input, but confirm.
 *
 * @param csv    raw CSV text, header line first
 * @param rowMap optional transform applied to each parsed record
 * @returns array of parsed (and optionally mapped) records
 */
public CSVtoJSON(csv: string, rowMap?: (row: any) => any): any {
    let fields: Array<string> = [];
    return csv.split(/\r?\n|\r|\t/gmi).map((v, k) => {
        if (k === 0) {
            // Header row: strip the surrounding quotes from each field name.
            fields = v.split(",").map(s => s.substring(1, s.length - 1));
            return undefined;
        } else {
            if (String(v).trim().length < 1) return undefined;
            let row = this.csvToArray(v);
            if (!row) return undefined;
            row = row.map(r => r.trim());
            let i = fields.length;
            let record: { [key: string]: any } = {};
            while (i--) {
                if (row[i]) {
                    if (row[i] === "true" || row[i] === "false") {
                        record[fields[i]] = row[i] === "true";
                    } else if (row[i].indexOf("{") === 0 || row[i].indexOf("[") === 0) {
                        // tslint:disable-next-line
                        try {
                            record[fields[i]] = JSON.parse(row[i]);
                        } catch (e) {
                            record[fields[i]] = row[i];
                        }
                        // tslint:disable-next-line
                    } else if (row[i].indexOf('"') === 0) {
                        record[fields[i]] = row[i].slice(1, row[i].length - 1).replace(/\"\"/gmi, "\"");
                    } else {
                        record[fields[i]] = row[i];
                    }
                }
            }
            if (rowMap) {
                return rowMap(record);
            }
            return record;
        }
    }).filter(r => r);
}
/**
 * Bulk-import CSV text into the currently selected table by parsing it to
 * records and running an "upsert" query per record.
 *
 * @param csvString  CSV text (header line first)
 * @param rowMap     optional transform applied to each parsed record
 * @param onProgress optional callback receiving percent complete (0-100)
 * @param parallel   when true, rows are imported concurrently
 * @returns promise resolving when every record has been upserted
 */
public loadCSV(csvString: string, rowMap?: (row: any) => any, onProgress?: (percent: number) => void, parallel?: boolean): Promise<any[]> {
    // Importing into a temporary (query-result) table is not supported.
    const table = this.selectedTable;
    if (typeof table !== "string") {
        return Promise.reject("nSQL: Can't load CSV into temporary table!");
    }
    const parsedRows = this.CSVtoJSON(csvString, rowMap);
    const runner = parallel ? allAsync : chainAsync;
    let imported = 0;
    return runner(parsedRows, (record, idx, advance, onError) => {
        // Upsert each record through the normal query pipeline so filters,
        // indexes and events all fire as usual.
        this.triggerQuery(this.selectedDB, {
            ...buildQuery(this.selectedDB, this, table, "upsert"),
            actionArgs: record
        }, noop, () => {
            imported++;
            if (onProgress) onProgress(Math.round((imported / parsedRows.length) * 10000) / 100);
            advance();
        }, onError || noop);
    });
}
}
/**
 * Adapter that lets a nanoSQL v1-style fluent configuration callback
 * produce a v2 InanoSQLConfig object.
 *
 * The callback receives a chainable `nSQLv1(table)` function; each fluent
 * call records table models/actions/views or the global config, and the
 * accumulated state is assembled into the v2 config shape on return.
 *
 * @param doConfig callback invoked with the v1-style builder
 * @returns the equivalent v2 configuration object
 */
export const nSQLv1Config = (doConfig: (nSQLv1: (table?: string) => InanoSQLV1ConfigFn) => void): InanoSQLConfig => {
    const tableDefs: { [tableName: string]: InanoSQLTableConfig } = {};
    let config: any = {};
    let currentTable: string = "";
    const nSQLv1 = (table?: string) => {
        currentTable = table || currentTable;
        // Lazily register a table definition the first time it is selected.
        if (currentTable && !tableDefs[currentTable]) {
            tableDefs[currentTable] = {
                name: currentTable,
                model: {},
                indexes: {},
                actions: [],
                views: []
            };
        }
        return {
            model: (dataModels: { key: string, type: string, props?: any[], default?: any }[]) => {
                // Translate the v1 data-model array into the v2 "key:type"
                // map, collecting secondary indexes along the way.
                const indexDefs: InanoSQLTableConfig["indexes"] = {};
                tableDefs[currentTable].model = dataModels.reduce((acc, col) => {
                    const mapKey = col.key + ":" + col.type;
                    acc[mapKey] = {};
                    if (col.props) {
                        if (col.props.indexOf("pk") !== -1) {
                            acc[mapKey].pk = true;
                        }
                        if (col.props.indexOf("ai") !== -1) {
                            acc[mapKey].ai = true;
                        }
                        if (indexDefs && col.props.indexOf("idx") !== -1) {
                            indexDefs[mapKey] = {};
                        }
                    }
                    return acc;
                }, {});
                tableDefs[currentTable].indexes = indexDefs;
                return nSQLv1(table);
            },
            actions: (actions: InanoSQLActionOrView[]) => {
                tableDefs[currentTable].actions = actions;
                return nSQLv1(table);
            },
            views: (views: InanoSQLActionOrView[]) => {
                tableDefs[currentTable].views = views;
                return nSQLv1(table);
            },
            config: (obj: { [key: string]: any }) => {
                config = obj;
                return nSQLv1(table);
            },
            table: (ta?: string) => nSQLv1(ta),
            rowFilter: (callback: (row: any) => any) => {
                tableDefs[currentTable].filter = callback;
                return nSQLv1(table);
            }
        };
    };
    doConfig(nSQLv1);
    return {
        id: config.id || "nanoSQL_DB",
        ...config,
        tables: Object.keys(tableDefs).map(t => tableDefs[t]),
    };
}
/**
 * Shared singleton instance backing the module-level `nSQL()` helper.
 * @internal
 */
let _nanoSQLStatic = new nanoSQL();
/**
 * Module-level entry point: selects a table (by name, data array, or query
 * function) on the shared nanoSQL singleton and returns it for chaining.
 */
export const nSQL = (table?: string | any[] | ((where?: any[] | ((row: { [key: string]: any }, i?: number) => boolean)) => Promise<TableQueryResult>)) => {
    return _nanoSQLStatic.selectTable(table);
};
// Expose the library on the global object when running in a browser so it
// can be consumed without a module loader.
if (typeof window !== "undefined") {
    if (!window["@nano-sql"]) {
        window["@nano-sql"] = {};
    }
    const coreExports = {
        nSQL: nSQL,
        nanoSQL: nanoSQL,
        utilities: utils,
        nSQLv1Config
    };
    window["@nano-sql"].core = coreExports;
    // Flat alias mirroring the package name.
    window["@nano-sql/core"] = coreExports;
}
/*
// used to test browser adapters with live reload
let errors = 0;
console.log("Testing IndexedDB");
new nanoSQLAdapterTest(IndexedDB, []).test().then(() => {
console.log("Testing WebSQL");
new nanoSQLAdapterTest(WebSQL, []).test().then(() => {
console.log("Tests Complete");
}).catch((err) => {
console.error(err);
errors++;
});
}).catch((err) => {
console.error(err);
errors++;
});*/ | the_stack |
import * as fs from 'fs-extra';
import { compoundExpression, forEach, iff, list, methodCall, obj, print, ref, ret, set, str } from 'graphql-mapping-template';
import { graphqlName, plurality, toUpper } from 'graphql-transformer-common';
import AppSync from 'cloudform-types/types/appSync';
import { DocumentNode } from 'graphql';
import { Fn } from 'cloudform-types';
import { RelationalDBMappingTemplate } from './RelationalDBMappingTemplate';
import { ResourceConstants } from './ResourceConstants';
import { TemplateContext } from './RelationalDBSchemaTransformer';
// S3 location template for resolver VTL files; ${ResolverFileName} is
// substituted per-resolver via Fn.Sub.
const s3BaseUrl = 's3://${S3DeploymentBucket}/${S3DeploymentRootKey}/resolvers/${ResolverFileName}';
// Fn.Sub substitution key used to inject each resolver's file name.
const resolverFileName = 'ResolverFileName';
// Error message/type surfaced when the RDS data source returns an
// unexpected payload shape.
const rdsResponseErrorMessage = 'Invalid response from RDS DataSource. See info for the full response.';
const rdsResponseErrorType = 'InvalidResponse';
/**
* This Class is responsible for Generating the RDS Resolvers based on the
* GraphQL Schema + Metadata of the RDS Cluster (i.e. Primary Keys for Tables).
*
* It will generate the CRUDL+Q (Create, Retrieve, Update, Delete, List + Queries) Resolvers as
* Cloudform Resources so that they may be added on to the base template that the
* RelationDBTemplateGenerator creates.
*/
export class RelationalDBResolverGenerator {
document: DocumentNode;
typePrimaryKeyMap: Map<string, string>;
stringFieldMap: Map<string, string[]>;
intFieldMap: Map<string, string[]>;
resolverFilePath: string;
typePrimaryKeyTypeMap: Map<string, string>;
constructor(context: TemplateContext) {
this.document = context.schemaDoc;
this.typePrimaryKeyMap = context.typePrimaryKeyMap;
this.stringFieldMap = context.stringFieldMap;
this.intFieldMap = context.intFieldMap;
this.typePrimaryKeyTypeMap = context.typePrimaryKeyTypeMap;
}
/**
* Creates the CRUDL+Q Resolvers as a Map of Cloudform Resources. The output can then be
* merged with an existing Template's map of Resources.
*/
public createRelationalResolvers(resolverFilePath: string, improvePluralization: boolean) {
let resources = {};
this.resolverFilePath = resolverFilePath;
this.typePrimaryKeyMap.forEach((value: string, key: string) => {
const resourceName = key.replace(/[^A-Za-z0-9]/g, '');
resources = {
...resources,
...{ [resourceName + 'CreateResolver']: this.makeCreateRelationalResolver(key) },
...{ [resourceName + 'GetResolver']: this.makeGetRelationalResolver(key) },
...{ [resourceName + 'UpdateResolver']: this.makeUpdateRelationalResolver(key) },
...{ [resourceName + 'DeleteResolver']: this.makeDeleteRelationalResolver(key) },
...{ [resourceName + 'ListResolver']: this.makeListRelationalResolver(key, improvePluralization) },
};
// TODO: Add Guesstimate Query Resolvers
});
return resources;
}
/**
* Private Helpers to Generate the CFN Spec for the Resolver Resources
*/
/**
* Creates and returns the CFN Spec for the 'Create' Resolver Resource provided
* a GraphQL Type as the input
*
* @param type - the graphql type for which the create resolver will be created
* @param mutationTypeName - will be 'Mutation'
*/
private makeCreateRelationalResolver(type: string, mutationTypeName: string = 'Mutation') {
const tableName = this.getTableName(type);
const operationType = GRAPHQL_RESOLVER_OPERATION.Create;
const fieldName = this.getFieldName(type, operationType);
const createSql = this.generateInsertStatement(type);
const selectSql = this.generateSelectByPrimaryKeyStatement(type, operationType);
const reqFileName = `${mutationTypeName}.${fieldName}.req.vtl`;
const resFileName = `${mutationTypeName}.${fieldName}.res.vtl`;
const reqTemplate = print(
compoundExpression([
set(ref('cols'), list([])),
set(ref('vals'), list([])),
forEach(ref('entry'), ref(`ctx.args.create${tableName}Input.keySet()`), [
set(ref('discard'), ref(`cols.add($entry)`)),
set(ref('discard'), ref(`vals.add("'$ctx.args.create${tableName}Input[$entry]'")`)),
]),
set(ref('valStr'), ref('vals.toString().replace("[","(").replace("]",")")')),
set(ref('colStr'), ref('cols.toString().replace("[","(").replace("]",")")')),
RelationalDBMappingTemplate.rdsQuery({
statements: list([str(createSql), str(selectSql)]),
}),
]),
);
const resTemplate = print(ref('utils.toJson($utils.parseJson($utils.rds.toJsonString($ctx.result))[1][0])'));
fs.writeFileSync(`${this.resolverFilePath}/${reqFileName}`, reqTemplate, 'utf8');
fs.writeFileSync(`${this.resolverFilePath}/${resFileName}`, resTemplate, 'utf8');
let resolver = new AppSync.Resolver({
ApiId: Fn.Ref(ResourceConstants.PARAMETERS.AppSyncApiId),
DataSourceName: Fn.GetAtt(ResourceConstants.RESOURCES.RelationalDatabaseDataSource, 'Name'),
TypeName: mutationTypeName,
FieldName: fieldName,
RequestMappingTemplateS3Location: Fn.Sub(s3BaseUrl, {
[ResourceConstants.PARAMETERS.S3DeploymentBucket]: Fn.Ref(ResourceConstants.PARAMETERS.S3DeploymentBucket),
[ResourceConstants.PARAMETERS.S3DeploymentRootKey]: Fn.Ref(ResourceConstants.PARAMETERS.S3DeploymentRootKey),
[resolverFileName]: reqFileName,
}),
ResponseMappingTemplateS3Location: Fn.Sub(s3BaseUrl, {
[ResourceConstants.PARAMETERS.S3DeploymentBucket]: Fn.Ref(ResourceConstants.PARAMETERS.S3DeploymentBucket),
[ResourceConstants.PARAMETERS.S3DeploymentRootKey]: Fn.Ref(ResourceConstants.PARAMETERS.S3DeploymentRootKey),
[resolverFileName]: resFileName,
}),
}).dependsOn([ResourceConstants.RESOURCES.RelationalDatabaseDataSource]);
return resolver;
}
/**
* Creates and Returns the CFN Spec for the 'Get' Resolver Resource provided
* a GraphQL type
*
* @param type - the graphql type for which the get resolver will be created
* @param queryTypeName - will be 'Query'
*/
private makeGetRelationalResolver(type: string, queryTypeName: string = 'Query') {
const operationType = GRAPHQL_RESOLVER_OPERATION.Get;
const fieldName = this.getFieldName(type, operationType);
const selectSql = this.generateSelectByPrimaryKeyStatement(type, operationType);
const reqFileName = `${queryTypeName}.${fieldName}.req.vtl`;
const resFileName = `${queryTypeName}.${fieldName}.res.vtl`;
const reqTemplate = print(
compoundExpression([
RelationalDBMappingTemplate.rdsQuery({
statements: list([str(selectSql)]),
}),
]),
);
const resTemplate: string = print(
compoundExpression([
set(ref('output'), ref('utils.rds.toJsonObject($ctx.result)')),
iff(
ref('output.isEmpty()'),
methodCall(ref('util.error'), str(rdsResponseErrorMessage), str(rdsResponseErrorType), obj({}), ref('output')),
),
set(ref('output'), ref('output[0]')),
iff(ref('output.isEmpty()'), ret()),
methodCall(ref('utils.toJson'), ref('output[0]')),
]),
);
fs.writeFileSync(`${this.resolverFilePath}/${reqFileName}`, reqTemplate, 'utf8');
fs.writeFileSync(`${this.resolverFilePath}/${resFileName}`, resTemplate, 'utf8');
let resolver = new AppSync.Resolver({
ApiId: Fn.Ref(ResourceConstants.PARAMETERS.AppSyncApiId),
DataSourceName: Fn.GetAtt(ResourceConstants.RESOURCES.RelationalDatabaseDataSource, 'Name'),
FieldName: fieldName,
TypeName: queryTypeName,
RequestMappingTemplateS3Location: Fn.Sub(s3BaseUrl, {
[ResourceConstants.PARAMETERS.S3DeploymentBucket]: Fn.Ref(ResourceConstants.PARAMETERS.S3DeploymentBucket),
[ResourceConstants.PARAMETERS.S3DeploymentRootKey]: Fn.Ref(ResourceConstants.PARAMETERS.S3DeploymentRootKey),
[resolverFileName]: reqFileName,
}),
ResponseMappingTemplateS3Location: Fn.Sub(s3BaseUrl, {
[ResourceConstants.PARAMETERS.S3DeploymentBucket]: Fn.Ref(ResourceConstants.PARAMETERS.S3DeploymentBucket),
[ResourceConstants.PARAMETERS.S3DeploymentRootKey]: Fn.Ref(ResourceConstants.PARAMETERS.S3DeploymentRootKey),
[resolverFileName]: resFileName,
}),
}).dependsOn([ResourceConstants.RESOURCES.RelationalDatabaseDataSource]);
return resolver;
}
/**
* Creates and Returns the CFN Spec for the 'Update' Resolver Resource provided
* a GraphQL type
*
* @param type - the graphql type for which the update resolver will be created
* @param mutationTypeName - will be 'Mutation'
*/
private makeUpdateRelationalResolver(type: string, mutationTypeName: string = 'Mutation') {
const tableName = this.getTableName(type);
const operationType = GRAPHQL_RESOLVER_OPERATION.Update;
const fieldName = this.getFieldName(type, operationType);
const updateSql = this.generateUpdateStatement(type);
const selectSql = this.generateSelectByPrimaryKeyStatement(type, operationType);
const reqFileName = `${mutationTypeName}.${fieldName}.req.vtl`;
const resFileName = `${mutationTypeName}.${fieldName}.res.vtl`;
const reqTemplate = print(
compoundExpression([
set(ref('updateList'), obj({})),
forEach(ref('entry'), ref(`ctx.args.update${tableName}Input.keySet()`), [
set(ref('discard'), ref(`updateList.put($entry, "'$ctx.args.update${tableName}Input[$entry]'")`)),
]),
set(ref('update'), ref(`updateList.toString().replace("{","").replace("}","")`)),
RelationalDBMappingTemplate.rdsQuery({
statements: list([str(updateSql), str(selectSql)]),
}),
]),
);
const resTemplate: string = print(
compoundExpression([
set(ref('output'), ref('utils.rds.toJsonObject($ctx.result)')),
iff(
ref('output.length() < 2'),
methodCall(ref('util.error'), str(rdsResponseErrorMessage), str(rdsResponseErrorType), obj({}), ref('output')),
),
set(ref('output'), ref('output[1]')),
iff(ref('output.isEmpty()'), ret()),
methodCall(ref('utils.toJson'), ref('output[0]')),
]),
);
fs.writeFileSync(`${this.resolverFilePath}/${reqFileName}`, reqTemplate, 'utf8');
fs.writeFileSync(`${this.resolverFilePath}/${resFileName}`, resTemplate, 'utf8');
let resolver = new AppSync.Resolver({
ApiId: Fn.Ref(ResourceConstants.PARAMETERS.AppSyncApiId),
DataSourceName: Fn.GetAtt(ResourceConstants.RESOURCES.RelationalDatabaseDataSource, 'Name'),
TypeName: mutationTypeName,
FieldName: fieldName,
RequestMappingTemplateS3Location: Fn.Sub(s3BaseUrl, {
[ResourceConstants.PARAMETERS.S3DeploymentBucket]: Fn.Ref(ResourceConstants.PARAMETERS.S3DeploymentBucket),
[ResourceConstants.PARAMETERS.S3DeploymentRootKey]: Fn.Ref(ResourceConstants.PARAMETERS.S3DeploymentRootKey),
[resolverFileName]: reqFileName,
}),
ResponseMappingTemplateS3Location: Fn.Sub(s3BaseUrl, {
[ResourceConstants.PARAMETERS.S3DeploymentBucket]: Fn.Ref(ResourceConstants.PARAMETERS.S3DeploymentBucket),
[ResourceConstants.PARAMETERS.S3DeploymentRootKey]: Fn.Ref(ResourceConstants.PARAMETERS.S3DeploymentRootKey),
[resolverFileName]: resFileName,
}),
}).dependsOn([ResourceConstants.RESOURCES.RelationalDatabaseDataSource]);
return resolver;
}
/**
* Creates and Returns the CFN Spec for the 'Delete' Resolver Resource provided
* a GraphQL type
*
* @param type - the graphql type for which the delete resolver will be created
* @param mutationTypeName - will be 'Mutation'
*/
private makeDeleteRelationalResolver(type: string, mutationTypeName: string = 'Mutation') {
const operationType = GRAPHQL_RESOLVER_OPERATION.Delete;
const fieldName = this.getFieldName(type, operationType);
const selectSql = this.generateSelectByPrimaryKeyStatement(type, operationType);
const deleteSql = this.generateDeleteStatement(type);
const reqFileName = `${mutationTypeName}.${fieldName}.req.vtl`;
const resFileName = `${mutationTypeName}.${fieldName}.res.vtl`;
const reqTemplate = print(
compoundExpression([
RelationalDBMappingTemplate.rdsQuery({
statements: list([str(selectSql), str(deleteSql)]),
}),
]),
);
const resTemplate: string = print(
compoundExpression([
set(ref('output'), ref('utils.rds.toJsonObject($ctx.result)')),
iff(
ref('output.isEmpty()'),
methodCall(ref('util.error'), str(rdsResponseErrorMessage), str(rdsResponseErrorType), obj({}), ref('output')),
),
set(ref('output'), ref('output[0]')),
iff(ref('output.isEmpty()'), ret()),
methodCall(ref('utils.toJson'), ref('output[0]')),
]),
);
fs.writeFileSync(`${this.resolverFilePath}/${reqFileName}`, reqTemplate, 'utf8');
fs.writeFileSync(`${this.resolverFilePath}/${resFileName}`, resTemplate, 'utf8');
let resolver = new AppSync.Resolver({
ApiId: Fn.Ref(ResourceConstants.PARAMETERS.AppSyncApiId),
DataSourceName: Fn.GetAtt(ResourceConstants.RESOURCES.RelationalDatabaseDataSource, 'Name'),
TypeName: mutationTypeName,
FieldName: fieldName,
RequestMappingTemplateS3Location: Fn.Sub(s3BaseUrl, {
[ResourceConstants.PARAMETERS.S3DeploymentBucket]: Fn.Ref(ResourceConstants.PARAMETERS.S3DeploymentBucket),
[ResourceConstants.PARAMETERS.S3DeploymentRootKey]: Fn.Ref(ResourceConstants.PARAMETERS.S3DeploymentRootKey),
[resolverFileName]: reqFileName,
}),
ResponseMappingTemplateS3Location: Fn.Sub(s3BaseUrl, {
[ResourceConstants.PARAMETERS.S3DeploymentBucket]: Fn.Ref(ResourceConstants.PARAMETERS.S3DeploymentBucket),
[ResourceConstants.PARAMETERS.S3DeploymentRootKey]: Fn.Ref(ResourceConstants.PARAMETERS.S3DeploymentRootKey),
[resolverFileName]: resFileName,
}),
}).dependsOn([ResourceConstants.RESOURCES.RelationalDatabaseDataSource]);
return resolver;
}
/**
* Creates and Returns the CFN Spec for the 'List' Resolver Resource provided
* a GraphQL type
*
* @param type - the graphql type for which the list resolver will be created
* @param queryTypeName - will be 'Query'
*/
private makeListRelationalResolver(type: string, improvePluralization: boolean, queryTypeName: string = 'Query') {
const fieldName = graphqlName(GRAPHQL_RESOLVER_OPERATION.List + plurality(toUpper(type), improvePluralization));
const selectSql = this.generateSelectStatement(type);
const reqFileName = `${queryTypeName}.${fieldName}.req.vtl`;
const resFileName = `${queryTypeName}.${fieldName}.res.vtl`;
const reqTemplate = print(
RelationalDBMappingTemplate.rdsQuery({
statements: list([str(selectSql)]),
}),
);
const resTemplate = print(ref('utils.toJson($utils.rds.toJsonObject($ctx.result)[0])'));
fs.writeFileSync(`${this.resolverFilePath}/${reqFileName}`, reqTemplate, 'utf8');
fs.writeFileSync(`${this.resolverFilePath}/${resFileName}`, resTemplate, 'utf8');
let resolver = new AppSync.Resolver({
ApiId: Fn.Ref(ResourceConstants.PARAMETERS.AppSyncApiId),
DataSourceName: Fn.GetAtt(ResourceConstants.RESOURCES.RelationalDatabaseDataSource, 'Name'),
TypeName: queryTypeName,
FieldName: fieldName,
RequestMappingTemplateS3Location: Fn.Sub(s3BaseUrl, {
[ResourceConstants.PARAMETERS.S3DeploymentBucket]: Fn.Ref(ResourceConstants.PARAMETERS.S3DeploymentBucket),
[ResourceConstants.PARAMETERS.S3DeploymentRootKey]: Fn.Ref(ResourceConstants.PARAMETERS.S3DeploymentRootKey),
[resolverFileName]: reqFileName,
}),
ResponseMappingTemplateS3Location: Fn.Sub(s3BaseUrl, {
[ResourceConstants.PARAMETERS.S3DeploymentBucket]: Fn.Ref(ResourceConstants.PARAMETERS.S3DeploymentBucket),
[ResourceConstants.PARAMETERS.S3DeploymentRootKey]: Fn.Ref(ResourceConstants.PARAMETERS.S3DeploymentRootKey),
[resolverFileName]: resFileName,
}),
}).dependsOn([ResourceConstants.RESOURCES.RelationalDatabaseDataSource]);
return resolver;
}
/**
* Generate the table name to use on sql statements
*
* @param type - the graphql type to infer the table name
* @returns string with the table name
*/
private getTableName(type: string): string {
return toUpper(type);
}
/**
* Using the CRUDL+Q and the graphql type generate the graphql operation name
*
* @param type - the graphql type to infer the table name
* @param operationType The CRUDL+Q (Create, Retrieve, Update, Delete, List + Queries) operation name
*
* @returns string with the graphql operation name
*/
private getFieldName(type: string, operationType: GRAPHQL_RESOLVER_OPERATION): string {
const tableName = this.getTableName(type);
return graphqlName(`${operationType}${tableName}`);
}
/**
* Generate the primary key column name to use on sql statements
*
* @param type - the graphql type to get the primary key
* @returns string with the table name
*/
private getTablePrimaryKey(type: string): string {
return this.typePrimaryKeyMap.get(type);
}
/**
* Check if the type of the primary key is string to apply different transformation on sql statements
*
* @param type - the graphql type to check
* @returns boolean true if the primary key is a string type, otherwise false
*/
private isPrimaryKeyAStringType(type: string): boolean {
return this.typePrimaryKeyTypeMap.get(type).includes('String');
}
/**
* Generate the select sql statement to retrieve all rows
*
* @param type - the graphql type
* @returns string with the sql statement
*/
private generateSelectStatement(type: string): string {
const tableName = this.getTableName(type);
return `SELECT * FROM ${tableName}`;
}
/**
* Generate the select sql statement filter by the primary key
*
* @param type - the graphql type
* @param operationType The CRUDL+Q (Create, Retrieve, Update, Delete, List + Queries) operation name
* @returns string with the sql statement
*/
private generateSelectByPrimaryKeyStatement(type: string, operationType: GRAPHQL_RESOLVER_OPERATION): string {
const tableName = this.getTableName(type);
const primaryKey = this.getTablePrimaryKey(type);
const hasToAppendOperationInput = ![GRAPHQL_RESOLVER_OPERATION.Get, GRAPHQL_RESOLVER_OPERATION.Delete].includes(operationType);
const operationInput = hasToAppendOperationInput ? `${operationType}${tableName}Input.` : '';
if (this.isPrimaryKeyAStringType(type)) {
return `SELECT * FROM ${tableName} WHERE ${primaryKey}=\'$ctx.args.${operationInput}${primaryKey}\'`;
}
return `SELECT * FROM ${tableName} WHERE ${primaryKey}=$ctx.args.${operationInput}${primaryKey}`;
}
/**
* Generate the insert sql statement
*
* @param type - the graphql type
* @returns string with the sql statement
*/
private generateInsertStatement(type: string): string {
const tableName = this.getTableName(type);
return `INSERT INTO ${tableName} $colStr VALUES $valStr`;
}
/**
* Generate the update sql statement
*
* @param type - the graphql type
* @returns string with the sql statement
*/
private generateUpdateStatement(type: string): string {
const tableName = this.getTableName(type);
const primaryKey = this.getTablePrimaryKey(type);
if (this.isPrimaryKeyAStringType(type)) {
return `UPDATE ${type} SET $update WHERE ${primaryKey}=\'$ctx.args.update${tableName}Input.${primaryKey}\'`;
}
return `UPDATE ${type} SET $update WHERE ${primaryKey}=$ctx.args.update${tableName}Input.${primaryKey}`;
}
/**
* Generate the delete sql statement
*
* @param type - the graphql type
* @returns string with the sql statement
*/
private generateDeleteStatement(type: string): string {
const primaryKey = this.getTablePrimaryKey(type);
if (this.isPrimaryKeyAStringType(type)) {
return `DELETE FROM ${type} WHERE ${primaryKey}=\'$ctx.args.${primaryKey}\'`;
}
return `DELETE FROM ${type} WHERE ${primaryKey}=$ctx.args.${primaryKey}`;
}
}
/**
 * CRUDL operation prefixes used to build GraphQL field names
 * (e.g. `createUser`, `listUsers`).
 */
enum GRAPHQL_RESOLVER_OPERATION {
  Create = 'create',
  Delete = 'delete',
  Get = 'get',
  List = 'list',
  Update = 'update',
}
import { generateId } from '@/common/js/util';
import BkFlow from '@blueking/bkflow.js/lib/index';
import { Component, Emit, Prop, PropSync, Ref, Vue, Watch } from 'vue-property-decorator';
import { confirmModelOverview, getModelOverview } from '../../Api';
import { DataModelManage, IStepsManage } from './IStepsManage';
let treeInstance = null;
@Component
export default class DataModelPreview extends DataModelManage.IStepsManage {
/**
 * Node header rendering configuration, keyed by node type: icon class plus
 * which node properties supply the displayed name and alias.
 */
public tableSetting = {
    dimension_table: {
        title: '',
        icon: 'icon-dimension-model',
        nameKey: 'model_name',
        aliasKey: 'model_alias',
    },
    fact_table: {
        title: '主表',
        icon: 'icon-fact-model',
        nameKey: 'model_name',
        aliasKey: 'model_alias',
    },
    calculation_atom: {
        title: '指标统计口径',
        icon: 'icon-statistic-caliber',
        nameKey: 'calculation_atom_name',
        aliasKey: 'calculation_atom_alias',
    },
    indicator: {
        title: '指标',
        icon: 'icon-quota',
        nameKey: 'indicator_name',
        aliasKey: 'indicator_alias',
    },
};
/**
 * Field row rendering configuration, keyed by field category
 * (dimension / primary key / measure): icon class and accent color.
 */
public nodeSetting = {
    dimension: {
        icon: 'icon-dimens',
        color: '#4BC7AD',
    },
    primary_key: {
        icon: 'icon-key-line',
        color: '#3A84FF',
    },
    measure: {
        icon: 'icon-measure-line',
        color: '#F1CB56',
    },
};
public isPreviewLoading = false;
// Node types whose body can be collapsed to a header-only view.
// NOTE(review): "collpse" is a pre-existing typo kept for compatibility.
public collpse = ['calculation_atom', 'indicator', 'fact_table', 'dimension_table'];
public nodes: IDataModelManage.IModelList[] = [];
public lines: any[] = [];
public nodeTreeData: any[] = [];
// Category presence flags derived from the loaded overview data.
public hasDimensionNode = false;
public hasCalculationAtomNode = false;
public hasIndicatorNode = false;
// node_id of the currently expanded node per category ('' = none).
public activeCalculationAtom = '';
public activeIndicator = '';
public activeDimensionTable = '';
public activeFactTable = '';
public dimensionNode: object = {};
// Tooltip state shared by the canvas hover handlers.
public tips = {
    isShow: false,
    instance: null,
    content: {
        name: '',
        alias: '',
    },
};
// Unique DOM id for the canvas mount point.
private uid = generateId('dataModelManager_preview_canvas_');
/**
 * Fetch the model overview (nodes + lines) and prepare it for canvas
 * rendering: detects which node categories exist, folds extended fields
 * into their join fields, sorts each node's render fields, builds the
 * node tree, and finally draws the canvas.
 */
public getModelOverviewData() {
    this.isPreviewLoading = true;
    getModelOverview(this.modelId)
        .then(res => {
            res.setDataFn(data => {
                this.$set(this, 'nodes', data.nodes || []);
                this.$set(this, 'lines', data.lines || []);
                // Check whether an associated dimension-table node exists
                const dimensionTable = data.nodes.find(node => node.node_type === 'dimension_table');
                this.hasDimensionNode = !!dimensionTable;
                dimensionTable && this.$set(this, 'dimensionNode', dimensionTable);
                // Check whether a calculation-atom node exists
                const calculationTable = data.nodes.find(node => node.node_type === 'calculation_atom');
                this.hasCalculationAtomNode = !!calculationTable;
                // Check whether an indicator node exists
                const indicatorTable = data.nodes.find(node => node.node_type === 'indicator');
                this.hasIndicatorNode = !!indicatorTable;
                // Field ordering: primary key => dimension => measure
                // (primary keys are handled explicitly in the comparator).
                const fieldCategoryValue = {
                    dimension: 2,
                    measure: 3,
                };
                // Prepare each node's fields: attach extended fields to their
                // join field and sort the remaining render fields.
                this.nodes.forEach(node => {
                    const fields = node.fields || [];
                    const extendFields = fields.filter(item => item.is_extended_field);
                    const renderFields = node.fields
                        .filter(item => !item.is_extended_field)
                        .map(field => Object.assign({}, field, {
                            extends: extendFields.filter(extend => extend.join_field_name === field.field_name),
                        })
                    );
                    // '__time__' always sinks to the end, primary keys float to
                    // the front, then dimension sorts before measure.
                    // NOTE(review): an unknown field_category yields NaN here and
                    // ends with unspecified ordering — confirm upstream data.
                    renderFields.sort((next, prev) => {
                        if (next.field_name === '__time__') {
                            return 1;
                        }
                        if (prev.field_name === '__time__') {
                            return -1;
                        }
                        if (next.is_primary_key && prev.is_primary_key) {
                            return 0;
                        }
                        if (next.is_primary_key && !prev.is_primary_key) {
                            return -1;
                        }
                        if (!next.is_primary_key && prev.is_primary_key) {
                            return 1;
                        }
                        return fieldCategoryValue[next.field_category] - fieldCategoryValue[prev.field_category];
                    });
                    node.render_fields = renderFields;
                });
                // Build the tree structure consumed by the canvas
                this.nodeTreeData = this.getNodeTreeData();
                // Mark the initially expanded node of each category
                this.hasCalculationAtomNode && (this.activeCalculationAtom = calculationTable.node_id);
                this.hasIndicatorNode && (this.activeIndicator = indicatorTable.node_id);
                this.hasDimensionNode && (this.activeDimensionTable = dimensionTable.node_id);
                const factTable = data.nodes.find(node => node.node_type === 'fact_table');
                this.activeFactTable = factTable?.node_id;
                // Draw the canvas
                this.renderNodeTree();
            });
        })
        ['finally'](() => {
            this.isPreviewLoading = false;
        });
}
/**
 * Build the tree structure consumed by the canvas renderer.
 *
 * Links every node to its children using the line list (from -> to) and
 * returns the root nodes: dimension tables when present, otherwise fact
 * tables.
 */
public getNodeTreeData(): any[] {
    const roots = [];
    // Index nodes by id and group line targets by their source node.
    const nodesById = {};
    for (const node of this.nodes) {
        nodesById[node.node_id] = node;
    }
    const childIdsBySource = {};
    for (const line of this.lines) {
        (childIdsBySource[line.from] = childIdsBySource[line.from] || []).push(line.to);
    }
    const rootType = this.hasDimensionNode ? 'dimension_table' : 'fact_table';
    for (const node of this.nodes) {
        const childIds = childIdsBySource[node.node_id] || [];
        // Drop references to ids that have no matching node.
        const resolved = childIds.map(id => nodesById[id]).filter(child => child);
        if (node.node_type === rootType) {
            node.children = resolved;
            roots.push(node);
        } else if (childIds.length) {
            node.children ? node.children.push(...resolved) : (node.children = resolved);
        }
    }
    return roots;
}
/**
 * Render a node's header HTML.
 *
 * A dimension-type model shown in the main-table slot gets the dimension
 * icon.  Previously this was done by mutating the shared tableSetting
 * entry, which permanently replaced the fact_table icon for every node
 * rendered afterwards; the icon is now resolved per call instead.
 *
 * @param node canvas node to render the header for
 */
public renderHeader(node) {
    const info = this.tableSetting[node.node_type];
    // Resolve the icon locally so the shared config is never mutated.
    const icon = node.node_type === 'fact_table' && node.model_type === 'dimension_table'
        ? 'icon-dimension-model'
        : info.icon;
    return (
        (info.title ? `<span class="type">${info.title}</span>` : '')
        + `<div class="node-header">
            <i class="header-icon ${icon}"></i>
            <div class="info" title="${node[info.nameKey]}(${node[info.aliasKey]})">
                <span class="info-text text-overflow">${node[info.nameKey]}</span>
                <span class="info-text text-overflow">${node[info.aliasKey]}</span>
            </div>
            ${this.collpse.includes(node.node_type) ? '<i class="toggle-icon icon-angle-double-up"></i>' : ''}
        </div>`
    );
}
/**
 * Render the HTML for a single field row, including any extended fields
 * that hang off a joined field.
 *
 * @param field master-table field to render
 */
public renderItem(field: IDataModelManage.IMasterTableField) {
    // Per-field lookup helpers: icon/color config, display name and alias.
    const settingFor = item => this.nodeSetting[item.is_primary_key ? 'primary_key' : item.field_category] || {};
    const displayName = item => (item.field_name === '__time__' ? '--' : item.field_name);
    const displayAlias = item => (item.field_alias ? `(${item.field_alias})` : '');
    const info = settingFor(field);
    const name = displayName(field);
    const alias = displayAlias(field);
    const extendFields = field['extends'] || [];
    if (!extendFields.length) {
        // Plain field: a single row.
        return `
            <div class="node-item" style="--color: ${info.color}" title="${name} ${alias}">
                <i class="item-icon ${info.icon}"></i>
                <span class="text-overflow">${name} ${alias}</span>
            </div>
        `;
    }
    // Joined field plus its extended fields, wrapped in an expandable block.
    let extendItems = `
        <div class="node-item join-item" title="${name} ${alias}">
            <i class="item-icon ${info.icon}"></i>
            <span class="text-overflow">${name} ${alias}</span>
        </div>
    `;
    for (const item of extendFields) {
        const itemInfo = settingFor(item);
        extendItems += `
            <div class="node-item extend-item" title="${displayName(item)} ${displayAlias(item)}">
                <i class="item-icon ${itemInfo.icon}"></i>
                <span class="text-overflow">${displayName(item)} ${displayAlias(item)}</span>
            </div>
        `;
    }
    return `<div class="expand-node-item" style="--color: ${info.color}">${extendItems}</div>`;
}
/**
 * Build per-node size/appearance configuration for the BkFlow canvas.
 *
 * Expanded nodes get a height derived from their field count; collapsible
 * nodes that are not the active one of their category render header-only
 * (44px).  Also tags each node with its node_id as the template key used
 * by the renderer.
 */
public getNodeConfig() {
    const headerHeight = 88;
    const itemHeight = 44;
    const collapsedHeight = 44;
    const expandedIds = [
        this.activeCalculationAtom,
        this.activeIndicator,
        this.activeDimensionTable,
        this.activeFactTable,
    ];
    return this.nodes.map(node => {
        const isCollapsed = this.collpse.includes(node.node_type) && !expandedIds.includes(node.node_id);
        // NOTE(review): height uses fields.length rather than
        // render_fields.length, so extended fields still contribute to the
        // expanded height — confirm this is intended.
        const height = isCollapsed ? collapsedHeight : headerHeight + node.fields.length * itemHeight;
        node.table_node_type = node.node_id;
        return {
            node_type: node.node_type,
            table_node_type: node.node_id,
            width: 320,
            height,
            radius: '2px',
        };
    });
}
/**
 * Render the node tree chart.
 *
 * Builds the group (legend) data for the fact-table / calculation-atom /
 * indicator sections, instantiates the BkFlow chart in readonly mode with
 * custom node rendering, and wires up click / hover handlers for node
 * expand-collapse and tooltip display. After rendering, the view and line
 * offsets are applied.
 */
public renderNodeTree() {
    // Captured so the BkFlow callbacks (which rebind `this`) can reach the component.
    const self = this;
    const groupData = [
        {
            id: 'factTable',
            text: '明细数据表',
            width: 0,
            background: '#DCDEE5',
            nodeDepth: this.hasDimensionNode ? 2 : 1,
            splitor: {
                show: false,
            },
        },
    ];
    this.hasCalculationAtomNode
        && groupData.push({
            id: 'calculationAtom',
            text: '指标统计口径',
            width: 0,
            background: '#DCDEE5',
            splitor: {
                show: true,
                /** Width or height of the splitor */
                weight: 4,
                background: 'rgba(255, 255, 255, 0.3)',
            },
        });
    this.hasCalculationAtomNode
        && this.hasIndicatorNode
        && groupData.push({
            id: 'indicator',
            text: '指标',
            width: 0,
            background: '#DCDEE5',
            splitor: {
                show: true,
                weight: 4,
                background: 'rgba(255, 255, 255, 0.3)',
            },
        });
    // When only the fact-table group exists, shift the nodes to re-center the chart.
    const offsetX = groupData.length === 1 ? (this.hasDimensionNode ? -400 : -100) : 0;
    treeInstance = new BkFlow('#' + this.uid, {
        mode: 'readonly',
        background: '#F0F1F5',
        lineLabel: false,
        nodeTemplateKey: 'table_node_type',
        autoBestTarget: false,
        nodeConfig: self.getNodeConfig(),
        renderVisibleArea: true,
        /** Currently only horizontal mode is supported */
        flexTree: {
            /** Initial position offset */
            offset: {
                x: offsetX,
                y: 0,
            },
            /** Spacing between leaf nodes on the same depth */
            depthSpacing: 30,
            /** Spacing between parent and child nodes */
            nodeSpacing: 248,
            /** Node alignment: mode = horizontal(top center bottom) | mode = vertical(left center right) */
            nodeAlign: 'center',
        },
        lineConfig: {
            canvasLine: false,
            color: '#C4C6CC',
            activeColor: '#C4C6CC',
        },
        tree: {
            formatNodePosition: true,
            mode: 'horizontal', // horizontal | vertical,
            horizontalSpacing: 0,
            verticalSpacing: 408,
            chartArea: {
                left: 0,
                top: 0,
            },
        },
        groupConfig: {
            enabled: true,
            /** Group label position: bottom|right|top|left; undefined|null hides the labels */
            position: 'bottom',
            data: groupData,
        },
        zoom: {
            scaleExtent: [0.8, 1],
            controlPanel: false,
            tools: [],
        },
        onNodeRender(node) {
            // Handle node expand/collapse: a collapsible node that is not the
            // active node of its type renders as a single summary row.
            if (
                self.collpse.includes(node.node_type)
                && !(
                    node.node_id === self.activeCalculationAtom
                    || node.node_id === self.activeIndicator
                    || node.node_id === self.activeDimensionTable
                    || node.node_id === self.activeFactTable
                )
            ) {
                const info = self.tableSetting[node.node_type] || {};
                return `
<div class="node-wrapper ${node.node_type}" id="${node.node_id}">
<div class="node-item collapse-item">
<i class="item-icon ${info.icon}"></i>
<span class="text-overflow">${node[info.nameKey]} (${node[info.aliasKey]})</span>
<i class="toggle-icon icon-angle-double-down"></i>
</div>
</div>
`;
            }
            // Expanded node: header plus one rendered row per field.
            const header = self.renderHeader(node);
            const fields = node.render_fields || [];
            let items = '';
            for (const field of fields) {
                items += self.renderItem(field);
            }
            return `
<div class="node-wrapper ${node.node_type}">
${header}
${items}
</div>
`;
        },
    })
        .on(
            'nodeClick',
            (node, event) => {
                // Toggle the active node per node type; clicking the already
                // active node collapses it again.
                const type = node.node_type;
                if (type === 'calculation_atom') {
                    this.activeCalculationAtom = this.activeCalculationAtom === node.node_id ? '' : node.node_id;
                    this.activeCalculationAtom && this.sendUserActionData({ name: '展开【指标统计口径】' });
                }
                if (type === 'indicator') {
                    this.activeIndicator = this.activeIndicator === node.node_id ? '' : node.node_id;
                    this.activeIndicator && this.sendUserActionData({ name: '展开【指标】' });
                }
                if (type === 'dimension_table') {
                    this.activeDimensionTable = this.activeDimensionTable === node.node_id ? '' : node.node_id;
                }
                if (type === 'fact_table') {
                    this.activeFactTable = this.activeFactTable === node.node_id ? '' : node.node_id;
                }
                // Re-render with the new expand/collapse heights, then re-anchor lines.
                treeInstance.updateTree(self.nodeTreeData, 'node_id', 'node_id', {
                    nodeConfig: self.getNodeConfig()
                });
                this.setNodeLinesOffset();
            },
            'node'
        )
        .on(
            'nodeMouseEnter',
            (node, event) => {
                // Show a tooltip with the full name/alias for collapsed nodes only.
                if (
                    self.collpse.includes(node.node_type)
                    && !(
                        node.node_id === self.activeCalculationAtom
                        || node.node_id === self.activeIndicator
                        || node.node_id === self.activeDimensionTable
                        || node.node_id === self.activeFactTable
                    )
                ) {
                    const keys = this.tableSetting[node.node_type];
                    self.tips.instance && self.tips.instance.destroy();
                    self.tips.instance = self.$bkPopover(document.getElementById(node.node_id), {
                        content: node[keys.nameKey] + '<br />' + node[keys.aliasKey],
                        zIndex: 9999,
                        trigger: 'manual',
                        boundary: 'window',
                        arrow: true,
                        interactive: true,
                        extCls: 'bk-data-model-preview',
                        placement: 'top',
                    });
                    self.tips.show = true;
                    self.$nextTick(() => {
                        self.tips.instance.show();
                    });
                }
            },
            'node'
        )
        .on(
            'nodeMouseLeave',
            (node, event) => {
                self.tips.instance && self.tips.instance.hide();
                self.$nextTick(() => {
                    self.tips.show = false;
                });
            },
            'node'
        );
    treeInstance.renderTree(this.nodeTreeData, 'node_id', 'node_id');
    // Apply the initial canvas offset
    this.setViewOffset();
    // Apply the per-line anchor offsets
    this.setNodeLinesOffset();
}
/**
 * Center the canvas on the fact-table (master) node after the initial render,
 * then refresh the tree and zoom out one step.
 */
public setViewOffset() {
    const storedNodes = treeInstance._nodesManager._store.nodes || [];
    const factNode = storedNodes.find(item => item.node_type === 'fact_table');
    const { width, height } = treeInstance._options.computedStyle;
    // Without dimension nodes the chart is narrower, so nudge it left a bit.
    const extraOffsetX = this.hasDimensionNode ? 0 : 80;
    const translateY = height > 0 ? height / 4 - factNode.y : 100;
    treeInstance.translate(width / 4 - factNode.x - extraOffsetX, translateY);
    treeInstance.updateTree(this.nodeTreeData, 'node_id', 'node_id', { nodeConfig: this.getNodeConfig() });
    treeInstance.zoomOut();
}
/**
 * Set per-line anchor offsets so that edges attach to the correct field row
 * (or collapsed header) of each node, depending on which node types are
 * currently expanded.
 *
 * Offsets are expressed in half-row units (halfItemHeight) below the node
 * header; `x: null` keeps the horizontal anchor computed by the chart.
 */
public setNodeLinesOffset() {
    const fromHeadHeight = 60;
    const toHeadHeight = 88;
    const halfItemHeight = 22;
    const newLines = [];
    // Collect the rendered fields of every node, keyed by node id.
    // (A previous revision also tracked the fact-table node id here, but it
    // was never used, so the dead local has been removed.)
    const nodeFieldMap = {};
    for (const node of this.nodes) {
        nodeFieldMap[node.node_id] = node.render_fields || [];
    }
    for (const line of this.lines) {
        const fromNode = line.from;
        const toNode = line.to;
        const offsetSetting = {
            from: {
                id: fromNode,
            },
            to: {
                id: toNode,
            },
        };
        if (this.activeFactTable) {
            if (Object.prototype.hasOwnProperty.call(line, 'from_field_name')) {
                // Anchor the line start at the vertical middle of the source
                // field row, accounting for expanded extend-fields above it.
                const index = nodeFieldMap[fromNode].findIndex(item => item.field_name === line.from_field_name);
                const expendFields = nodeFieldMap[fromNode]
                    .slice(0, index)
                    .reduce((arr, item) => arr.concat(...item['extends']), []);
                const offsetY = (index + expendFields.length + 1) * 2 - 1;
                offsetSetting.from.offset = {
                    x: null,
                    y: offsetY * halfItemHeight + fromHeadHeight,
                };
            }
            if (Object.prototype.hasOwnProperty.call(line, 'to_field_name')) {
                // Same anchoring rule for the target field row.
                const index = nodeFieldMap[toNode].findIndex(item => item.field_name === line.to_field_name);
                const expendFields = nodeFieldMap[toNode]
                    .slice(0, index)
                    .reduce((arr, item) => arr.concat(...item['extends']), []);
                const offsetY = (index + expendFields.length + 1) * 2 - 1;
                offsetSetting.to.offset = {
                    x: null,
                    y: offsetY * halfItemHeight + toHeadHeight,
                };
            }
        }
        // Offset for dimension-table -> fact-table lines
        if (/^dimension_table/.test(fromNode) && /^fact_table/.test(toNode) && this.activeFactTable) {
            offsetSetting.from.offset = {
                x: null,
                y: halfItemHeight,
            };
        }
        // Offset for fact-table -> calculation-atom lines
        if (/^fact_table/.test(fromNode) && /^calculation_atom/.test(toNode)) {
            offsetSetting.from.offset = {
                x: null,
                y: this.activeFactTable ? 58 : halfItemHeight,
            };
            if (this.activeCalculationAtom === toNode) {
                offsetSetting.to.offset = {
                    x: null,
                    y: 58,
                };
            }
        }
        // Offset for calculation-atom -> indicator lines
        if (/^calculation_atom/.test(fromNode) && /^indicator/.test(toNode)) {
            if (this.activeCalculationAtom === fromNode) {
                offsetSetting.from.offset = {
                    x: null,
                    y: 58,
                };
            }
            if (this.activeIndicator === toNode) {
                offsetSetting.to.offset = {
                    x: null,
                    y: 58,
                };
            }
        }
        newLines.push(offsetSetting);
    }
    treeInstance.updateLinePosition(newLines);
}
/**
 * Handler for the "next step" button: confirm the model overview on the
 * server, then advance the active tab and broadcast the model update.
 */
public nextStepClick() {
    confirmModelOverview(this.modelId).then(response => {
        // Bail out quietly when server-side validation fails.
        if (!response.validateResult(null, null, false)) {
            return;
        }
        const [activeTab] = this.DataModelTabManage.getActiveItem();
        activeTab.lastStep = response.data.step_id;
        this.DataModelTabManage.updateTabItem(activeTab);
        this.DataModelTabManage.dispatchEvent('updateModel', [
            {
                model_id: this.modelId,
                step_id: response.data.step_id,
            },
        ]);
    });
}
// Vue lifecycle hook: set up prev/next step management and load the
// model overview data as soon as the component is created.
public created() {
    this.initPreNextManage();
    this.getModelOverviewData();
}
} | the_stack |
import { CollisionPostSolveEvent, CollisionPreSolveEvent, PostCollisionEvent, PreCollisionEvent } from '../../Events';
import { clamp } from '../../Util/Util';
import { CollisionContact } from '../Detection/CollisionContact';
import { CollisionType } from '../CollisionType';
import { ContactConstraintPoint } from './ContactConstraintPoint';
import { Side } from '../Side';
import { Physics } from '../Physics';
import { CollisionSolver } from './Solver';
import { BodyComponent } from '../BodyComponent';
import { CollisionJumpTable } from '../Colliders/CollisionJumpTable';
/**
 * Collision solver producing a "realistic" rigid-body response using
 * sequential impulses (after Erin Catto's Box2D-Lite approach): velocity
 * and position constraints are solved iteratively per contact point, with
 * optional warm starting from the previous frame's accumulated impulses.
 */
export class RealisticSolver extends CollisionSolver {
  // Contacts processed during the previous frame, keyed by contact id
  lastFrameContacts: Map<string, CollisionContact> = new Map();

  // map contact id to contact points
  idToContactConstraint: Map<string, ContactConstraintPoint[]> = new Map();

  /**
   * Returns the constraint points for a contact id, or an empty array if
   * the contact is unknown.
   */
  getContactConstraints(id: string) {
    return this.idToContactConstraint.get(id) ?? [];
  }

  /**
   * Prepares contacts for solving: emits pre-collision events, builds or
   * re-uses contact constraint points (computing the effective normal and
   * tangent masses), prunes constraints for contacts that ended, and warm
   * starts impulses when enabled.
   */
  preSolve(contacts: CollisionContact[]) {
    for (const contact of contacts) {
      // Publish collision events on both participants
      const side = Side.fromDirection(contact.mtv);
      contact.colliderA.events.emit('precollision', new PreCollisionEvent(contact.colliderA, contact.colliderB, side, contact.mtv));
      contact.colliderA.events.emit(
        'beforecollisionresolve',
        new CollisionPreSolveEvent(contact.colliderA, contact.colliderB, side, contact.mtv, contact) as any
      );
      contact.colliderB.events.emit(
        'precollision',
        new PreCollisionEvent(contact.colliderB, contact.colliderA, Side.getOpposite(side), contact.mtv.negate())
      );
      contact.colliderB.events.emit(
        'beforecollisionresolve',
        new CollisionPreSolveEvent(contact.colliderB, contact.colliderA, Side.getOpposite(side), contact.mtv.negate(), contact) as any
      );

      // Match awake state for sleeping
      contact.matchAwake();
    }

    // Keep track of contacts that are done
    const finishedContactIds = Array.from(this.idToContactConstraint.keys());
    for (const contact of contacts) {
      // Remove all current contacts that are not done
      const index = finishedContactIds.indexOf(contact.id);
      if (index > -1) {
        finishedContactIds.splice(index, 1);
      }
      const contactPoints = this.idToContactConstraint.get(contact.id) ?? [];

      let pointIndex = 0;
      const bodyA = contact.colliderA.owner.get(BodyComponent);
      const bodyB = contact.colliderB.owner.get(BodyComponent);
      if (bodyA && bodyB) {
        for (const point of contact.points) {
          const normal = contact.normal;
          const tangent = contact.tangent;

          const aToContact = point.sub(bodyA.pos);
          const bToContact = point.sub(bodyB.pos);

          // Effective mass along the contact normal: combines linear inverse
          // mass with the rotational contribution of each body.
          const aToContactNormal = aToContact.cross(normal);
          const bToContactNormal = bToContact.cross(normal);

          const normalMass =
            bodyA.inverseMass +
            bodyB.inverseMass +
            bodyA.inverseInertia * aToContactNormal * aToContactNormal +
            bodyB.inverseInertia * bToContactNormal * bToContactNormal;

          // Effective mass along the contact tangent (used for friction).
          const aToContactTangent = aToContact.cross(tangent);
          const bToContactTangent = bToContact.cross(tangent);

          const tangentMass =
            bodyA.inverseMass +
            bodyB.inverseMass +
            bodyA.inverseInertia * aToContactTangent * aToContactTangent +
            bodyB.inverseInertia * bToContactTangent * bToContactTangent;

          // Preserve normal/tangent impulse by re-using the contact point if it's close
          if (contactPoints[pointIndex] && contactPoints[pointIndex]?.point?.squareDistance(point) < 4) {
            contactPoints[pointIndex].point = point;
            contactPoints[pointIndex].local = contact.localPoints[pointIndex];
          } else {
            // new contact if it's not close or doesn't exist
            contactPoints[pointIndex] = new ContactConstraintPoint(point, contact.localPoints[pointIndex], contact);
          }

          // Update contact point calculations
          contactPoints[pointIndex].aToContact = aToContact;
          contactPoints[pointIndex].bToContact = bToContact;
          contactPoints[pointIndex].normalMass = normalMass;
          contactPoints[pointIndex].tangentMass = tangentMass;

          pointIndex++;
        }
      }
      this.idToContactConstraint.set(contact.id, contactPoints);
    }

    // Clean up any contacts that did not occur last frame
    for (const id of finishedContactIds) {
      this.idToContactConstraint.delete(id);
    }

    // Warm contacts with accumulated impulse
    // Useful for tall stacks
    if (Physics.warmStart) {
      this.warmStart(contacts);
    } else {
      for (const contact of contacts) {
        const contactPoints = this.getContactConstraints(contact.id);
        for (const point of contactPoints) {
          point.normalImpulse = 0;
          point.tangentImpulse = 0;
        }
      }
    }
  }

  /**
   * Finishes the solve: updates body motion values used by the sleep
   * system, emits post-collision events, and stores this frame's contacts
   * for warm starting next frame.
   */
  postSolve(contacts: CollisionContact[]) {
    for (const contact of contacts) {
      const bodyA = contact.colliderA.owner.get(BodyComponent);
      const bodyB = contact.colliderB.owner.get(BodyComponent);
      if (bodyA && bodyB) {
        // Skip post solve for active+passive collisions
        if (bodyA.collisionType === CollisionType.Passive || bodyB.collisionType === CollisionType.Passive) {
          continue;
        }

        // Update motion values for sleeping
        bodyA.updateMotion();
        bodyB.updateMotion();
      }

      // Publish collision events on both participants
      const side = Side.fromDirection(contact.mtv);
      contact.colliderA.events.emit('postcollision', new PostCollisionEvent(contact.colliderA, contact.colliderB, side, contact.mtv));
      contact.colliderA.events.emit(
        'aftercollisionresolve',
        new CollisionPostSolveEvent(contact.colliderA, contact.colliderB, side, contact.mtv, contact) as any
      );
      contact.colliderB.events.emit(
        'postcollision',
        new PostCollisionEvent(contact.colliderB, contact.colliderA, Side.getOpposite(side), contact.mtv.negate())
      );
      contact.colliderB.events.emit(
        'aftercollisionresolve',
        new CollisionPostSolveEvent(contact.colliderB, contact.colliderA, Side.getOpposite(side), contact.mtv.negate(), contact) as any
      );
    }

    // Store contacts
    this.lastFrameContacts.clear();
    for (const c of contacts) {
      this.lastFrameContacts.set(c.id, c);
    }
  }

  /**
   * Warm up body's based on previous frame contact points
   * @param contacts
   */
  warmStart(contacts: CollisionContact[]) {
    for (const contact of contacts) {
      const bodyA = contact.colliderA.owner?.get(BodyComponent);
      const bodyB = contact.colliderB.owner?.get(BodyComponent);
      if (bodyA && bodyB) {
        const contactPoints = this.idToContactConstraint.get(contact.id) ?? [];
        for (const point of contactPoints) {
          if (Physics.warmStart) {
            // Re-apply last frame's accumulated impulse to converge faster
            const normalImpulse = contact.normal.scale(point.normalImpulse);
            const tangentImpulse = contact.tangent.scale(point.tangentImpulse);

            const impulse = normalImpulse.add(tangentImpulse);

            bodyA.applyImpulse(point.point, impulse.negate());
            bodyB.applyImpulse(point.point, impulse);
          } else {
            point.normalImpulse = 0;
            point.tangentImpulse = 0;
          }
        }
      }
    }
  }

  /**
   * Iteratively solve the position overlap constraint
   * @param contacts
   */
  solvePosition(contacts: CollisionContact[]) {
    for (let i = 0; i < Physics.positionIterations; i++) {
      for (const contact of contacts) {
        const bodyA = contact.colliderA.owner?.get(BodyComponent);
        const bodyB = contact.colliderB.owner?.get(BodyComponent);

        if (bodyA && bodyB) {
          // Skip solving active+passive
          if (bodyA.collisionType === CollisionType.Passive || bodyB.collisionType === CollisionType.Passive) {
            continue;
          }

          const constraints = this.idToContactConstraint.get(contact.id) ?? [];
          for (const point of constraints) {
            const normal = contact.normal;
            const separation = CollisionJumpTable.FindContactSeparation(contact, point.local);

            const steeringConstant = Physics.steeringFactor; //0.2;
            const maxCorrection = -5;
            const slop = Physics.slop; //1;

            // Clamp to avoid over-correction
            // Remember that we are shooting for 0 overlap in the end
            const steeringForce = clamp(steeringConstant * (separation + slop), maxCorrection, 0);
            const impulse = normal.scale(-steeringForce / point.normalMass);

            // This is a pseudo impulse, meaning we aren't doing a real impulse calculation
            // We adjust position and rotation instead of doing the velocity
            if (bodyA.collisionType === CollisionType.Active) {
              bodyA.pos = bodyA.pos.add(impulse.negate().scale(bodyA.inverseMass));
              bodyA.rotation -= point.aToContact.cross(impulse) * bodyA.inverseInertia;
            }

            if (bodyB.collisionType === CollisionType.Active) {
              bodyB.pos = bodyB.pos.add(impulse.scale(bodyB.inverseMass));
              bodyB.rotation += point.bToContact.cross(impulse) * bodyB.inverseInertia;
            }
          }
        }
      }
    }
  }

  /**
   * Iteratively solve the velocity constraint: friction impulses along the
   * tangent first, then restitution impulses along the normal, accumulating
   * and clamping impulses per contact point.
   */
  solveVelocity(contacts: CollisionContact[]) {
    for (let i = 0; i < Physics.velocityIterations; i++) {
      for (const contact of contacts) {
        const bodyA = contact.colliderA.owner?.get(BodyComponent);
        const bodyB = contact.colliderB.owner?.get(BodyComponent);

        if (bodyA && bodyB) {
          // Skip solving active+passive
          if (bodyA.collisionType === CollisionType.Passive || bodyB.collisionType === CollisionType.Passive) {
            continue;
          }

          const restitution = bodyA.bounciness * bodyB.bounciness;
          const friction = Math.min(bodyA.friction, bodyB.friction);

          const constraints = this.idToContactConstraint.get(contact.id) ?? [];

          for (const point of constraints) {
            const relativeVelocity = point.getRelativeVelocity();

            // Negate velocity in tangent direction to simulate friction
            const tangentVelocity = -relativeVelocity.dot(contact.tangent);
            let impulseDelta = tangentVelocity / point.tangentMass;

            // Clamping based in Erin Catto's GDC 2006 talk
            // Correct clamping https://github.com/erincatto/box2d-lite/blob/master/docs/GDC2006_Catto_Erin_PhysicsTutorial.pdf
            // Accumulated friction impulse is always between -uMaxFriction < dT < uMaxFriction
            // But deltas can vary
            const maxFriction = friction * point.normalImpulse;
            const newImpulse = clamp(point.tangentImpulse + impulseDelta, -maxFriction, maxFriction);
            impulseDelta = newImpulse - point.tangentImpulse;
            point.tangentImpulse = newImpulse;

            const impulse = contact.tangent.scale(impulseDelta);
            bodyA.applyImpulse(point.point, impulse.negate());
            bodyB.applyImpulse(point.point, impulse);
          }

          for (const point of constraints) {
            // Need to recalc relative velocity because the previous step could have changed vel
            const relativeVelocity = point.getRelativeVelocity();

            // Compute impulse in normal direction
            const normalVelocity = relativeVelocity.dot(contact.normal);
            // See https://en.wikipedia.org/wiki/Collision_response
            let impulseDelta = (-(1 + restitution) * normalVelocity) / point.normalMass;

            // Clamping based in Erin Catto's GDC 2014 talk
            // Accumulated impulse stored in the contact is always positive (dV > 0)
            // But deltas can be negative
            const newImpulse = Math.max(point.normalImpulse + impulseDelta, 0);
            impulseDelta = newImpulse - point.normalImpulse;
            point.normalImpulse = newImpulse;

            const impulse = contact.normal.scale(impulseDelta);
            bodyA.applyImpulse(point.point, impulse.negate());
            bodyB.applyImpulse(point.point, impulse);
          }
        }
      }
    }
  }
}
import {
AfterViewChecked,
AfterViewInit,
ChangeDetectionStrategy,
Component,
ElementRef,
EventEmitter,
HostBinding,
Input,
OnDestroy,
Output,
NgZone,
Self,
Optional,
ChangeDetectorRef
} from '@angular/core';
import {
NgControl,
ControlValueAccessor
} from '@angular/forms';
@Component({
  selector: 'input[soho-colorpicker]', // eslint-disable-line
  template: '<ng-content></ng-content>',
  changeDetection: ChangeDetectionStrategy.OnPush
})
export class SohoColorPickerComponent implements AfterViewInit, AfterViewChecked, OnDestroy {
  /**
   * Flag to force an update of the control after the view is created.
   */
  private runUpdatedOnCheck?: boolean;

  /** Value Accessor */
  private valueAccessor?: SohoColorPickerComponentValueAccessorDelegator;

  /**
   * Local variables
   */
  private jQueryElement: any;
  private colorpicker: any;
  private isEditable?: boolean = undefined;
  isUppercase?: boolean = undefined;
  isClearable?: boolean = undefined;
  private isDisabled?: boolean = undefined;
  private isReadOnly?: boolean = undefined;
  private isShowLabel?: boolean = undefined;
  isColorOnly?: boolean = undefined;
  hasCustomColor?: boolean = undefined;
  clearableTextString = '';

  // Defaults passed to the underlying Soho colorpicker on initialisation.
  private options?: SohoColorPickerOptions = {
    showLabel: false,
    editable: true,
    uppercase: true,
    colorOnly: false,
    clearable: true,
    customColors: false,
    disabled: false
  };

  /**
   * Indicates the color list
   */
  @Input() set colors(colors: Array<SohoColorOption> | undefined) {
    (this.options as any).colors = colors;
    if (this.colorpicker) {
      this.colorpicker.settings.colors = colors;
      this.markForRefresh();
    }
  }

  /**
   * Enables or disables the control
   */
  @Input() set disabled(value: boolean | undefined) {
    this.isDisabled = value;
    (this.options as any).disabled = value;
    if (this.colorpicker) {
      this.colorpicker.settings.disabled = value;
    }
    if (this.colorpicker) {
      if (value) {
        this.ngZone.runOutsideAngular(() => this.colorpicker.disable());
        this.isDisabled = true;
      } else {
        this.ngZone.runOutsideAngular(() => this.colorpicker.enable());
        this.isDisabled = false;
        this.isReadOnly = false;
      }
    }
  }
  get disabled() {
    return this.isDisabled;
  }

  /**
   * Enables or disables editing
   */
  @Input() set editable(value: boolean | undefined) {
    this.isEditable = value;
    (this.options as any).editable = value;
    if (this.colorpicker) {
      this.colorpicker.settings.editable = value;
    }
    if (this.colorpicker) {
      if (value) {
        this.ngZone.runOutsideAngular(() => this.colorpicker.enable());
        this.isEditable = true;
      } else {
        // Non-editable is implemented by putting the control into readonly mode.
        this.ngZone.runOutsideAngular(() => this.colorpicker.readonly());
        this.isEditable = false;
      }
    }
  }
  get editable() {
    return this.isEditable;
  }

  /**
   * Enables or disables forces upper case hex.
   */
  @Input() set uppercase(value: boolean) {
    this.isUppercase = value;
    (this.options as any).uppercase = value;
    if (this.colorpicker) {
      this.colorpicker.settings.uppercase = value;
      this.markForRefresh();
    }
  }

  /**
   * Enables or disables the clear option.
   */
  @Input() set clearable(value: boolean) {
    this.isClearable = value;
    (this.options as any).clearable = value;
    if (this.colorpicker) {
      this.colorpicker.settings.clearable = value;
      this.markForRefresh();
    }
  }

  /**
   * Enables or disables custom colors.
   */
  @Input() set customColors(value: boolean) {
    this.hasCustomColor = value;
    (this.options as any).customColors = value;
    if (this.colorpicker) {
      this.colorpicker.settings.customColors = value;
      this.markForRefresh();
    }
  }

  /**
   * Enables or disables the colorOnly option.
   */
  @Input() set colorOnly(value: boolean) {
    this.isColorOnly = value;
    (this.options as any).colorOnly = value;
    if (this.colorpicker) {
      this.colorpicker.settings.colorOnly = value;
      this.markForRefresh();
    }
  }

  /**
   * Sets the clearableText option.
   */
  @Input() set clearableText(value: string) {
    this.clearableTextString = value;
    (this.options as any).clearableText = value;
    if (this.colorpicker) {
      this.colorpicker.settings.clearableText = value;
      this.markForRefresh();
    }
  }

  /**
   * Add extra attributes like id's to the component
   */
  @Input() set attributes(value: Array<Object> | Object) {
    (this.options as any).attributes = value;
    if (this.colorpicker) {
      this.colorpicker.settings.attributes = value;
      this.markForRefresh();
    }
  }

  /**
   * Sets the control to readonly
   */
  @Input() set readonly(value: boolean | undefined) {
    this.isReadOnly = value;
    // 4.3.1 did not have this method in time add a safety check it works for future versions
    if (this.colorpicker && this.colorpicker.readonly) {
      if (value) {
        this.ngZone.runOutsideAngular(() => this.colorpicker.readonly());
        this.isReadOnly = true;
      } else {
        this.ngZone.runOutsideAngular(() => this.colorpicker.enable());
        this.isDisabled = false;
        this.isReadOnly = false;
      }
    }
  }
  get readonly() {
    return this.isReadOnly;
  }

  /**
   * Sets the control to show color label
   */
  @Input() set showLabel(value: boolean | undefined) {
    this.isShowLabel = value;
    (this.options as any).showLabel = value;
    if (this.colorpicker) {
      this.colorpicker.settings.showLabel = value;
      this.markForRefresh();
    }
  }
  get showLabel() {
    return this.isShowLabel;
  }

  /**
   * Called when the colorpicker value changes
   *
   * @todo replace override of native attribute
   */
  // eslint-disable-next-line @angular-eslint/no-output-native, @angular-eslint/no-output-rename
  @Output() change: EventEmitter<SohoColorPickerEvent> = new EventEmitter<SohoColorPickerEvent>();

  /**
   * Called when the colorpicker updates in some way.
   */
  @Output('updated') // eslint-disable-line
  updatedEvent: EventEmitter<JQuery.TriggeredEvent> = new EventEmitter<JQuery.TriggeredEvent>();

  /**
   * Public API
   */
  getLabelValue() {
    return this.colorpicker.getLabelValue();
  }

  getHexValue() {
    return this.colorpicker.getHexValue();
  }

  /**
   * Bind attributes to the host input element
   */
  @HostBinding('class.colorpicker') get isColorpicker() {
    return true;
  }

  /**
   * Creates an instance of SohoColorPickerComponent.
   *
   * @param element the element this component encapsulates.
   * @param ngZone the angualar zone for this component
   * @param ngControl any associated form control (optional)
   *
   */
  constructor(
    private element: ElementRef,
    private ngZone: NgZone,
    @Self() @Optional() public ngControl: NgControl,
    public ref: ChangeDetectorRef) {

    // Is the control using a form control and/or ngModel?
    if (this.ngControl) {
      // Wrap the accessor to allow updates to be pushed,
      // but also use the standard accessors provided by angular.
      this.valueAccessor =
        new SohoColorPickerComponentValueAccessorDelegator( // eslint-disable-line
          (this.ngControl.valueAccessor as any), this); // eslint-disable-line

      // ... change the accessor on the control to use ours.
      this.ngControl.valueAccessor = this.valueAccessor;
    }
  }

  ngAfterViewInit() {
    // call outside the angular zone so change detection
    // isn't triggered by the soho component.
    this.ngZone.runOutsideAngular(() => {
      // assign element to local variable
      this.jQueryElement = jQuery(this.element.nativeElement);

      // initialise the colorpicker control
      this.jQueryElement.colorpicker(this.options);

      // extract the api
      this.colorpicker = this.jQueryElement.data('colorpicker');

      // Add event binding
      this.jQueryElement
        .on('change', (event: JQuery.TriggeredEvent) => this.onChanged(event));

      this.runUpdatedOnCheck = true;
    });
  }

  ngAfterViewChecked() {
    if (this.runUpdatedOnCheck) {
      this.ngZone.runOutsideAngular(() => {
        // this.disabled = this.isDisabled;

        // We need to update the control AFTER the model
        // has been updated (assuming there is one), so
        // execute updated after angular has generated
        // the model and the view markup.
        setTimeout(() => this.updated());
        this.runUpdatedOnCheck = false;
      });
    }
  }

  /**
   * Event handler for the 'changed' event on the 'colorpicker' component.
   *
   *
   * @param event the standard jQuery event.
   */
  private onChanged(event: any) {
    // Retrieve the value from the 'colorpicker' component.
    const internalValue = this.colorpicker.element.val();

    // Make sure calls to angular are made in the right zone.
    this.ngZone.run(() => {
      // ... update the model (which will fire change
      // detection if required).
      this.colorpicker.setColor(internalValue);
      (this.valueAccessor as any).onChangeFn(internalValue);
      this.change.emit(event);
    });
  }

  /**
   * In case options are being bound asynchronously, you will need to trigger updated on
   * soho colorpicker control so it updates its value labels.
   */
  public updated(): SohoColorPickerComponent {
    if (this.colorpicker) {
      // Calling updated when an item is selected, looses the selection!
      this.ngZone.runOutsideAngular(() => this.colorpicker.updated());
    }
    return this;
  }

  onUpdated(event: JQuery.TriggeredEvent) {
    // Fire the event, in the angular zone.
    this.ngZone.run(() => this.updatedEvent.next(event));
  }

  /**
   * This function is called when the control status changes to or from "DISABLED".
   * Depending on the value, it will enable or disable the appropriate DOM element.
   */
  setDisabledState(isDisabled: boolean): void {
    this.disabled = isDisabled;
  }

  /**
   * Destroys any resources created by the component.
   *
   * Fix: the previous implementation never unbound the jQuery 'change'
   * handler registered in ngAfterViewInit (leaking the handler and the
   * component it closes over), and ran teardown inside the Angular zone.
   */
  ngOnDestroy() {
    this.ngZone.runOutsideAngular(() => {
      if (this.jQueryElement) {
        // Remove the event binding added in ngAfterViewInit to avoid leaks.
        this.jQueryElement.off('change');
        this.jQueryElement = undefined;
      }
      if (this.colorpicker) {
        this.colorpicker.destroy();
        this.colorpicker = null;
      }
    });
  }

  /**
   * Marks the components as requiring a rebuild after the next update.
   */
  markForRefresh() {
    // Run updated on the next updated check.
    this.runUpdatedOnCheck = true;

    // ... make sure the change detector kicks in, otherwise if the inputs
    // were change programmatially the component may not be eligible for
    // updating.
    this.ref.markForCheck();
  }
}
/**
* Provides a 'wrapper' around the {ControlValueAccessor} added by
* angular when handling changing color.s
*
* This class allows the {SohoColorPickerComponent} to interoperate with
* the {ControlValueAccessor}. Specifically, providing access to the
* onChange function, which we must call when the value of the colorpicker
* is modified.
*/
class SohoColorPickerComponentValueAccessorDelegator implements ControlValueAccessor {
/**
* The Function to call when the value of the control changes.
*/
public onChangeFn?: Function;
/**
* Creates an instance of SohoColorPickerComponentValueAccessorDelegate.
*
* @param delegate the value accessor
* @param colorpicker the colorpicker linked to the accessor
*/
constructor(
private delegate: ControlValueAccessor,
private colorpicker: SohoColorPickerComponent) { }
writeValue(value: any): void {
// Just pass it on.
this.delegate.writeValue(value);
this.colorpicker.markForRefresh();
}
registerOnChange(fn: any): void {
// Keep a reference to the change function, then we an call it.
this.onChangeFn = fn;
// Give the delegate a chance to store this too.
this.delegate.registerOnChange(fn);
}
registerOnTouched(fn: any): void {
this.delegate.registerOnTouched(fn);
}
setDisabledState?(isDisabled: boolean): void {
(this.delegate as any).setDisabledState(isDisabled);
}
} | the_stack |
import { assert } from "chai";
import { ITypeHandlers, IWrapHandlers, Options } from "../../lib/options";
describe("options", () => {
describe("#Options", () => {
// Each option is tested the same way: an explicitly provided value must pass
// through the Options constructor unchanged, and an omitted value must fall
// back to its documented default.
describe("aliasString", () => {
    it("should leave the specified property unchanged if valid", () => {
        const options = {
            aliasString: "=",
        };
        assert.strictEqual(
            new Options(options).aliasString,
            options.aliasString,
        );

        options.aliasString = "test";
        assert.strictEqual(
            new Options(options).aliasString,
            options.aliasString,
        );
    });

    it(
        "should return a validated version of the specified property" +
        " if undefined",
        () => {
            const options = {};
            assert.strictEqual(new Options(options).aliasString, "=");
        },
    );
});

describe("attributeString", () => {
    it("should leave the specified property unchanged if valid", () => {
        const options = {
            attributeString: "@",
        };
        assert.strictEqual(
            new Options(options).attributeString,
            options.attributeString,
        );

        options.attributeString = "test";
        assert.strictEqual(
            new Options(options).attributeString,
            options.attributeString,
        );
    });

    it(
        "should return a validated version of the specified property" +
        " if undefined",
        () => {
            const options = {};
            assert.strictEqual(
                new Options(options).attributeString,
                "@",
            );
        },
    );
});

describe("cdataInvalidChars", () => {
    it("should leave the specified property unchanged if valid", () => {
        const options = {
            cdataInvalidChars: false,
        };
        assert.strictEqual(
            new Options(options).cdataInvalidChars,
            options.cdataInvalidChars,
        );

        options.cdataInvalidChars = true;
        assert.strictEqual(
            new Options(options).cdataInvalidChars,
            options.cdataInvalidChars,
        );
    });

    it(
        "should return a validated version of the specified property" +
        " if undefined",
        () => {
            const options = {};
            assert.strictEqual(
                new Options(options).cdataInvalidChars,
                false,
            );
        },
    );
});

describe("cdataKeys", () => {
    it("should leave the specified property unchanged if valid", () => {
        const options = {
            cdataKeys: ["test", "test2"],
        };
        assert.deepEqual(
            new Options(options).cdataKeys,
            options.cdataKeys,
        );

        options.cdataKeys = [];
        assert.deepEqual(
            new Options(options).cdataKeys,
            options.cdataKeys,
        );
    });

    it(
        "should return a validated version of the specified property" +
        " if undefined",
        () => {
            const options = {};
            assert.deepEqual(new Options(options).cdataKeys, []);
        },
    );
});

describe("declaration", () => {
    it("should leave the specified property unchanged if valid", () => {
        const options = {
            declaration: {
                encoding: undefined,
                include: true,
                standalone: undefined,
                version: undefined,
            },
        };
        assert.deepEqual(
            new Options(options).declaration,
            options.declaration,
        );

        options.declaration = {
            encoding: undefined,
            include: false,
            standalone: undefined,
            version: undefined,
        };
        assert.deepEqual(
            new Options(options).declaration,
            options.declaration,
        );
    });

    it(
        "should return a validated version of the specified property" +
        " if undefined",
        () => {
            const options = {};
            assert.deepEqual(new Options(options).declaration, {
                encoding: undefined,
                include: true,
                standalone: undefined,
                version: undefined,
            });
        },
    );
});

describe("dtd", () => {
    it("should leave the specified property unchanged if valid", () => {
        {
            const options = {
                dtd: {
                    include: false,
                    name: undefined,
                    pubId: undefined,
                    sysId: undefined,
                },
            };
            assert.deepEqual(new Options(options).dtd, options.dtd);
        }

        {
            const options = {
                dtd: {
                    include: true,
                    name: "abc",
                    pubId: undefined,
                    sysId: undefined,
                },
            };
            assert.deepEqual(new Options(options).dtd, options.dtd);
        }
    });

    it(
        "should throw an error if the specified options object" +
        " contains invalid options",
        () => {
            // include: true requires a DTD name, so validation must reject this.
            // eslint-disable-next-line @typescript-eslint/no-explicit-any
            const options: any = {
                dtd: {
                    include: true,
                },
            };
            assert.throws(() => new Options(options));
        },
    );

    it(
        "should return a validated version of the specified property" +
        " if undefined",
        () => {
            const options = {};
            assert.deepEqual(new Options(options).dtd, {
                include: false,
                name: undefined,
                pubId: undefined,
                sysId: undefined,
            });
        },
    );
});

describe("format", () => {
    it("should leave the specified property unchanged if valid", () => {
        const options = {
            format: {
                doubleQuotes: undefined,
                indent: undefined,
                newline: undefined,
                pretty: undefined,
            },
        };
        assert.deepEqual(new Options(options).format, options.format);
    });

    it(
        "should return a validated version of the specified property" +
        " if undefined",
        () => {
            const options = {};
            assert.deepEqual(new Options(options).format, {
                doubleQuotes: undefined,
                indent: undefined,
                newline: undefined,
                pretty: undefined,
            });
        },
    );
});
// Same pattern as the describes above: valid values pass through, omitted
// values are replaced by their defaults.
describe("replaceInvalidChars", () => {
    it("should leave the specified property unchanged if valid", () => {
        const options = {
            replaceInvalidChars: false,
        };
        assert.strictEqual(
            new Options(options).replaceInvalidChars,
            options.replaceInvalidChars,
        );
        // Cover the other valid boolean value as well.
        options.replaceInvalidChars = true;
        assert.strictEqual(
            new Options(options).replaceInvalidChars,
            options.replaceInvalidChars,
        );
    });
    it(
        "should return a validated version of the specified property" +
        " if undefined",
        () => {
            const options = {};
            // Default: false.
            assert.strictEqual(
                new Options(options).replaceInvalidChars,
                false,
            );
        },
    );
});
describe("typeHandlers", () => {
    it("should leave the specified property unchanged if valid", () => {
        const typeHandlers: ITypeHandlers = {
            test1: () => {
                return "test2";
            },
            test3: () => {
                return "test4";
            },
        };
        const options = {
            typeHandlers,
        };
        assert.deepEqual(
            new Options(options).typeHandlers,
            options.typeHandlers,
        );
        options.typeHandlers = {};
        assert.deepEqual(
            new Options(options).typeHandlers,
            options.typeHandlers,
        );
    });
    it(
        "should return a validated version of the specified property" +
        " if undefined",
        () => {
            const options = {};
            // Default: no type handlers.
            assert.deepEqual(new Options(options).typeHandlers, {});
        },
    );
});
describe("useSelfClosingTagIfEmpty", () => {
    it("should leave the specified property unchanged if valid", () => {
        const options = {
            useSelfClosingTagIfEmpty: true,
        };
        assert.strictEqual(
            new Options(options).useSelfClosingTagIfEmpty,
            options.useSelfClosingTagIfEmpty,
        );
        options.useSelfClosingTagIfEmpty = false;
        assert.strictEqual(
            new Options(options).useSelfClosingTagIfEmpty,
            options.useSelfClosingTagIfEmpty,
        );
    });
    it(
        "should return a validated version of the specified property" +
        " if undefined",
        () => {
            const options = {};
            // Default: true (empty elements become self-closing tags).
            assert.strictEqual(
                new Options(options).useSelfClosingTagIfEmpty,
                true,
            );
        },
    );
});
describe("validation", () => {
    it("should leave the specified property unchanged if valid", () => {
        const options = {
            validation: true,
        };
        assert.strictEqual(
            new Options(options).validation,
            options.validation,
        );
        options.validation = false;
        assert.strictEqual(
            new Options(options).validation,
            options.validation,
        );
    });
    it(
        "should return a validated version of the specified property" +
        " if undefined",
        () => {
            const options = {};
            // Default: validation enabled.
            assert.strictEqual(new Options(options).validation, true);
        },
    );
});
describe("valueString", () => {
    it("should leave the specified property unchanged if valid", () => {
        const options = {
            valueString: "#",
        };
        assert.strictEqual(
            new Options(options).valueString,
            options.valueString,
        );
        options.valueString = "test";
        assert.strictEqual(
            new Options(options).valueString,
            options.valueString,
        );
    });
    it(
        "should return a validated version of the specified property" +
        " if undefined",
        () => {
            const options = {};
            // Default value string: "#".
            assert.strictEqual(new Options(options).valueString, "#");
        },
    );
});
describe("wrapHandlers", () => {
    it("should leave the specified property unchanged if valid", () => {
        const wrapHandlers: IWrapHandlers = {
            test1: () => {
                return "test2";
            },
            test3: () => {
                return "test4";
            },
        };
        const options = {
            wrapHandlers,
        };
        assert.deepEqual(
            new Options(options).wrapHandlers,
            options.wrapHandlers,
        );
        options.wrapHandlers = {};
        assert.deepEqual(
            new Options(options).wrapHandlers,
            options.wrapHandlers,
        );
    });
    it(
        "should return a validated version of the specified property" +
        " if undefined",
        () => {
            const options = {};
            // Default: no wrap handlers.
            assert.deepEqual(new Options(options).wrapHandlers, {});
        },
    );
});
});
}); | the_stack |
import {assertExists} from '../testing/internal/test_utils';
import * as Bytes from './bytes';
import * as EllipticCurves from './elliptic_curves';
import * as Random from './random';
import {WYCHEPROOF_ECDH_TEST_VECTORS} from './wycheproof_ecdh_test_vectors';
describe('elliptic curves test', function() {
beforeEach(function() {
  // Use a generous promise timeout for running continuously.
  jasmine.DEFAULT_TIMEOUT_INTERVAL = 1000 * 1000;  // 1000s
});
afterEach(function() {
  // Reset the promise timeout to default value.
  jasmine.DEFAULT_TIMEOUT_INTERVAL = 1000;  // 1s
});
it('compute ecdh shared secret', async function() {
  // ECDH is symmetric: (alicePriv, bobPub) and (bobPriv, alicePub) must
  // derive the same shared secret.
  const aliceKeyPair = await EllipticCurves.generateKeyPair('ECDH', 'P-256');
  const bobKeyPair = await EllipticCurves.generateKeyPair('ECDH', 'P-256');
  const sharedSecret1 = await EllipticCurves.computeEcdhSharedSecret(
      aliceKeyPair.privateKey, bobKeyPair.publicKey);
  const sharedSecret2 = await EllipticCurves.computeEcdhSharedSecret(
      bobKeyPair.privateKey, aliceKeyPair.publicKey);
  expect(Bytes.toHex(sharedSecret2)).toBe(Bytes.toHex(sharedSecret1));
});
it('wycheproof, wycheproof webcrypto', async function() {
  for (const testGroup of WYCHEPROOF_ECDH_TEST_VECTORS['testGroups']) {
    // Accumulate failures per group so all are reported at once.
    let errors = '';
    for (const test of testGroup['tests']) {
      errors += await runWycheproofTest(test);
    }
    if (errors !== '') {
      fail(errors);
    }
  }
});
// Test that both ECDH public and private key are defined in the result.
it('generate key pair e c d h', async function() {
  const curveTypes = [
    EllipticCurves.CurveType.P256, EllipticCurves.CurveType.P384,
    EllipticCurves.CurveType.P521
  ];
  for (const curve of curveTypes) {
    const curveTypeString = EllipticCurves.curveToString(curve);
    const keyPair =
        await EllipticCurves.generateKeyPair('ECDH', curveTypeString);
    expect(keyPair.privateKey != null).toBe(true);
    expect(keyPair.publicKey != null).toBe(true);
  }
});
// Test that both ECDSA public and private key are defined in the result.
it('generate key pair e c d s a', async function() {
  const curveTypes = [
    EllipticCurves.CurveType.P256, EllipticCurves.CurveType.P384,
    EllipticCurves.CurveType.P521
  ];
  for (const curve of curveTypes) {
    const curveTypeString = EllipticCurves.curveToString(curve);
    const keyPair =
        await EllipticCurves.generateKeyPair('ECDSA', curveTypeString);
    expect(keyPair.privateKey != null).toBe(true);
    expect(keyPair.publicKey != null).toBe(true);
  }
});
// Test that when ECDH crypto key is exported and imported it gives the same
// key as the original one.
it('import export crypto key e c d h', async function() {
  const curveTypes = [
    EllipticCurves.CurveType.P256, EllipticCurves.CurveType.P384,
    EllipticCurves.CurveType.P521
  ];
  for (const curve of curveTypes) {
    const curveTypeString = EllipticCurves.curveToString(curve);
    const keyPair =
        await EllipticCurves.generateKeyPair('ECDH', curveTypeString);
    // Round-trip the public key: export -> import must be lossless.
    const publicKey = keyPair.publicKey;
    const publicCryptoKey = await EllipticCurves.exportCryptoKey(publicKey);
    const importedPublicKey =
        await EllipticCurves.importPublicKey('ECDH', publicCryptoKey);
    expect(importedPublicKey).toEqual(publicKey);
    // Same round-trip for the private key.
    const privateKey = keyPair.privateKey;
    const privateCryptoKey = await EllipticCurves.exportCryptoKey(privateKey);
    const importedPrivateKey =
        await EllipticCurves.importPrivateKey('ECDH', privateCryptoKey);
    expect(importedPrivateKey).toEqual(privateKey);
  }
});
// Test that when ECDSA crypto key is exported and imported it gives the same
// key as the original one.
it('import export crypto key e c d s a', async function() {
  const curveTypes = [
    EllipticCurves.CurveType.P256, EllipticCurves.CurveType.P384,
    EllipticCurves.CurveType.P521
  ];
  for (const curve of curveTypes) {
    const curveTypeString = EllipticCurves.curveToString(curve);
    const keyPair =
        await EllipticCurves.generateKeyPair('ECDSA', curveTypeString);
    const publicKey = keyPair.publicKey;
    const publicCryptoKey = await EllipticCurves.exportCryptoKey(publicKey);
    const importedPublicKey =
        await EllipticCurves.importPublicKey('ECDSA', publicCryptoKey);
    expect(importedPublicKey).toEqual(publicKey);
    const privateKey = keyPair.privateKey;
    const privateCryptoKey = await EllipticCurves.exportCryptoKey(privateKey);
    const importedPrivateKey =
        await EllipticCurves.importPrivateKey('ECDSA', privateCryptoKey);
    expect(importedPrivateKey).toEqual(privateKey);
  }
});
// Test that when JSON ECDH web key is imported and exported it gives the same
// key as the original one.
it('import export json key e c d h', async function() {
  for (const testKey of TEST_KEYS) {
    const jwk: JsonWebKey = ({
      'kty': 'EC',
      'crv': testKey.curve,
      'x': Bytes.toBase64(Bytes.fromHex(testKey.x), true),
      'y': Bytes.toBase64(Bytes.fromHex(testKey.y), true),
      'ext': true,
    });
    let importedKey;
    // Keys without a 'd' component are public keys.
    if (!testKey.d) {
      jwk['key_ops'] = [];
      importedKey = await EllipticCurves.importPublicKey('ECDH', jwk);
    } else {
      jwk['key_ops'] = ['deriveKey', 'deriveBits'];
      jwk['d'] = Bytes.toBase64(Bytes.fromHex(testKey.d), true);
      importedKey = await EllipticCurves.importPrivateKey('ECDH', jwk);
    }
    const exportedKey = await EllipticCurves.exportCryptoKey(importedKey);
    expect(exportedKey).toEqual(jwk);
  }
});
// Test that when JSON ECDSA web key is imported and exported it gives the
// same key as the original one.
it('import export json key e c d s a', async function() {
  for (const testKey of TEST_KEYS) {
    const jwk: JsonWebKey = ({
      'kty': 'EC',
      'crv': testKey.curve,
      'x': Bytes.toBase64(Bytes.fromHex(testKey.x), true),
      'y': Bytes.toBase64(Bytes.fromHex(testKey.y), true),
      'ext': true,
    });
    let importedKey;
    // ECDSA keys get sign/verify key_ops instead of derive*.
    if (!testKey.d) {
      jwk['key_ops'] = ['verify'];
      importedKey = await EllipticCurves.importPublicKey('ECDSA', jwk);
    } else {
      jwk['key_ops'] = ['sign'];
      jwk['d'] = Bytes.toBase64(Bytes.fromHex(testKey.d), true);
      importedKey = await EllipticCurves.importPrivateKey('ECDSA', jwk);
    }
    const exportedKey = await EllipticCurves.exportCryptoKey(importedKey);
    expect(exportedKey).toEqual(jwk);
  }
});
it('curve to string', function() {
  expect(EllipticCurves.curveToString(EllipticCurves.CurveType.P256))
      .toBe('P-256');
  expect(EllipticCurves.curveToString(EllipticCurves.CurveType.P384))
      .toBe('P-384');
  expect(EllipticCurves.curveToString(EllipticCurves.CurveType.P521))
      .toBe('P-521');
});
it('curve from string', function() {
  expect(EllipticCurves.curveFromString('P-256'))
      .toBe(EllipticCurves.CurveType.P256);
  expect(EllipticCurves.curveFromString('P-384'))
      .toBe(EllipticCurves.CurveType.P384);
  expect(EllipticCurves.curveFromString('P-521'))
      .toBe(EllipticCurves.CurveType.P521);
});
it('field size in bytes', function() {
  expect(EllipticCurves.fieldSizeInBytes(EllipticCurves.CurveType.P256))
      .toBe(256 / 8);
  expect(EllipticCurves.fieldSizeInBytes(EllipticCurves.CurveType.P384))
      .toBe(384 / 8);
  // P-521 is not byte-aligned; size rounds up to 66 bytes.
  expect(EllipticCurves.fieldSizeInBytes(EllipticCurves.CurveType.P521))
      .toBe((521 + 7) / 8);
});
// Uncompressed encoding: 0x04 prefix byte + X + Y coordinates.
it('encoding size in bytes, uncompressed point format type', function() {
  expect(EllipticCurves.encodingSizeInBytes(
             EllipticCurves.CurveType.P256,
             EllipticCurves.PointFormatType.UNCOMPRESSED))
      .toBe(2 * (256 / 8) + 1);
  expect(EllipticCurves.encodingSizeInBytes(
             EllipticCurves.CurveType.P384,
             EllipticCurves.PointFormatType.UNCOMPRESSED))
      .toBe(2 * (384 / 8) + 1);
  expect(EllipticCurves.encodingSizeInBytes(
             EllipticCurves.CurveType.P521,
             EllipticCurves.PointFormatType.UNCOMPRESSED))
      .toBe(2 * ((521 + 7) / 8) + 1);
});
// Compressed encoding: prefix byte + X coordinate only.
it('encoding size in bytes, compressed point format type', function() {
  expect(EllipticCurves.encodingSizeInBytes(
             EllipticCurves.CurveType.P256,
             EllipticCurves.PointFormatType.COMPRESSED))
      .toBe((256 / 8) + 1);
  expect(EllipticCurves.encodingSizeInBytes(
             EllipticCurves.CurveType.P384,
             EllipticCurves.PointFormatType.COMPRESSED))
      .toBe((384 / 8) + 1);
  expect(EllipticCurves.encodingSizeInBytes(
             EllipticCurves.CurveType.P521,
             EllipticCurves.PointFormatType.COMPRESSED))
      .toBe(((521 + 7) / 8) + 1);
});
// Legacy Crunchy uncompressed encoding: X + Y with no prefix byte.
it('encoding size in bytes, crunchy uncompressed point format type',
   function() {
     expect(
         EllipticCurves.encodingSizeInBytes(
             EllipticCurves.CurveType.P256,
             EllipticCurves.PointFormatType.DO_NOT_USE_CRUNCHY_UNCOMPRESSED))
         .toBe(2 * (256 / 8));
     expect(
         EllipticCurves.encodingSizeInBytes(
             EllipticCurves.CurveType.P384,
             EllipticCurves.PointFormatType.DO_NOT_USE_CRUNCHY_UNCOMPRESSED))
         .toBe(2 * (384 / 8));
     expect(
         EllipticCurves.encodingSizeInBytes(
             EllipticCurves.CurveType.P521,
             EllipticCurves.PointFormatType.DO_NOT_USE_CRUNCHY_UNCOMPRESSED))
         .toBe(2 * ((521 + 7) / 8));
   });
it('point decode, wrong point size', function() {
  const point = new Uint8Array(10);
  const format = EllipticCurves.PointFormatType.UNCOMPRESSED;
  for (const curve
           of [EllipticCurves.CurveType.P256, EllipticCurves.CurveType.P384,
               EllipticCurves.CurveType.P521]) {
    const curveTypeString = EllipticCurves.curveToString(curve);
    // It should throw an exception as the point array is too short.
    try {
      EllipticCurves.pointDecode(curveTypeString, format, point);
      fail('Should throw an exception.');
    } catch (e) {
      expect(e.toString()).toBe('InvalidArgumentsException: invalid point');
    }
  }
});
it('point decode, unknown curve', function() {
  const point = new Uint8Array(10);
  const format = EllipticCurves.PointFormatType.UNCOMPRESSED;
  const curve = 'some-unknown-curve';
  try {
    EllipticCurves.pointDecode(curve, format, point);
    fail('Should throw an exception.');
  } catch (e) {
    expect(e.toString().includes('unknown curve')).toBe(true);
  }
});
it('point encode decode', function() {
  const format = EllipticCurves.PointFormatType.UNCOMPRESSED;
  for (const curveType
           of [EllipticCurves.CurveType.P256, EllipticCurves.CurveType.P384,
               EllipticCurves.CurveType.P521]) {
    const curveTypeString = EllipticCurves.curveToString(curveType);
    // Random field-sized coordinates are enough to test the encoding
    // round-trip (pointEncode does not validate curve membership here).
    const x = Random.randBytes(EllipticCurves.fieldSizeInBytes(curveType));
    const y = Random.randBytes(EllipticCurves.fieldSizeInBytes(curveType));
    const point: JsonWebKey = ({
      'kty': 'EC',
      'crv': curveTypeString,
      'x': Bytes.toBase64(x, /* websafe = */ true),
      'y': Bytes.toBase64(y, /* websafe = */ true),
      'ext': true,
    });
    const encodedPoint =
        EllipticCurves.pointEncode(assertExists(point['crv']), format, point);
    const decodedPoint =
        EllipticCurves.pointDecode(curveTypeString, format, encodedPoint);
    expect(decodedPoint).toEqual(point);
  }
});
it('ecdsa der2 ieee', function() {
  for (const test of ECDSA_IEEE_DER_TEST_VECTORS) {
    expect(EllipticCurves.ecdsaDer2Ieee(test.der, test.ieee.length))
        .toEqual(test.ieee);
  }
});
it('ecdsa der2 ieee with invalid signatures', function() {
for (const test of INVALID_DER_ECDSA_SIGNATURES) {
try {
EllipticCurves.ecdsaDer2Ieee(
Bytes.fromHex(test), 1 /* ieeeLength, ignored */);
} catch (e) {
expect(e.toString())
.toBe('InvalidArgumentsException: invalid DER signature');
}
}
});
it('ecdsa ieee2 der', function() {
  // Inverse of der2Ieee: converting IEEE P1363 back to DER must reproduce
  // the reference DER bytes.
  for (const test of ECDSA_IEEE_DER_TEST_VECTORS) {
    expect(EllipticCurves.ecdsaIeee2Der(test.ieee)).toEqual(test.der);
  }
});
it('is valid der ecdsa signature', function() {
  // Every malformed vector must be rejected by the validator.
  for (const test of INVALID_DER_ECDSA_SIGNATURES) {
    expect(EllipticCurves.isValidDerEcdsaSignature(Bytes.fromHex(test)))
        .toBe(false);
  }
});
});
/**
 * Runs the test with test vector given as an input and returns either empty
 * string or a text describing the failure.
 *
 * A vector with result 'invalid' must cause an exception; a 'valid' vector
 * must derive exactly the expected shared secret.
 */
async function runWycheproofTest(test: {
  'tcId': number,
  'public': JsonWebKey,
  'private': JsonWebKey,
  'shared': string,
  'result': string,
}): Promise<string> {
  try {
    const privateKey =
        await EllipticCurves.importPrivateKey('ECDH', test['private']);
    try {
      const publicKey =
          await EllipticCurves.importPublicKey('ECDH', test['public']);
      const sharedSecret =
          await EllipticCurves.computeEcdhSharedSecret(privateKey, publicKey);
      if (test['result'] === 'invalid') {
        return 'Fail on test ' + test['tcId'] + ': No exception thrown.\n';
      }
      const sharedSecretHex = Bytes.toHex(sharedSecret);
      if (sharedSecretHex !== test['shared']) {
        return 'Fail on test ' + test['tcId'] + ': unexpected result was \"' +
            sharedSecretHex + '\".\n';
      }
    } catch (e) {
      if (test['result'] === 'valid') {
        return 'Fail on test ' + test['tcId'] + ': unexpected exception \"' +
            e.toString() + '\".\n';
      }
    }
  } catch (e) {
    if (test['result'] === 'valid') {
      // Fix: use strict equality and single quotes for consistency with the
      // rest of this file (was: == "P-256K").
      if (test['private']['crv'] === 'P-256K') {
        // P-256K doesn't have to be supported. Hence failing to import the
        // key is OK.
        return '';
      }
      return 'Fail on test ' + test['tcId'] +
          ': unexpected exception trying to import private key \"' +
          e.toString() + '\".\n';
    }
  }
  // If the test passes return an empty string.
  return '';
}
/**
 * Immutable container for one EC test key given as hex strings.
 * The optional `d` component is present only for private keys.
 */
class TestKey {
  readonly curve: string;
  readonly x: string;
  readonly y: string;
  readonly d?: string;

  constructor(curve: string, x: string, y: string, d?: string) {
    this.curve = curve;
    this.x = x;
    this.y = y;
    this.d = d;
  }
}
// This set of keys was generated by Java version of Tink.
// It contains one private and one public key for each curve type supported by
// Tink.
// Entries with an `opt_d` argument are private keys; the others are public.
const TEST_KEYS: TestKey[] = [
  new TestKey(
      /* curve = */ 'P-256',
      /* x = */
      '2eab800e5d8e9b15d0f87c55324b477ffc9382d7137599e0203113a4e41b50d0',
      /* y = */
      '50bb2c11cfb72f3c380c2f93ea088d6938b91bcf581cd94a73ed0a3f623a6b8b'),
  new TestKey(
      /* curve = */ 'P-256',
      /* x = */
      '844c085cc4450297b681126356e10da074dea817f69bc2b1f3d6b1fc82593c7d',
      /* y = */
      '3cdb41fc89867d2066cc9c4f9ad7e890152bad24de20621abfe608234cbe40f1',
      /* opt_d = */
      'f96796cc28b36038817cc5d7db01c52ee0411dd848dc0833e9e26e989e4a64db'),
  new TestKey(
      /* curve = */ 'P-384',
      /* x = */
      'f3290cc80faa65e8821b0bf835f51e3431a4d78dcebd81b74c53b9b704bd995df93b648d51057a9a96a654fb8332391e',
      /* y = */
      '7e52bb9f654781a6894ef5ae77869207fa32ddbcec4a02d27ba1ead5472b3b9f39b09e9bca7d936809c143e99c655401'),
  new TestKey(
      /* curve = */ 'P-384',
      /* x = */
      'be9df79abedb82fc0e527630955f63f2f74b4984f0a4ac063a089565393ed20ac7a784f4efa434f5b1fa1837c76c8472',
      /* y = */
      'cf34ad0d4f3f2cbd546780509ec7073bb26fa0547d09ed10b83bf9b90903037ac956dbd661d02ce3e397e0547356b331',
      /* opt_d = */
      '34d86595280a8bdca23ccd60eeac9581016e895c2bc867c26dc2f99f6d0f627ce586ad36d1d2981968d8852dc9276d12'),
  new TestKey(
      /* curve = */ 'P-521',
      /* x = */
      '012f2211ec7e634919857be3066becf20c438b84ff24501712c91c98f527b44c7b001f8611935cb1179541c2b3cc3a1fc9259d50cd4842a847ea0cafe22cd75fe788',
      /* y = */
      '016b5d3f5480122643a26ef9e7c7e36875f53c28167d6afc35777d32ea76127d34287325bf14779f2e4cf3864fcc951ba601cec92b03291e34db2e815d4bd6fc2045'),
  new TestKey(
      /* curve = */ 'P-521',
      /* x = */
      '01ee3aabecef323cb4581e044be21914b567c426eae18d71720a71a0b236f5324ef9666fe855f5d7986d3e33a9250396f63c780572b3ad9417d69c2a87773ce39194',
      /* y = */
      '0036bea90db019304719d269e5335f9790e730e241a1b02cfdab8bdcfd0bcff8bdcb3ddeb9c3a94ecff1ab6abb80b0c1655f871c6089d3a4bf8625cf6bd182897f1b',
      /* opt_d = */
      '00b9f9f5d91cbfa9b7f92b041b137ac9822ca4a38f71ce227f624cac6178ca8351fab24bc2cc3f85d7ab72f54a0f9d1bb11a888a79a9c7b1ca267ddc82043585e437')
];
// Pairs the IEEE P1363 and DER encodings of the same ECDSA signature,
// decoding both from hex into raw bytes at construction time.
class EcdsaIeeeDerTestVector {
  ieee: Uint8Array;
  der: Uint8Array;

  constructor(ieeeHex: string, derHex: string) {
    this.ieee = Bytes.fromHex(ieeeHex);
    this.der = Bytes.fromHex(derHex);
  }
}
// Reference pairs used by the der2Ieee/ieee2Der round-trip tests above.
const ECDSA_IEEE_DER_TEST_VECTORS: EcdsaIeeeDerTestVector[] = [
  new EcdsaIeeeDerTestVector(  // normal case, short-form length
      '0102030405060708090a0b0c0d0e0f100102030405060708090a0b0c0d0e0f10',
      '302402100102030405060708090a0b0c0d0e0f1002100102030405060708090a0b0c0d0e0f10'),
  new EcdsaIeeeDerTestVector(  // normal case, long-form length
      '010000000100000001000000010000000100000001000000010000000100000001000000010000000100000001000000010000000100000001000000010000000203010000000100000001000000010000000100000001000000010000000100000001000000010000000100000001000000010000000100000001000000010000000203',
      '30818802420100000001000000010000000100000001000000010000000100000001000000010000000100000001000000010000000100000001000000010000000100000002030242010000000100000001000000010000000100000001000000010000000100000001000000010000000100000001000000010000000100000001000000010000000203'),
  new EcdsaIeeeDerTestVector(  // zero prefix.
      '0002030405060708090a0b0c0d0e0f100002030405060708090a0b0c0d0e0f10',
      '3022020f02030405060708090a0b0c0d0e0f10020f02030405060708090a0b0c0d0e0f10'),
  new EcdsaIeeeDerTestVector(  // highest bit is set.
      '00ff030405060708090a0b0c0d0e0f1000ff030405060708090a0b0c0d0e0f10',
      '3024021000ff030405060708090a0b0c0d0e0f10021000ff030405060708090a0b0c0d0e0f10'),
  new EcdsaIeeeDerTestVector(  // highest bit is set, full length.
      'ff02030405060708090a0b0c0d0e0f10ff02030405060708090a0b0c0d0e0f10',
      '3026021100ff02030405060708090a0b0c0d0e0f10021100ff02030405060708090a0b0c0d0e0f10'),
  new EcdsaIeeeDerTestVector(  // all zeros.
      '0000000000000000000000000000000000000000000000000000000000000000',
      '3006020100020100'),
];
// Hex-encoded DER blobs that violate the DER ECDSA signature grammar in one
// specific way each; all must be rejected.
const INVALID_DER_ECDSA_SIGNATURES: string[] = [
  '2006020101020101',    // 1st byte is not 0x30 (SEQUENCE tag)
  '3006050101020101',    // 3rd byte is not 0x02 (INTEGER tag)
  '3006020101050101',    // 6th byte is not 0x02 (INTEGER tag)
  '308206020101020101',  // long form length is not 0x81
  '30ff020101020101',    // invalid total length
  '3006020201020101',    // invalid rLength
  '3006020101020201',    // invalid sLength
  '30060201ff020101',    // no extra zero when highest bit of r is set
  '30060201010201ff',    // no extra zero when highest bit of s is set
];
import { EventEmitter } from 'events';
import pRetry from 'p-retry';
import pTimeout from 'p-timeout';
import { Core, Web } from 'sip.js';
import { Invitation } from 'sip.js/lib/api/invitation';
import { Inviter } from 'sip.js/lib/api/inviter';
import { Publisher } from 'sip.js/lib/api/publisher';
import { PublisherOptions } from 'sip.js/lib/api/publisher-options';
import { Registerer } from 'sip.js/lib/api/registerer';
import { RegistererState } from 'sip.js/lib/api/registerer-state';
import { Subscriber } from 'sip.js/lib/api/subscriber';
import { UserAgent } from 'sip.js/lib/api/user-agent';
import { UserAgentOptions, SIPExtension } from 'sip.js/lib/api/user-agent-options';
import { IncomingInviteRequest, IncomingRequestMessage, TransportError } from 'sip.js/lib/core';
import { ClientStatus, ReconnectionMode } from './enums';
import * as Features from './features';
import { HealthChecker } from './health-checker';
import { increaseTimeout, jitter } from './lib/utils';
import { log } from './logger';
import { sessionDescriptionHandlerFactory } from './session-description-handler';
import { hour, second } from './time';
import { IClientOptions, IRetry } from './types';
// Factory that builds a sip.js UserAgent from its options (injected so the
// transport can be unit-tested with a fake UA).
export type UAFactory = (options: UserAgentOptions) => UserAgent;
/**
 * @hidden
 */
export interface ITransportDelegate {
  /**
   * Do something with the invitation before onInvite is called.
   * @param invitation the SIP Invitation instance.
   * @returns {boolean} if true it will stop the onInvite early.
   */
  onBeforeInvite(invitation: Invitation): boolean;
}
/**
 * @hidden
 */
export interface ITransport extends EventEmitter {
  // Promise of the in-flight/completed registration (see connect()).
  registeredPromise: Promise<any>;
  // True once the server accepted our REGISTER.
  registered: boolean;
  // Current lifecycle state of the client.
  status: ClientStatus;
  delegate?: ITransportDelegate;
  configure(options: IClientOptions): void;
  connect(): Promise<boolean>;
  disconnect(options?: { hasRegistered: boolean }): Promise<void>;
  updatePriority(flag: boolean): void;
  getConnection(mode: ReconnectionMode): Promise<boolean>;
  close(): void;
  createInviter(phoneNumber: string): Inviter;
  createSubscriber(contact: string): Subscriber;
  createPublisher(contact: string, options: PublisherOptions): Publisher;
}
/**
 * @hidden
 */
export type TransportFactory = (uaFactory: UAFactory, options: IClientOptions) => ITransport;
/**
 * @hidden
 */
export class WrappedTransport extends Web.Transport {
  /**
   * Disconnect socket. It could happen that the user switches network
   * interfaces while calling. If this happens, closing a websocket will
   * cause it to be blocked. To make sure that UA gets to the proper internal
   * state so that it is ready to 'switch over' to the new network interface
   * with a new websocket, we call the function that normally causes the
   * disconnectPromise to be resolved after a timeout.
   */
  protected disconnectPromise(options: any = {}): Promise<any> {
    // After 1s we give up waiting and synthesize a close event ourselves.
    return pTimeout(super.disconnectPromise(), 1000, () => {
      log.debug('Fake-closing the the socket by ourselves.', this.constructor.name);
      (this as any).onClose({ code: 'fake', reason: 'Artificial timeout' });
    }).then(() => ({ overrideEvent: true })); // overrideEvent to avoid sip.js emitting disconnected.
  }
}
// SIP presence subscriptions expire after one hour (value in seconds).
const SIP_PRESENCE_EXPIRE = hour / second; // one hour in seconds
// Maps sip.js Core log levels onto our logger's level names.
const logLevelConversion = {
  [Core.Levels.debug]: 'debug',
  [Core.Levels.log]: 'info',
  [Core.Levels.warn]: 'warn',
  [Core.Levels.error]: 'error'
};
// Bridges sip.js log output into our own logger; `label` is ignored and
// unknown levels fall back to 'debug'.
const connector = (level, category, label, content) => {
  const convertedLevel = logLevelConversion[level] || 'debug';
  log.log(convertedLevel, content, category);
};
// Maps SIP 'reason' header text onto the cancellation reason strings we
// expose to consumers.
const CANCELLED_REASON = {
  'Call completed elsewhere': 'call_completed_elsewhere'
};
/**
* @hidden
*/
export class ReconnectableTransport extends EventEmitter implements ITransport {
  public registeredPromise: Promise<any>;
  public registered = false;
  public status: ClientStatus = ClientStatus.DISCONNECTED;
  public delegate?: ITransportDelegate;
  // Set via updatePriority(); presumably influences reconnection behavior
  // elsewhere in this class — confirm against the full implementation.
  private priority = false;
  private unregisteredPromise: Promise<any>;
  private uaFactory: UAFactory;
  private uaOptions: UserAgentOptions;
  private userAgent: UserAgent;
  // Milliseconds during which sessions may still be revived after a
  // connection loss (checked in getConnection()).
  private dyingCounter = 60000;
  // Timeout (ms) for the initial websocket connection in connect().
  private wsTimeout = 10000;
  private dyingIntervalID: number;
  private retry: IRetry = { interval: 2000, limit: 30000, timeout: 250 };
  private registerer: Registerer;
  private unregisterer: Registerer;
  // Bound handlers kept so close() can remove exactly these listeners.
  private boundOnWindowOffline: EventListenerOrEventListenerObject;
  private boundOnWindowOnline: EventListenerOrEventListenerObject;
  private wasWindowOffline = false;
  private healthChecker: HealthChecker;
  // Stores the UA factory, applies the client options and starts listening
  // for browser online/offline events.
  constructor(uaFactory: UAFactory, options: IClientOptions) {
    super();
    this.uaFactory = uaFactory;
    this.configure(options);
    this.boundOnWindowOffline = this.onWindowOffline.bind(this);
    this.boundOnWindowOnline = this.tryUntilConnected.bind(this);
    window.addEventListener('offline', this.boundOnWindowOffline);
    window.addEventListener('online', this.boundOnWindowOnline);
  }
  // Translates our IClientOptions into sip.js UserAgentOptions. Does not
  // touch an existing UserAgent; the options take effect on the next
  // configureUA() call.
  public configure(options: IClientOptions) {
    const { account, transport, userAgentString } = options;
    const uri = UserAgent.makeURI(account.uri);
    // Audio-only client: always strip video from the SDP.
    const modifiers = [Web.Modifiers.stripVideo];
    if (Features.isSafari) {
      // Also strip G722 on Safari — presumably a codec compatibility
      // workaround; confirm before removing.
      modifiers.push(Web.Modifiers.stripG722);
    }
    this.uaOptions = {
      autoStart: false,
      autoStop: false,
      noAnswerTimeout: 60,
      authorizationUsername: account.user,
      authorizationPassword: account.password,
      logConnector: connector,
      logLevel: 'warn',
      sessionDescriptionHandlerFactory,
      sessionDescriptionHandlerFactoryOptions: {
        alwaysAcquireMediaFirst: Features.isFirefox,
        constraints: { audio: true, video: false },
        modifiers,
        peerConnectionOptions: {
          rtcConfiguration: {
            iceServers: transport.iceServers.map((s: string) => ({ urls: s }))
          }
        }
      },
      transportConstructor: WrappedTransport,
      transportOptions: {
        // We handle reconnection ourselves (see getConnection), so sip.js
        // must not retry on its own.
        maxReconnectionAttempts: 0,
        traceSip: true,
        wsServers: transport.wsServers
      },
      uri,
      userAgentString
    };
  }
  // Connect (and subsequently register) to server
  //
  // Resolves with the registration result. Re-entrant: while already
  // connected (or a registration is pending) the existing promise is
  // returned instead of starting a second attempt.
  public async connect() {
    if (this.status === ClientStatus.RECOVERING) {
      return Promise.reject(new Error('Can not connect while trying to recover.'));
    }
    if (this.status === ClientStatus.CONNECTED) {
      log.info('Already registered.', this.constructor.name);
      return this.registeredPromise;
    }
    this.updateStatus(ClientStatus.CONNECTING);
    // Lazily create the UserAgent on first connect.
    if (!this.userAgent) {
      log.debug('Configuring UA.', this.constructor.name);
      this.configureUA(this.uaOptions);
    }
    if (this.unregisteredPromise) {
      log.info(
        'Cannot connect while unregistering takes place. Waiting until unregistering is resolved.',
        this.constructor.name
      );
      await this.unregisteredPromise;
    }
    if (this.registeredPromise) {
      return this.registeredPromise;
    }
    // Fail fast when the websocket cannot be opened within wsTimeout ms.
    await pTimeout(this.userAgent.start(), this.wsTimeout, () => {
      log.info('Could not connect to the websocket in time.', this.constructor.name);
      return Promise.reject(new Error('Could not connect to the websocket in time.'));
    });
    this.createHealthChecker();
    this.registeredPromise = this.createRegisteredPromise();
    this.registerer.register();
    // Only start health checking once registration succeeded.
    return this.registeredPromise.then(success => {
      this.healthChecker.start();
      return success;
    });
  }
// Unregister (and subsequently disconnect) to server.
public async disconnect({ hasRegistered = true }): Promise<void> {
if (!this.userAgent || this.status === ClientStatus.DISCONNECTED) {
log.info('Already disconnected.', this.constructor.name);
return;
}
this.updateStatus(ClientStatus.DISCONNECTING);
delete this.registeredPromise;
// To avoid sending OPTIONS requests after disconnecting.
this.stopHealthChecker();
// Unregistering is not possible when the socket connection is closed/interrupted
// - by the server during a call
// - by a network node during a call
// - by the client during a call (browser accidentally killing ws)
if (hasRegistered) {
this.unregisteredPromise = this.createUnregisteredPromise();
log.info('Trying to unregister.', this.constructor.name);
this.unregisterer.unregister();
// Little protection to make sure our account is actually unregistered
// and received an ACK before other functions are called
// (i.e. ua.disconnect)
await this.unregisteredPromise;
log.info('Unregistered.', this.constructor.name);
}
await this.userAgent.stop();
await this.userAgent.transport.disconnect(); // This calls our patched disconnectPromise.
this.updateStatus(ClientStatus.DISCONNECTED);
log.info('Disconnected.', this.constructor.name);
this.userAgent.transport.removeAllListeners();
delete this.userAgent;
delete this.unregisteredPromise;
}
  // Build an Inviter (outgoing call) for the given target; only allowed
  // while CONNECTED.
  public createInviter(phoneNumber: string): Inviter {
    if (this.status !== ClientStatus.CONNECTED) {
      log.info('Could not send an invite. Not connected.', this.constructor.name);
      throw new Error('Cannot send an invite. Not connected.');
    }
    // NOTE(review): UserAgent.makeURI returns undefined for an unparsable
    // target, which would fail inside the Inviter constructor — confirm
    // that phoneNumber is always pre-validated by callers.
    return new Inviter(this.userAgent, UserAgent.makeURI(phoneNumber));
  }
public createSubscriber(contact: string): Subscriber {
// Introducing a jitter here, to avoid thundering herds.
return new Subscriber(this.userAgent, UserAgent.makeURI(contact), 'dialog', {
expires: SIP_PRESENCE_EXPIRE + jitter(SIP_PRESENCE_EXPIRE, 30)
});
}
public createPublisher(contact: string, options: PublisherOptions) {
return new Publisher(this.userAgent, UserAgent.makeURI(contact), 'dialog', options);
}
  // NOTE(review): despite the boolean-sounding name, this returns the
  // registration promise (undefined while disconnected), not a boolean.
  public isRegistered() {
    return this.registeredPromise;
  }
  // Probe connectivity and, when the SIP server is reachable again, cycle
  // the websocket, re-register and revive sessions/subscriptions.
  // Returns whether the server was reachable. No-op (false) when there is
  // no UserAgent or we are deliberately disconnected.
  public async getConnection(mode: ReconnectionMode = ReconnectionMode.ONCE): Promise<boolean> {
    if (!this.userAgent) {
      return false;
    }
    if (ClientStatus.DISCONNECTED === this.status) {
      return false;
    }
    const isOnline = await this.isOnline(mode);
    log.debug(`isOnline: ${isOnline}`, this.constructor.name);
    if (isOnline) {
      // Cycle the transport: a stale socket must be closed before a fresh
      // one is opened on the (possibly new) network interface.
      await this.userAgent.transport.disconnect();
      log.debug('Socket closed', this.constructor.name);
      await this.userAgent.transport.connect();
      log.debug('Socket opened', this.constructor.name);
      this.registeredPromise = this.createRegisteredPromise();
      this.registerer.register();
      this.createHealthChecker();
      await this.registeredPromise;
      log.debug('Reregistered!', this.constructor.name);
      this.healthChecker.start();
      // Before the dyingCounter reached 0, there is a decent chance our
      // sessions are still alive and kicking. Let's try to revive them.
      if (this.dyingCounter !== 0) {
        this.emit('reviveSessions');
      }
      this.emit('reviveSubscriptions');
      this.updateStatus(ClientStatus.CONNECTED);
      // Reset the retry backoff now that we are connected again.
      this.retry.timeout = 250;
    }
    return isOnline;
  }
/**
 * Mark this client as prioritized (true while in a call). Prioritized
 * clients are allowed to reconnect aggressively; see tryUntilConnected.
 */
public updatePriority(flag: boolean): void {
  log.debug(`Priority is ${flag}`, this.constructor.name);
  this.priority = flag;
}
/**
 * Detach the window connectivity listeners registered at startup, so this
 * client no longer reacts to browser online/offline events.
 */
public close(): void {
  window.removeEventListener('online', this.boundOnWindowOnline);
  window.removeEventListener('offline', this.boundOnWindowOffline);
}
/**
 * Transition to a new client status and notify listeners. Repeated
 * identical statuses are suppressed so 'statusUpdate' only fires on
 * actual transitions.
 */
private updateStatus(status: ClientStatus): void {
  if (this.status !== status) {
    this.status = status;
    this.emit('statusUpdate', status);
  }
}
/**
 * Probe connectivity by opening a bare WebSocket to the configured SIP
 * server.
 *
 * Resolves true when the socket opens; resolves false on error in ONCE
 * mode. In BURST mode a failure rejects instead so the surrounding pRetry
 * loop registers it as a failed attempt.
 *
 * Fix: the previous implementation threw from inside the 'error' event
 * listener. An exception raised in an event listener never rejects the
 * wrapping promise, so pRetry could not observe the failure and this
 * promise stayed pending forever. Rejecting via the executor's reject
 * callback delivers the error to pRetry as intended. Both listeners are
 * also removed on either outcome to avoid double settlement.
 */
private isOnlinePromise(mode: ReconnectionMode) {
  return new Promise((resolve, reject) => {
    const checkSocket = new WebSocket(this.uaOptions.transportOptions.wsServers, 'sip');
    const cleanUp = () => {
      checkSocket.removeEventListener('open', handlers.onOpen);
      checkSocket.removeEventListener('error', handlers.onError);
    };
    const handlers = {
      onError: e => {
        log.debug(e, this.constructor.name);
        cleanUp();
        // In the case that mode is BURST, reject so the error can be
        // caught by pRetry.
        if (mode === ReconnectionMode.BURST) {
          reject(new Error('it broke woops'));
          return;
        }
        resolve(false);
      },
      onOpen: () => {
        log.debug('Opening a socket to sip server worked.', this.constructor.name);
        checkSocket.close();
        cleanUp();
        resolve(true);
      }
    };
    checkSocket.addEventListener('open', handlers.onOpen);
    checkSocket.addEventListener('error', handlers.onError);
  });
}
/**
 * Create the SIP UserAgent and wire up invite handling.
 *
 * Two patches are applied on top of stock SIP.js behavior:
 * 1. The incoming invite's onCancel delegate is wrapped so the cancel
 *    reason (parsed from the Reason header) can be reported alongside the
 *    invitation via the 'invite' event.
 * 2. userAgentCore.delegate.onInvite is replaced by a copy of the SIP.js
 *    implementation that additionally consults this.delegate.onBeforeInvite
 *    before normal invite handling (see "Delegate before invitation
 *    handling" below).
 */
private configureUA(options: UserAgentOptions) {
  this.userAgent = this.uaFactory(options);
  this.userAgent.delegate = {
    onInvite: (invitation: Invitation) => {
      // Patch the onCancel delegate function to parse the reason of
      // cancellation. This is then used by the terminatedPromise of
      // a Session to return the reason when a session is terminated.
      const cancelled = { reason: undefined };
      const onCancel = (invitation as any).incomingInviteRequest.delegate.onCancel;
      (invitation as any).incomingInviteRequest.delegate.onCancel = (
        message: Core.IncomingRequestMessage
      ) => {
        const reason = this.parseHeader(message.getHeader('reason'));
        cancelled.reason = reason ? CANCELLED_REASON[reason.get('text')] : undefined;
        // Chain through to the original SIP.js cancel handling.
        onCancel(message);
      };
      this.emit('invite', { invitation, cancelled });
    }
  };
  if (this.userAgent.userAgentCore) {
    // The following onInvite function is taken from:
    // SIP.js 0.15.6, file: src/api/user-agent.ts, line: 666.
    //
    // See "Delegate before invitation handling" below for the patch.
    //
    // FIXME Keep this up to date with SIP versions, or is there better way to do this?
    this.userAgent.userAgentCore.delegate.onInvite = async (
      incomingInviteRequest: IncomingInviteRequest
    ): Promise<void> => {
      const invitation = new Invitation(this.userAgent, incomingInviteRequest);
      const ua = this.userAgent as any; // Cast to any so we can access private and protected properties.
      incomingInviteRequest.delegate = {
        onCancel: (cancel: IncomingRequestMessage): void => {
          invitation.onCancel(cancel);
        },
        onTransportError: (error: TransportError): void => {
          // A server transaction MUST NOT discard transaction state based only on
          // encountering a non-recoverable transport error when sending a
          // response. Instead, the associated INVITE server transaction state
          // machine MUST remain in its current state. (Timers will eventually
          // cause it to transition to the "Terminated" state).
          // https://tools.ietf.org/html/rfc6026#section-7.1
          // As noted in the comment above, we are leaving it to the transaction
          // timers to eventually cause the transaction to sort itself out in the case
          // of a transport failure in an invite server transaction. This delegate method
          // is here simply for completeness and to make it clear that it provides
          // nothing more than an informational hook into the core. That is, if you think
          // you should be trying to deal with a transport error here, you are likely wrong.
          log.error(
            'A transport error has occured while handling an incoming INVITE request.',
            this.constructor.name
          );
        }
      };
      // FIXME: Ported - 100 Trying send should be configurable.
      // Only required if TU will not respond in 200ms.
      // https://tools.ietf.org/html/rfc3261#section-17.2.1
      incomingInviteRequest.trying();
      // The Replaces header contains information used to match an existing
      // SIP dialog (call-id, to-tag, and from-tag). Upon receiving an INVITE
      // with a Replaces header, the User Agent (UA) attempts to match this
      // information with a confirmed or early dialog.
      // https://tools.ietf.org/html/rfc3891#section-3
      if (ua.options.sipExtensionReplaces !== SIPExtension.Unsupported) {
        const message = incomingInviteRequest.message;
        const replaces = message.parseHeader('replaces');
        if (replaces) {
          const callId = replaces.call_id;
          if (typeof callId !== 'string') {
            throw new Error('Type of call id is not string');
          }
          const toTag = replaces.replaces_to_tag;
          if (typeof toTag !== 'string') {
            throw new Error('Type of to tag is not string');
          }
          const fromTag = replaces.replaces_from_tag;
          if (typeof fromTag !== 'string') {
            throw new Error('type of from tag is not string');
          }
          const targetDialogId = callId + toTag + fromTag;
          const targetDialog = ua.userAgentCore.dialogs.get(targetDialogId);
          // If no match is found, the UAS rejects the INVITE and returns a 481
          // Call/Transaction Does Not Exist response. Likewise, if the Replaces
          // header field matches a dialog which was not created with an INVITE,
          // the UAS MUST reject the request with a 481 response.
          // https://tools.ietf.org/html/rfc3891#section-3
          if (!targetDialog) {
            invitation.reject({ statusCode: 481 });
            return;
          }
          // If the Replaces header field matches a confirmed dialog, it checks
          // for the presence of the "early-only" flag in the Replaces header
          // field. (This flag allows the UAC to prevent a potentially
          // undesirable race condition described in Section 7.1.) If the flag is
          // present, the UA rejects the request with a 486 Busy response.
          // https://tools.ietf.org/html/rfc3891#section-3
          if (!targetDialog.early && replaces.early_only === true) {
            invitation.reject({ statusCode: 486 });
            return;
          }
          // Provide a handle on the session being replaced.
          const targetSession =
            ua.sessions[callId + fromTag] || ua.sessions[callId + toTag] || undefined;
          if (!targetSession) {
            throw new Error('Session does not exist.');
          }
          invitation.replacee = targetSession;
        }
      }
      // A common scenario occurs when the callee is currently not willing or
      // able to take additional calls at this end system. A 486 (Busy Here)
      // SHOULD be returned in such a scenario.
      // https://tools.ietf.org/html/rfc3261#section-13.3.1.3
      if (!ua.delegate || !ua.delegate.onInvite) {
        invitation.reject({ statusCode: 486 });
        return;
      }
      // Delegate before invitation handling. This is the local patch on top
      // of the ported SIP.js code: a consumer-provided hook may veto the
      // invite entirely by resolving truthy.
      if (this.delegate && this.delegate.onBeforeInvite) {
        const stopEarly = await this.delegate.onBeforeInvite(invitation);
        if (stopEarly) {
          return;
        }
      }
      // Delegate invitation handling.
      if (!invitation.autoSendAnInitialProvisionalResponse) {
        ua.delegate.onInvite(invitation);
      } else {
        const onInvite = ua.delegate.onInvite;
        invitation.progress().then(() => onInvite(invitation));
      }
    };
  } else {
    log.error('UserAgent does not seem to have a UserAgentCore', this.constructor.name);
  }
  this.userAgent.transport.on('disconnected', this.onTransportDisconnected.bind(this));
}
/**
 * Determine whether the configured SIP websocket server is reachable.
 *
 * ONCE: a single probe socket, bounded by a 5 second timeout.
 * BURST: probe sockets are retried in a tight loop (~100 ms between
 * attempts) until one succeeds, the client is DISCONNECTED, or the
 * dyingCounter-based deadline expires.
 */
private isOnline(mode: ReconnectionMode): Promise<any> {
  const hasConfiguredWsServer =
    this.uaOptions &&
    this.uaOptions.transportOptions &&
    this.uaOptions.transportOptions.wsServers;
  // No server configured means there is nothing we could be online to.
  if (!hasConfiguredWsServer) {
    return Promise.resolve(false);
  }
  const tryOpeningSocketWithTimeout = () =>
    pTimeout(this.isOnlinePromise(mode), 5000, () => {
      // In the case that mode is BURST, throw an error which can be
      // catched by pRetry.
      if (mode === ReconnectionMode.BURST) {
        throw new Error('Cannot open socket. Probably DNS failure.');
      }
      return Promise.resolve(false);
    });
  // In the case that mode is ONCE, a new socket is created once, bounded by
  // the 5000 ms pTimeout above. (An earlier comment claimed 500 ms.)
  if (mode === ReconnectionMode.ONCE) {
    log.debug('Trying to reconnect once.', this.constructor.name);
    return tryOpeningSocketWithTimeout();
  }
  log.debug('Trying to reconnect asap.', this.constructor.name);
  // In the case that mode is BURST, new sockets are created in rapid
  // succession (100 ms between attempts, see retryOptions) to be able to
  // quickly revive our connection once that succeeds.
  const retryOptions = {
    forever: true,
    maxTimeout: 100, // Note: this is time between retries, not time before operation times out
    minTimeout: 100,
    onFailedAttempt: error => {
      log.debug(
        `Connection attempt ${error.attemptNumber} failed. There are ${error.retriesLeft} retries left.`,
        this.constructor.name
      );
    }
  };
  const retryForever = pRetry(() => {
    // It could happen that this function timed out. Because this is a
    // async function we check the client status to stop this loop.
    if (this.status === ClientStatus.DISCONNECTED) {
      throw new pRetry.AbortError("It's no use. Stop trying to recover");
    }
    return tryOpeningSocketWithTimeout();
  }, retryOptions);
  // The whole burst is bounded by dyingCounter (counts down from 60000 ms
  // elsewhere): the window in which the SIP server presumably still keeps
  // our sessions alive.
  return pTimeout(retryForever, this.dyingCounter, () => {
    log.info(
      'We could not recover the session(s) within 1 minute. ' +
        'After this time the SIP server has terminated the session(s).',
      this.constructor.name
    );
    return Promise.resolve(false);
  });
}
/**
 * This function is generally called after a window 'online' event or
 * after an ua.transport 'disconnected' event.
 *
 * In the scenario where the SIP server goes offline, or a socket stops
 * working, ua.transport emits a 'disconnected' event. When this happens
 * for a multitude of clients, all of those clients would be
 * reconnecting at the same time.
 *
 * To avoid this, we divide those clients in two groups:
 * - Clients that are in a call (priority === true)
 * - Clients that are not in a call (priority === false)
 *
 * Clients that are in a call can recover as soon as possible, where
 * clients that are not in a call have to wait an amount of time which
 * increments every failure, before reconnecting to the server.
 *
 * @param skipCheck - Bypass the "already recovering" guard; used by
 * onAfterGetConnection to continue an in-progress recovery loop.
 */
private async tryUntilConnected({ skipCheck }: { skipCheck: boolean } = { skipCheck: false }) {
  // To avoid triggering multiple times, return if status is recovering.
  if (ClientStatus.RECOVERING === this.status && !skipCheck) {
    return;
  }
  this.updateStatus(ClientStatus.RECOVERING);
  if (this.priority) {
    // In-call clients reconnect immediately and aggressively (BURST).
    const connected = await this.getConnection(ReconnectionMode.BURST);
    this.onAfterGetConnection(connected);
    return;
  }
  log.debug(
    `Reconnecting in ${this.retry.timeout / second}s to avoid thundering herd`,
    this.constructor.name
  );
  setTimeout(async () => {
    // Only trigger this function if we haven't reconnected in the same time.
    if (this.status !== ClientStatus.CONNECTED) {
      const connected = await this.getConnection(ReconnectionMode.ONCE);
      this.onAfterGetConnection(connected);
    }
  }, this.retry.timeout);
  // Back off further for the next attempt.
  this.retry = increaseTimeout(this.retry);
}
/**
 * Create a fresh Registerer and return a promise that resolves once the
 * registrar confirms the Registered state, or rejects (after tearing the
 * connection down) when it reports Unregistered. Any previous registerer
 * is first removed from the UA's bookkeeping.
 */
private createRegisteredPromise() {
  if (this.registerer) {
    // Remove from UA's collection, not using this.registerer.dispose to
    // avoid unregistering.
    delete this.userAgent.registerers[(this.registerer as any).id];
  }
  this.registerer = new Registerer(this.userAgent, {});
  return new Promise((resolve, reject) => {
    // Handle outgoing session state changes.
    this.registerer.stateChange.once(async (newState: RegistererState) => {
      switch (newState) {
        case RegistererState.Registered:
          this.updateStatus(ClientStatus.CONNECTED);
          resolve(true);
          break;
        case RegistererState.Unregistered:
          // Registration was refused: disconnect and surface the failure.
          await this.disconnect({ hasRegistered: false });
          this.updateStatus(ClientStatus.DISCONNECTED);
          log.error('Could not register.', this.constructor.name);
          reject(new Error('Could not register.'));
          break;
        default:
          break;
      }
    });
  });
}
/**
 * Set up a dedicated Registerer used for unregistering and return a
 * promise that resolves once the registrar confirms the Unregistered
 * state.
 *
 * NOTE(review): stateChange.once fires a single time, so if the first
 * state change is anything other than Unregistered this promise never
 * settles — confirm whether callers guard against that (e.g. with a
 * timeout).
 */
private createUnregisteredPromise() {
  if (this.unregisterer) {
    // Remove the previous unregisterer from the UA's collection. We do not
    // use this.unregisterer.dispose() as that would itself trigger an
    // unregister. (Previous comment mistakenly referenced this.registerer.)
    delete this.userAgent.registerers[(this.unregisterer as any).id];
  }
  this.unregisterer = new Registerer(this.userAgent);
  // The executor's unused `reject` parameter and the needless `async` on
  // the listener were dropped.
  return new Promise(resolve => {
    // Handle outgoing session state changes.
    this.unregisterer.stateChange.once((newState: RegistererState) => {
      if (newState === RegistererState.Unregistered) {
        log.info('State changed to Unregistered.', this.constructor.name);
        resolve(true);
      }
    });
  });
}
/**
 * Handle the browser going offline: mark the client DYING and start a
 * countdown (dyingCounter) representing the window in which the SIP
 * server presumably still keeps our sessions alive. When the countdown
 * hits 0 the client loses its reconnect priority.
 */
private onWindowOffline() {
  log.info('We appear to be offline.', this.constructor.name);
  this.wasWindowOffline = true;
  this.updateStatus(ClientStatus.DYING);
  this.registeredPromise = undefined;
  // Only run one dying countdown at a time.
  if (this.dyingIntervalID) {
    return;
  }
  const subtractValue = 500;
  const subtractTillDead = () => {
    // Clamp at 0 and test with <= instead of ===: the original `=== 0`
    // check would never clear the interval (and could drive the counter
    // negative) if dyingCounter were ever set to a value that is not a
    // multiple of subtractValue. A negative counter would also be an
    // invalid timeout for the pTimeout that consumes it.
    this.dyingCounter = Math.max(this.dyingCounter - subtractValue, 0);
    if (this.dyingCounter <= 0) {
      clearInterval(this.dyingIntervalID);
      this.dyingIntervalID = undefined;
      // As the counter reached 0, there are no calls left over. Thus the
      // reconnection strategy does not have to prioritize this client
      // anymore.
      log.debug(
        'Priority set to false. Our call was probably terminated by the SIP server.',
        this.constructor.name
      );
      this.priority = false;
    }
  };
  this.dyingIntervalID = window.setInterval(subtractTillDead, subtractValue);
}
/** Stop and discard the active health checker, if any. */
private stopHealthChecker() {
  if (!this.healthChecker) {
    return;
  }
  this.healthChecker.stop();
  delete this.healthChecker;
}
/**
 * Replace any running health checker with a fresh one bound to the current
 * user agent. The new checker is not started here; callers start it after
 * registration completes (see getConnection).
 */
private createHealthChecker() {
  this.stopHealthChecker();
  this.healthChecker = new HealthChecker(this.userAgent);
}
/**
 * React to the transport's 'disconnected' event, reconnecting only when
 * the disconnect was not caused by losing internet connectivity.
 */
private onTransportDisconnected() {
  // There are different scenarios in place for when the transport might
  // have disconnected. One is loss of internet; this is detected by the
  // offline listener on window. Whenever this happens, it is inevitable that
  // the transport disconnected event will trigger. In that case we do not
  // want to try to reconnect, because there is no internet so it would be
  // futile. In this scenario, when internet comes back, tryUntilConnected
  // will be triggered by the window online event. The
  // 'wasWindowOffline' flag accounts for these events. If internet
  // connectivity is lost and the transport disconnected event triggers,
  // we make sure to avoid triggering this function 2 times.
  //
  // Then there is another scenario that we have to account for, namely the
  // scenario where the transport is disconnected because the socket
  // connection to the sip server is lost somehow, while there still was
  // internet. In that case, the 'window' events won't help us. That is why
  // we can call tryUntilConnected in that case, because
  // 'wasWindowOffline' will be false.
  log.debug('Transport disconnected..', this.constructor.name);
  this.emit('transportDisconnected');
  if (!this.wasWindowOffline) {
    log.debug(
      'Transport disconnected while there is internet, trying to reconnect',
      this.constructor.name
    );
    this.tryUntilConnected();
  }
  // Reset the flag so the next disconnect is judged on its own.
  this.wasWindowOffline = false;
}
/**
 * Follow-up after a reconnection attempt: reset the dying countdown when
 * we are back online, otherwise keep the recovery loop going.
 */
private async onAfterGetConnection(connected: boolean) {
  if (!connected) {
    // Still offline: retry, bypassing the "already recovering" guard.
    this.tryUntilConnected({ skipCheck: true });
    return;
  }
  // To make sure that the dying counter can be used again.
  clearInterval(this.dyingIntervalID);
  this.dyingIntervalID = undefined;
  this.dyingCounter = 60000;
  log.info('We appear to be connected.', this.constructor.name);
}
/**
 * Convert a semicolon-separated header string like
 * `SIP;cause=200;text="Call completed elsewhere"` to a Map
 * (double quotes are stripped). The previous doc said "comma-separated",
 * but the code splits on ';'.
 * @param header - The header to parse.
 * @returns A map of key/values of the header, or undefined when no header
 * was given. The return type now includes `| undefined`: the old
 * annotation `Map<string, string>` contradicted the code path that
 * returns undefined (and the caller already checks for it).
 */
private parseHeader(header?: string): Map<string, string> | undefined {
  if (!header) {
    return undefined;
  }
  return new Map(
    header
      .replace(/"/g, '')
      .split(';')
      .map(i => i.split('=') as [string, string])
  );
}
} | the_stack |
import { iavlSpec, ics23, tendermintSpec, verifyExistence, verifyNonExistence } from "@confio/ics23";
import { toAscii, toHex } from "@cosmjs/encoding";
import { firstEvent } from "@cosmjs/stream";
import { tendermint34, Tendermint34Client } from "@cosmjs/tendermint-rpc";
import { arrayContentEquals, assert, assertDefined, isNonNullObject, sleep } from "@cosmjs/utils";
import { ProofOps } from "cosmjs-types/tendermint/crypto/proof";
import { Stream } from "xstream";
type QueryExtensionSetup<P> = (base: QueryClient) => P;
/**
 * Validate a single Tendermint proof op and decode it as an ics23
 * CommitmentProof.
 *
 * @param op - The proof op returned by the ABCI query.
 * @param kind - The expected op type (e.g. "ics23:iavl" or "ics23:simple").
 * @param key - The key the proof must be about; must equal op.key exactly.
 * @throws Error when the op type or the proven key does not match.
 */
function checkAndParseOp(op: tendermint34.ProofOp, kind: string, key: Uint8Array): ics23.CommitmentProof {
  if (op.type !== kind) {
    // Fixed: the quote around the actual type is now closed (the original
    // template was `got "${op.type}` with an unbalanced quote).
    throw new Error(`Op expected to be ${kind}, got "${op.type}"`);
  }
  if (!arrayContentEquals(key, op.key)) {
    throw new Error(`Proven key different than queried key.\nQuery: ${toHex(key)}\nProven: ${toHex(op.key)}`);
  }
  return ics23.CommitmentProof.decode(op.data);
}
/**
 * Result of a raw ABCI store query with proof: the proven key/value pair,
 * its Merkle proof ops and the height at which it was proven.
 */
export interface ProvenQuery {
  /** The key that was queried (echoed back by the node). */
  readonly key: Uint8Array;
  /** The value stored under the key; empty for a non-existence proof. */
  readonly value: Uint8Array;
  /** Proof ops linking key→value in the store and the store root→app hash. */
  readonly proof: ProofOps;
  /** The block height the proof was created at. */
  readonly height: number;
}
export class QueryClient {
/** Constructs a QueryClient with 0 extensions */
public static withExtensions(tmClient: Tendermint34Client): QueryClient;
/** Constructs a QueryClient with 1 extension */
public static withExtensions<A extends object>(
tmClient: Tendermint34Client,
setupExtensionA: QueryExtensionSetup<A>,
): QueryClient & A;
/** Constructs a QueryClient with 2 extensions */
public static withExtensions<A extends object, B extends object>(
tmClient: Tendermint34Client,
setupExtensionA: QueryExtensionSetup<A>,
setupExtensionB: QueryExtensionSetup<B>,
): QueryClient & A & B;
/** Constructs a QueryClient with 3 extensions */
public static withExtensions<A extends object, B extends object, C extends object>(
tmClient: Tendermint34Client,
setupExtensionA: QueryExtensionSetup<A>,
setupExtensionB: QueryExtensionSetup<B>,
setupExtensionC: QueryExtensionSetup<C>,
): QueryClient & A & B & C;
/** Constructs a QueryClient with 4 extensions */
public static withExtensions<A extends object, B extends object, C extends object, D extends object>(
tmClient: Tendermint34Client,
setupExtensionA: QueryExtensionSetup<A>,
setupExtensionB: QueryExtensionSetup<B>,
setupExtensionC: QueryExtensionSetup<C>,
setupExtensionD: QueryExtensionSetup<D>,
): QueryClient & A & B & C & D;
/** Constructs a QueryClient with 5 extensions */
public static withExtensions<
A extends object,
B extends object,
C extends object,
D extends object,
E extends object,
>(
tmClient: Tendermint34Client,
setupExtensionA: QueryExtensionSetup<A>,
setupExtensionB: QueryExtensionSetup<B>,
setupExtensionC: QueryExtensionSetup<C>,
setupExtensionD: QueryExtensionSetup<D>,
setupExtensionE: QueryExtensionSetup<E>,
): QueryClient & A & B & C & D & E;
/** Constructs a QueryClient with 6 extensions */
public static withExtensions<
A extends object,
B extends object,
C extends object,
D extends object,
E extends object,
F extends object,
>(
tmClient: Tendermint34Client,
setupExtensionA: QueryExtensionSetup<A>,
setupExtensionB: QueryExtensionSetup<B>,
setupExtensionC: QueryExtensionSetup<C>,
setupExtensionD: QueryExtensionSetup<D>,
setupExtensionE: QueryExtensionSetup<E>,
setupExtensionF: QueryExtensionSetup<F>,
): QueryClient & A & B & C & D & E & F;
/** Constructs a QueryClient with 7 extensions */
public static withExtensions<
A extends object,
B extends object,
C extends object,
D extends object,
E extends object,
F extends object,
G extends object,
>(
tmClient: Tendermint34Client,
setupExtensionA: QueryExtensionSetup<A>,
setupExtensionB: QueryExtensionSetup<B>,
setupExtensionC: QueryExtensionSetup<C>,
setupExtensionD: QueryExtensionSetup<D>,
setupExtensionE: QueryExtensionSetup<E>,
setupExtensionF: QueryExtensionSetup<F>,
setupExtensionG: QueryExtensionSetup<G>,
): QueryClient & A & B & C & D & E & F & G;
/** Constructs a QueryClient with 8 extensions */
public static withExtensions<
A extends object,
B extends object,
C extends object,
D extends object,
E extends object,
F extends object,
G extends object,
H extends object,
>(
tmClient: Tendermint34Client,
setupExtensionA: QueryExtensionSetup<A>,
setupExtensionB: QueryExtensionSetup<B>,
setupExtensionC: QueryExtensionSetup<C>,
setupExtensionD: QueryExtensionSetup<D>,
setupExtensionE: QueryExtensionSetup<E>,
setupExtensionF: QueryExtensionSetup<F>,
setupExtensionG: QueryExtensionSetup<G>,
setupExtensionH: QueryExtensionSetup<H>,
): QueryClient & A & B & C & D & E & F & G & H;
/** Constructs a QueryClient with 9 extensions */
public static withExtensions<
A extends object,
B extends object,
C extends object,
D extends object,
E extends object,
F extends object,
G extends object,
H extends object,
I extends object,
>(
tmClient: Tendermint34Client,
setupExtensionA: QueryExtensionSetup<A>,
setupExtensionB: QueryExtensionSetup<B>,
setupExtensionC: QueryExtensionSetup<C>,
setupExtensionD: QueryExtensionSetup<D>,
setupExtensionE: QueryExtensionSetup<E>,
setupExtensionF: QueryExtensionSetup<F>,
setupExtensionG: QueryExtensionSetup<G>,
setupExtensionH: QueryExtensionSetup<H>,
setupExtensionI: QueryExtensionSetup<I>,
): QueryClient & A & B & C & D & E & F & G & H & I;
/** Constructs a QueryClient with 10 extensions */
public static withExtensions<
A extends object,
B extends object,
C extends object,
D extends object,
E extends object,
F extends object,
G extends object,
H extends object,
I extends object,
J extends object,
>(
tmClient: Tendermint34Client,
setupExtensionA: QueryExtensionSetup<A>,
setupExtensionB: QueryExtensionSetup<B>,
setupExtensionC: QueryExtensionSetup<C>,
setupExtensionD: QueryExtensionSetup<D>,
setupExtensionE: QueryExtensionSetup<E>,
setupExtensionF: QueryExtensionSetup<F>,
setupExtensionG: QueryExtensionSetup<G>,
setupExtensionH: QueryExtensionSetup<H>,
setupExtensionI: QueryExtensionSetup<I>,
setupExtensionJ: QueryExtensionSetup<J>,
): QueryClient & A & B & C & D & E & F & G & H & I & J;
/** Constructs a QueryClient with 11 extensions */
public static withExtensions<
A extends object,
B extends object,
C extends object,
D extends object,
E extends object,
F extends object,
G extends object,
H extends object,
I extends object,
J extends object,
K extends object,
>(
tmClient: Tendermint34Client,
setupExtensionA: QueryExtensionSetup<A>,
setupExtensionB: QueryExtensionSetup<B>,
setupExtensionC: QueryExtensionSetup<C>,
setupExtensionD: QueryExtensionSetup<D>,
setupExtensionE: QueryExtensionSetup<E>,
setupExtensionF: QueryExtensionSetup<F>,
setupExtensionG: QueryExtensionSetup<G>,
setupExtensionH: QueryExtensionSetup<H>,
setupExtensionI: QueryExtensionSetup<I>,
setupExtensionJ: QueryExtensionSetup<J>,
setupExtensionK: QueryExtensionSetup<K>,
): QueryClient & A & B & C & D & E & F & G & H & I & J & K;
/** Constructs a QueryClient with 12 extensions */
public static withExtensions<
A extends object,
B extends object,
C extends object,
D extends object,
E extends object,
F extends object,
G extends object,
H extends object,
I extends object,
J extends object,
K extends object,
L extends object,
>(
tmClient: Tendermint34Client,
setupExtensionA: QueryExtensionSetup<A>,
setupExtensionB: QueryExtensionSetup<B>,
setupExtensionC: QueryExtensionSetup<C>,
setupExtensionD: QueryExtensionSetup<D>,
setupExtensionE: QueryExtensionSetup<E>,
setupExtensionF: QueryExtensionSetup<F>,
setupExtensionG: QueryExtensionSetup<G>,
setupExtensionH: QueryExtensionSetup<H>,
setupExtensionI: QueryExtensionSetup<I>,
setupExtensionJ: QueryExtensionSetup<J>,
setupExtensionK: QueryExtensionSetup<K>,
setupExtensionL: QueryExtensionSetup<L>,
): QueryClient & A & B & C & D & E & F & G & H & I & J & K & L;
/** Constructs a QueryClient with 13 extensions */
public static withExtensions<
A extends object,
B extends object,
C extends object,
D extends object,
E extends object,
F extends object,
G extends object,
H extends object,
I extends object,
J extends object,
K extends object,
L extends object,
M extends object,
>(
tmClient: Tendermint34Client,
setupExtensionA: QueryExtensionSetup<A>,
setupExtensionB: QueryExtensionSetup<B>,
setupExtensionC: QueryExtensionSetup<C>,
setupExtensionD: QueryExtensionSetup<D>,
setupExtensionE: QueryExtensionSetup<E>,
setupExtensionF: QueryExtensionSetup<F>,
setupExtensionG: QueryExtensionSetup<G>,
setupExtensionH: QueryExtensionSetup<H>,
setupExtensionI: QueryExtensionSetup<I>,
setupExtensionJ: QueryExtensionSetup<J>,
setupExtensionK: QueryExtensionSetup<K>,
setupExtensionL: QueryExtensionSetup<L>,
setupExtensionM: QueryExtensionSetup<M>,
): QueryClient & A & B & C & D & E & F & G & H & I & J & K & L & M;
/** Constructs a QueryClient with 14 extensions */
public static withExtensions<
A extends object,
B extends object,
C extends object,
D extends object,
E extends object,
F extends object,
G extends object,
H extends object,
I extends object,
J extends object,
K extends object,
L extends object,
M extends object,
N extends object,
>(
tmClient: Tendermint34Client,
setupExtensionA: QueryExtensionSetup<A>,
setupExtensionB: QueryExtensionSetup<B>,
setupExtensionC: QueryExtensionSetup<C>,
setupExtensionD: QueryExtensionSetup<D>,
setupExtensionE: QueryExtensionSetup<E>,
setupExtensionF: QueryExtensionSetup<F>,
setupExtensionG: QueryExtensionSetup<G>,
setupExtensionH: QueryExtensionSetup<H>,
setupExtensionI: QueryExtensionSetup<I>,
setupExtensionJ: QueryExtensionSetup<J>,
setupExtensionK: QueryExtensionSetup<K>,
setupExtensionL: QueryExtensionSetup<L>,
setupExtensionM: QueryExtensionSetup<M>,
setupExtensionN: QueryExtensionSetup<N>,
): QueryClient & A & B & C & D & E & F & G & H & I & J & K & L & M & N;
/** Constructs a QueryClient with 15 extensions */
public static withExtensions<
A extends object,
B extends object,
C extends object,
D extends object,
E extends object,
F extends object,
G extends object,
H extends object,
I extends object,
J extends object,
K extends object,
L extends object,
M extends object,
N extends object,
O extends object,
>(
tmClient: Tendermint34Client,
setupExtensionA: QueryExtensionSetup<A>,
setupExtensionB: QueryExtensionSetup<B>,
setupExtensionC: QueryExtensionSetup<C>,
setupExtensionD: QueryExtensionSetup<D>,
setupExtensionE: QueryExtensionSetup<E>,
setupExtensionF: QueryExtensionSetup<F>,
setupExtensionG: QueryExtensionSetup<G>,
setupExtensionH: QueryExtensionSetup<H>,
setupExtensionI: QueryExtensionSetup<I>,
setupExtensionJ: QueryExtensionSetup<J>,
setupExtensionK: QueryExtensionSetup<K>,
setupExtensionL: QueryExtensionSetup<L>,
setupExtensionM: QueryExtensionSetup<M>,
setupExtensionN: QueryExtensionSetup<N>,
setupExtensionO: QueryExtensionSetup<O>,
): QueryClient & A & B & C & D & E & F & G & H & I & J & K & L & M & N & O;
/** Constructs a QueryClient with 16 extensions */
public static withExtensions<
A extends object,
B extends object,
C extends object,
D extends object,
E extends object,
F extends object,
G extends object,
H extends object,
I extends object,
J extends object,
K extends object,
L extends object,
M extends object,
N extends object,
O extends object,
P extends object,
>(
tmClient: Tendermint34Client,
setupExtensionA: QueryExtensionSetup<A>,
setupExtensionB: QueryExtensionSetup<B>,
setupExtensionC: QueryExtensionSetup<C>,
setupExtensionD: QueryExtensionSetup<D>,
setupExtensionE: QueryExtensionSetup<E>,
setupExtensionF: QueryExtensionSetup<F>,
setupExtensionG: QueryExtensionSetup<G>,
setupExtensionH: QueryExtensionSetup<H>,
setupExtensionI: QueryExtensionSetup<I>,
setupExtensionJ: QueryExtensionSetup<J>,
setupExtensionK: QueryExtensionSetup<K>,
setupExtensionL: QueryExtensionSetup<L>,
setupExtensionM: QueryExtensionSetup<M>,
setupExtensionN: QueryExtensionSetup<N>,
setupExtensionO: QueryExtensionSetup<O>,
setupExtensionP: QueryExtensionSetup<P>,
): QueryClient & A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P;
/** Constructs a QueryClient with 17 extensions */
public static withExtensions<
A extends object,
B extends object,
C extends object,
D extends object,
E extends object,
F extends object,
G extends object,
H extends object,
I extends object,
J extends object,
K extends object,
L extends object,
M extends object,
N extends object,
O extends object,
P extends object,
Q extends object,
>(
tmClient: Tendermint34Client,
setupExtensionA: QueryExtensionSetup<A>,
setupExtensionB: QueryExtensionSetup<B>,
setupExtensionC: QueryExtensionSetup<C>,
setupExtensionD: QueryExtensionSetup<D>,
setupExtensionE: QueryExtensionSetup<E>,
setupExtensionF: QueryExtensionSetup<F>,
setupExtensionG: QueryExtensionSetup<G>,
setupExtensionH: QueryExtensionSetup<H>,
setupExtensionI: QueryExtensionSetup<I>,
setupExtensionJ: QueryExtensionSetup<J>,
setupExtensionK: QueryExtensionSetup<K>,
setupExtensionL: QueryExtensionSetup<L>,
setupExtensionM: QueryExtensionSetup<M>,
setupExtensionN: QueryExtensionSetup<N>,
setupExtensionO: QueryExtensionSetup<O>,
setupExtensionP: QueryExtensionSetup<P>,
setupExtensionQ: QueryExtensionSetup<Q>,
): QueryClient & A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q;
/** Constructs a QueryClient with 18 extensions */
public static withExtensions<
A extends object,
B extends object,
C extends object,
D extends object,
E extends object,
F extends object,
G extends object,
H extends object,
I extends object,
J extends object,
K extends object,
L extends object,
M extends object,
N extends object,
O extends object,
P extends object,
Q extends object,
R extends object,
>(
tmClient: Tendermint34Client,
setupExtensionA: QueryExtensionSetup<A>,
setupExtensionB: QueryExtensionSetup<B>,
setupExtensionC: QueryExtensionSetup<C>,
setupExtensionD: QueryExtensionSetup<D>,
setupExtensionE: QueryExtensionSetup<E>,
setupExtensionF: QueryExtensionSetup<F>,
setupExtensionG: QueryExtensionSetup<G>,
setupExtensionH: QueryExtensionSetup<H>,
setupExtensionI: QueryExtensionSetup<I>,
setupExtensionJ: QueryExtensionSetup<J>,
setupExtensionK: QueryExtensionSetup<K>,
setupExtensionL: QueryExtensionSetup<L>,
setupExtensionM: QueryExtensionSetup<M>,
setupExtensionN: QueryExtensionSetup<N>,
setupExtensionO: QueryExtensionSetup<O>,
setupExtensionP: QueryExtensionSetup<P>,
setupExtensionQ: QueryExtensionSetup<Q>,
setupExtensionR: QueryExtensionSetup<R>,
): QueryClient & A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R;
/**
 * Implementation backing all withExtensions overloads: instantiate a
 * QueryClient, run every extension setup against it, and attach each
 * extension's modules onto the client. Modules contributed by several
 * extensions under the same key are merged.
 */
public static withExtensions(
  tmClient: Tendermint34Client,
  ...extensionSetups: Array<QueryExtensionSetup<object>>
): any {
  const client = new QueryClient(tmClient);
  // Run all setups first, then attach — setups only ever see the bare client.
  const extensions = extensionSetups.map((setupExtension) => setupExtension(client));
  extensions.forEach((extension) => {
    assert(isNonNullObject(extension), `Extension must be a non-null object`);
    Object.entries(extension).forEach(([moduleKey, moduleValue]) => {
      assert(
        isNonNullObject(moduleValue),
        `Module must be a non-null object. Found type ${typeof moduleValue} for module "${moduleKey}".`,
      );
      // Spread any previously attached module first so later extensions
      // extend rather than replace it.
      (client as any)[moduleKey] = {
        ...((client as any)[moduleKey] || {}),
        ...moduleValue,
      };
    });
  });
  return client;
}
/**
 * @param tmClient - Low-level Tendermint RPC client used for all ABCI
 * queries. Declared as a parameter property, replacing the separate field
 * declaration plus constructor assignment boilerplate.
 */
public constructor(private readonly tmClient: Tendermint34Client) {}
/**
 * Query a value from a store and verify the returned Merkle proofs: the
 * sub-proof ties the key to the store's root, and the store proof ties
 * that root to the app hash of the block *after* the proven height.
 *
 * @param store - The store (module StoreKey) to read from.
 * @param key - The raw key to look up.
 * @param desiredHeight - Optional height to query at.
 * @returns The proven value; empty when the key does not exist (in which
 * case a non-existence proof was verified instead).
 */
public async queryVerified(store: string, key: Uint8Array, desiredHeight?: number): Promise<Uint8Array> {
  const { height, proof, value } = await this.queryRawProof(store, key, desiredHeight);
  const subProof = checkAndParseOp(proof.ops[0], "ics23:iavl", key);
  const storeProof = checkAndParseOp(proof.ops[1], "ics23:simple", toAscii(store));
  // this must always be existence, if the store is not a typo
  assert(storeProof.exist);
  assert(storeProof.exist.value);
  // this may be exist or non-exist, depends on response
  if (!value || value.length === 0) {
    // non-existence check
    assert(subProof.nonexist);
    // the subproof must map the desired key to the "value" of the storeProof
    verifyNonExistence(subProof.nonexist, iavlSpec, storeProof.exist.value, key);
  } else {
    // existence check
    assert(subProof.exist);
    assert(subProof.exist.value);
    // the subproof must map the desired key to the "value" of the storeProof
    verifyExistence(subProof.exist, iavlSpec, storeProof.exist.value, key, value);
  }
  // the store proof must map its declared value (root of subProof) to the appHash of the next block
  const header = await this.getNextHeader(height);
  verifyExistence(storeProof.exist, tendermintSpec, header.appHash, toAscii(store), storeProof.exist.value);
  return value;
}
/**
 * Perform an ABCI store query requesting a proof, and sanity-check the
 * response without fully verifying it (see queryVerified for that).
 *
 * @param store - The store (module StoreKey) to read from.
 * @param queryKey - The raw key to look up.
 * @param desiredHeight - Optional height to query at.
 * @returns The echoed key, value, height and a cloned copy of the proof ops.
 * @throws Error when the query returns a non-zero code, the echoed key
 * differs from the queried one, no height is returned, or there are not
 * exactly 2 proof ops.
 */
public async queryRawProof(
  store: string,
  queryKey: Uint8Array,
  desiredHeight?: number,
): Promise<ProvenQuery> {
  const { key, value, height, proof, code, log } = await this.tmClient.abciQuery({
    // we need the StoreKey for the module, not the module name
    // https://github.com/cosmos/cosmos-sdk/blob/8cab43c8120fec5200c3459cbf4a92017bb6f287/x/auth/types/keys.go#L12
    path: `/store/${store}/key`,
    data: queryKey,
    prove: true,
    height: desiredHeight,
  });
  if (code) {
    throw new Error(`Query failed with (${code}): ${log}`);
  }
  if (!arrayContentEquals(queryKey, key)) {
    throw new Error(`Response key ${toHex(key)} doesn't match query key ${toHex(queryKey)}`);
  }
  if (!height) {
    throw new Error("No query height returned");
  }
  if (!proof || proof.ops.length !== 2) {
    throw new Error(`Expected 2 proof ops, got ${proof?.ops.length ?? 0}. Are you using stargate?`);
  }
  // we don't need the results, but we can ensure the data is the proper format
  checkAndParseOp(proof.ops[0], "ics23:iavl", key);
  checkAndParseOp(proof.ops[1], "ics23:simple", toAscii(store));
  return {
    key: key,
    value: value,
    height: height,
    // need to clone this: readonly input / writeable output
    proof: {
      ops: [...proof.ops],
    },
  };
}
/**
 * Issues a plain (proof-less, unverified) ABCI query and returns the raw
 * response bytes. Throws when the backend reports a non-zero error code.
 */
public async queryUnverified(path: string, request: Uint8Array): Promise<Uint8Array> {
    const { code, log, value } = await this.tmClient.abciQuery({
        path: path,
        data: request,
        prove: false,
    });
    if (code) {
        throw new Error(`Query failed with (${code}): ${log}`);
    }
    return value;
}
/**
 * Returns the header for block `height + 1`, whose appHash commits to the
 * state queried at `height`.
 *
 * Waits until that block exists: first via a new-block-header subscription
 * when the client supports WebSocket, otherwise (or if the subscribed header
 * already skipped past n+1) by polling `blockchain()` once per second.
 *
 * @throws if `height` is undefined or 0.
 */
private async getNextHeader(height?: number): Promise<tendermint34.Header> {
    assertDefined(height);
    if (height === 0) {
        throw new Error("Query returned height 0, cannot prove it");
    }

    const searchHeight = height + 1;
    let nextHeader: tendermint34.Header | undefined;
    let headersSubscription: Stream<tendermint34.NewBlockHeaderEvent> | undefined;
    try {
        headersSubscription = this.tmClient.subscribeNewBlockHeader();
    } catch {
        // Ignore exception caused by non-WebSocket Tendermint clients
    }

    if (headersSubscription) {
        const firstHeader = await firstEvent(headersSubscription);
        // The first header we get might not be n+1 but n+2 or even higher. In such cases we fall back on a query.
        if (firstHeader.height === searchHeight) {
            nextHeader = firstHeader;
        }
    }

    while (!nextHeader) {
        // start from current height to avoid backend error for minHeight in the future
        const correctHeader = (await this.tmClient.blockchain(height, searchHeight)).blockMetas
            .map((meta) => meta.header)
            .find((h) => h.height === searchHeight);
        if (correctHeader) {
            nextHeader = correctHeader;
        } else {
            // Block n+1 not produced yet — poll again shortly.
            await sleep(1000);
        }
    }

    assert(nextHeader.height === searchHeight, "Got wrong header. This is a bug in the logic above.");
    return nextHeader;
}
} | the_stack |
import { CachedArray } from '../../memop/cached-array';
import { error, errorID } from '../../platform';
import { debug } from '../../platform/debug';
import {
BufferUsageBit, ColorMask, CullMode, DynamicStateFlagBit, Filter, Format, TextureType, Type, FormatInfo,
FormatInfos, FormatSize, LoadOp, MemoryUsageBit, SampleCount, ShaderStageFlagBit, TextureFlagBit,
Color, Rect, BufferTextureCopy, BufferSource, DrawInfo, IndirectBuffer, UniformBlock, DynamicStates,
UniformSamplerTexture,
} from '../base/define';
import { WebGL2EXT } from './webgl2-define';
import { WebGL2CommandAllocator } from './webgl2-command-allocator';
import { WebGL2Device } from './webgl2-device';
import {
IWebGL2GPUInputAssembler,
IWebGL2Attrib,
IWebGL2GPUDescriptorSet,
IWebGL2GPUBuffer,
IWebGL2GPUFramebuffer,
IWebGL2GPUInput,
IWebGL2GPUPipelineState,
IWebGL2GPUSampler,
IWebGL2GPUShader,
IWebGL2GPUTexture,
IWebGL2GPUUniformBlock,
IWebGL2GPUUniformSamplerTexture,
IWebGL2GPURenderPass,
} from './webgl2-gpu-objects';
/**
 * Address-mode lookup table mapping the GFX address enum (by index) to WebGL
 * wrap modes.
 * NOTE(review): the last two entries are both CLAMP_TO_EDGE — presumably the
 * border-clamp mode, unsupported by WebGL, falls back to edge clamping;
 * confirm against the GFX Address enum ordering.
 */
const WebGLWraps: GLenum[] = [
    0x2901, // WebGLRenderingContext.REPEAT
    0x8370, // WebGLRenderingContext.MIRRORED_REPEAT
    0x812F, // WebGLRenderingContext.CLAMP_TO_EDGE
    0x812F, // WebGLRenderingContext.CLAMP_TO_EDGE
];
// Module-level scratch vector (4 floats) — presumably reused by uniform-upload
// code later in this file to avoid per-call allocations.
const _f32v4 = new Float32Array(4);

/**
 * True when two floats differ by more than the 1e-6 tolerance used for
 * redundant-state filtering. (Misspelled name kept: it is referenced
 * elsewhere in this file.)
 */
function CmpF32NotEuqal (a: number, b: number): boolean {
    return Math.abs(a - b) > 0.000001;
}
/**
 * Maps a GFX texture Format to the WebGL2 per-channel data type
 * (`gl.UNSIGNED_BYTE`, `gl.FLOAT`, ...) used as the `type` argument of
 * texture upload calls. Unknown formats silently fall back to
 * `gl.UNSIGNED_BYTE`.
 */
export function GFXFormatToWebGLType (format: Format, gl: WebGL2RenderingContext): GLenum {
    switch (format) {
    case Format.R8: return gl.UNSIGNED_BYTE;
    case Format.R8SN: return gl.BYTE;
    case Format.R8UI: return gl.UNSIGNED_BYTE;
    case Format.R8I: return gl.BYTE;
    case Format.R16F: return gl.HALF_FLOAT;
    case Format.R16UI: return gl.UNSIGNED_SHORT;
    case Format.R16I: return gl.SHORT;
    case Format.R32F: return gl.FLOAT;
    case Format.R32UI: return gl.UNSIGNED_INT;
    case Format.R32I: return gl.INT;
    case Format.RG8: return gl.UNSIGNED_BYTE;
    case Format.RG8SN: return gl.BYTE;
    case Format.RG8UI: return gl.UNSIGNED_BYTE;
    case Format.RG8I: return gl.BYTE;
    case Format.RG16F: return gl.HALF_FLOAT;
    case Format.RG16UI: return gl.UNSIGNED_SHORT;
    case Format.RG16I: return gl.SHORT;
    case Format.RG32F: return gl.FLOAT;
    case Format.RG32UI: return gl.UNSIGNED_INT;
    case Format.RG32I: return gl.INT;
    case Format.RGB8: return gl.UNSIGNED_BYTE;
    case Format.SRGB8: return gl.UNSIGNED_BYTE;
    case Format.RGB8SN: return gl.BYTE;
    case Format.RGB8UI: return gl.UNSIGNED_BYTE;
    case Format.RGB8I: return gl.BYTE;
    case Format.RGB16F: return gl.HALF_FLOAT;
    case Format.RGB16UI: return gl.UNSIGNED_SHORT;
    case Format.RGB16I: return gl.SHORT;
    case Format.RGB32F: return gl.FLOAT;
    case Format.RGB32UI: return gl.UNSIGNED_INT;
    case Format.RGB32I: return gl.INT;
    case Format.BGRA8: return gl.UNSIGNED_BYTE;
    case Format.RGBA8: return gl.UNSIGNED_BYTE;
    case Format.SRGB8_A8: return gl.UNSIGNED_BYTE;
    case Format.RGBA8SN: return gl.BYTE;
    case Format.RGBA8UI: return gl.UNSIGNED_BYTE;
    case Format.RGBA8I: return gl.BYTE;
    case Format.RGBA16F: return gl.HALF_FLOAT;
    case Format.RGBA16UI: return gl.UNSIGNED_SHORT;
    case Format.RGBA16I: return gl.SHORT;
    case Format.RGBA32F: return gl.FLOAT;
    case Format.RGBA32UI: return gl.UNSIGNED_INT;
    case Format.RGBA32I: return gl.INT;
    case Format.R5G6B5: return gl.UNSIGNED_SHORT_5_6_5;
    case Format.R11G11B10F: return gl.UNSIGNED_INT_10F_11F_11F_REV;
    case Format.RGB5A1: return gl.UNSIGNED_SHORT_5_5_5_1;
    case Format.RGBA4: return gl.UNSIGNED_SHORT_4_4_4_4;
    case Format.RGB10A2: return gl.UNSIGNED_INT_2_10_10_10_REV;
    case Format.RGB10A2UI: return gl.UNSIGNED_INT_2_10_10_10_REV;
    case Format.RGB9E5: return gl.FLOAT;
    case Format.DEPTH: return gl.FLOAT;
    case Format.DEPTH_STENCIL: return gl.FLOAT_32_UNSIGNED_INT_24_8_REV;
    // Compressed formats below: presumably the type argument is ignored by
    // compressed upload paths, so UNSIGNED_BYTE/BYTE act as placeholders —
    // confirm against the texture upload code.
    case Format.BC1: return gl.UNSIGNED_BYTE;
    case Format.BC1_SRGB: return gl.UNSIGNED_BYTE;
    case Format.BC2: return gl.UNSIGNED_BYTE;
    case Format.BC2_SRGB: return gl.UNSIGNED_BYTE;
    case Format.BC3: return gl.UNSIGNED_BYTE;
    case Format.BC3_SRGB: return gl.UNSIGNED_BYTE;
    case Format.BC4: return gl.UNSIGNED_BYTE;
    case Format.BC4_SNORM: return gl.BYTE;
    case Format.BC5: return gl.UNSIGNED_BYTE;
    case Format.BC5_SNORM: return gl.BYTE;
    case Format.BC6H_SF16: return gl.FLOAT;
    case Format.BC6H_UF16: return gl.FLOAT;
    case Format.BC7: return gl.UNSIGNED_BYTE;
    case Format.BC7_SRGB: return gl.UNSIGNED_BYTE;
    case Format.ETC_RGB8: return gl.UNSIGNED_BYTE;
    case Format.ETC2_RGB8: return gl.UNSIGNED_BYTE;
    case Format.ETC2_SRGB8: return gl.UNSIGNED_BYTE;
    case Format.ETC2_RGB8_A1: return gl.UNSIGNED_BYTE;
    case Format.ETC2_SRGB8_A1: return gl.UNSIGNED_BYTE;
    case Format.EAC_R11: return gl.UNSIGNED_BYTE;
    case Format.EAC_R11SN: return gl.BYTE;
    case Format.EAC_RG11: return gl.UNSIGNED_BYTE;
    case Format.EAC_RG11SN: return gl.BYTE;
    case Format.PVRTC_RGB2: return gl.UNSIGNED_BYTE;
    case Format.PVRTC_RGBA2: return gl.UNSIGNED_BYTE;
    case Format.PVRTC_RGB4: return gl.UNSIGNED_BYTE;
    case Format.PVRTC_RGBA4: return gl.UNSIGNED_BYTE;
    case Format.PVRTC2_2BPP: return gl.UNSIGNED_BYTE;
    case Format.PVRTC2_4BPP: return gl.UNSIGNED_BYTE;
    case Format.ASTC_RGBA_4X4:
    case Format.ASTC_RGBA_5X4:
    case Format.ASTC_RGBA_5X5:
    case Format.ASTC_RGBA_6X5:
    case Format.ASTC_RGBA_6X6:
    case Format.ASTC_RGBA_8X5:
    case Format.ASTC_RGBA_8X6:
    case Format.ASTC_RGBA_8X8:
    case Format.ASTC_RGBA_10X5:
    case Format.ASTC_RGBA_10X6:
    case Format.ASTC_RGBA_10X8:
    case Format.ASTC_RGBA_10X10:
    case Format.ASTC_RGBA_12X10:
    case Format.ASTC_RGBA_12X12:
    case Format.ASTC_SRGBA_4X4:
    case Format.ASTC_SRGBA_5X4:
    case Format.ASTC_SRGBA_5X5:
    case Format.ASTC_SRGBA_6X5:
    case Format.ASTC_SRGBA_6X6:
    case Format.ASTC_SRGBA_8X5:
    case Format.ASTC_SRGBA_8X6:
    case Format.ASTC_SRGBA_8X8:
    case Format.ASTC_SRGBA_10X5:
    case Format.ASTC_SRGBA_10X6:
    case Format.ASTC_SRGBA_10X8:
    case Format.ASTC_SRGBA_10X10:
    case Format.ASTC_SRGBA_12X10:
    case Format.ASTC_SRGBA_12X12:
        return gl.UNSIGNED_BYTE;
    default: {
        // Unknown formats: silent fallback (no error log, unlike the sibling converters).
        return gl.UNSIGNED_BYTE;
    }
    }
}
/**
 * Maps a GFX texture Format to the sized WebGL2 internal format (the
 * `internalformat` argument of `texStorage2D`/`texImage2D`). Compressed
 * formats resolve to extension constants from `WebGL2EXT` (availability of
 * the matching extension is checked elsewhere). Unknown formats log an error
 * and fall back to `gl.RGBA`.
 */
export function GFXFormatToWebGLInternalFormat (format: Format, gl: WebGL2RenderingContext): GLenum {
    switch (format) {
    case Format.A8: return gl.ALPHA;
    case Format.L8: return gl.LUMINANCE;
    case Format.LA8: return gl.LUMINANCE_ALPHA;
    case Format.R8: return gl.R8;
    case Format.R8SN: return gl.R8_SNORM;
    case Format.R8UI: return gl.R8UI;
    case Format.R8I: return gl.R8I;
    case Format.RG8: return gl.RG8;
    case Format.RG8SN: return gl.RG8_SNORM;
    case Format.RG8UI: return gl.RG8UI;
    case Format.RG8I: return gl.RG8I;
    case Format.RGB8: return gl.RGB8;
    case Format.RGB8SN: return gl.RGB8_SNORM;
    case Format.RGB8UI: return gl.RGB8UI;
    case Format.RGB8I: return gl.RGB8I;
    // WebGL has no BGRA internal format; BGRA8 is stored as RGBA8.
    case Format.BGRA8: return gl.RGBA8;
    case Format.RGBA8: return gl.RGBA8;
    case Format.RGBA8SN: return gl.RGBA8_SNORM;
    case Format.RGBA8UI: return gl.RGBA8UI;
    case Format.RGBA8I: return gl.RGBA8I;
    case Format.R16I: return gl.R16I;
    case Format.R16UI: return gl.R16UI;
    case Format.R16F: return gl.R16F;
    case Format.RG16I: return gl.RG16I;
    case Format.RG16UI: return gl.RG16UI;
    case Format.RG16F: return gl.RG16F;
    case Format.RGB16I: return gl.RGB16I;
    case Format.RGB16UI: return gl.RGB16UI;
    case Format.RGB16F: return gl.RGB16F;
    case Format.RGBA16I: return gl.RGBA16I;
    case Format.RGBA16UI: return gl.RGBA16UI;
    case Format.RGBA16F: return gl.RGBA16F;
    case Format.R32I: return gl.R32I;
    case Format.R32UI: return gl.R32UI;
    case Format.R32F: return gl.R32F;
    case Format.RG32I: return gl.RG32I;
    case Format.RG32UI: return gl.RG32UI;
    case Format.RG32F: return gl.RG32F;
    case Format.RGB32I: return gl.RGB32I;
    case Format.RGB32UI: return gl.RGB32UI;
    case Format.RGB32F: return gl.RGB32F;
    case Format.RGBA32I: return gl.RGBA32I;
    case Format.RGBA32UI: return gl.RGBA32UI;
    case Format.RGBA32F: return gl.RGBA32F;
    case Format.R5G6B5: return gl.RGB565;
    case Format.RGB5A1: return gl.RGB5_A1;
    case Format.RGBA4: return gl.RGBA4;
    case Format.SRGB8: return gl.SRGB8;
    case Format.SRGB8_A8: return gl.SRGB8_ALPHA8;
    case Format.RGB10A2: return gl.RGB10_A2;
    case Format.RGB10A2UI: return gl.RGB10_A2UI;
    case Format.R11G11B10F: return gl.R11F_G11F_B10F;
    case Format.DEPTH: return gl.DEPTH_COMPONENT32F;
    case Format.DEPTH_STENCIL: return gl.DEPTH32F_STENCIL8;
    case Format.BC1: return WebGL2EXT.COMPRESSED_RGB_S3TC_DXT1_EXT;
    case Format.BC1_ALPHA: return WebGL2EXT.COMPRESSED_RGBA_S3TC_DXT1_EXT;
    case Format.BC1_SRGB: return WebGL2EXT.COMPRESSED_SRGB_S3TC_DXT1_EXT;
    case Format.BC1_SRGB_ALPHA: return WebGL2EXT.COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT;
    case Format.BC2: return WebGL2EXT.COMPRESSED_RGBA_S3TC_DXT3_EXT;
    case Format.BC2_SRGB: return WebGL2EXT.COMPRESSED_SRGB_ALPHA_S3TC_DXT3_EXT;
    case Format.BC3: return WebGL2EXT.COMPRESSED_RGBA_S3TC_DXT5_EXT;
    case Format.BC3_SRGB: return WebGL2EXT.COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT;
    case Format.ETC_RGB8: return WebGL2EXT.COMPRESSED_RGB_ETC1_WEBGL;
    case Format.ETC2_RGB8: return WebGL2EXT.COMPRESSED_RGB8_ETC2;
    case Format.ETC2_SRGB8: return WebGL2EXT.COMPRESSED_SRGB8_ETC2;
    case Format.ETC2_RGB8_A1: return WebGL2EXT.COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2;
    case Format.ETC2_SRGB8_A1: return WebGL2EXT.COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2;
    case Format.ETC2_RGBA8: return WebGL2EXT.COMPRESSED_RGBA8_ETC2_EAC;
    case Format.ETC2_SRGB8_A8: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ETC2_EAC;
    case Format.EAC_R11: return WebGL2EXT.COMPRESSED_R11_EAC;
    case Format.EAC_R11SN: return WebGL2EXT.COMPRESSED_SIGNED_R11_EAC;
    case Format.EAC_RG11: return WebGL2EXT.COMPRESSED_RG11_EAC;
    case Format.EAC_RG11SN: return WebGL2EXT.COMPRESSED_SIGNED_RG11_EAC;
    case Format.PVRTC_RGB2: return WebGL2EXT.COMPRESSED_RGB_PVRTC_2BPPV1_IMG;
    case Format.PVRTC_RGBA2: return WebGL2EXT.COMPRESSED_RGBA_PVRTC_2BPPV1_IMG;
    case Format.PVRTC_RGB4: return WebGL2EXT.COMPRESSED_RGB_PVRTC_4BPPV1_IMG;
    case Format.PVRTC_RGBA4: return WebGL2EXT.COMPRESSED_RGBA_PVRTC_4BPPV1_IMG;
    case Format.ASTC_RGBA_4X4: return WebGL2EXT.COMPRESSED_RGBA_ASTC_4x4_KHR;
    case Format.ASTC_RGBA_5X4: return WebGL2EXT.COMPRESSED_RGBA_ASTC_5x4_KHR;
    case Format.ASTC_RGBA_5X5: return WebGL2EXT.COMPRESSED_RGBA_ASTC_5x5_KHR;
    case Format.ASTC_RGBA_6X5: return WebGL2EXT.COMPRESSED_RGBA_ASTC_6x5_KHR;
    case Format.ASTC_RGBA_6X6: return WebGL2EXT.COMPRESSED_RGBA_ASTC_6x6_KHR;
    case Format.ASTC_RGBA_8X5: return WebGL2EXT.COMPRESSED_RGBA_ASTC_8x5_KHR;
    case Format.ASTC_RGBA_8X6: return WebGL2EXT.COMPRESSED_RGBA_ASTC_8x6_KHR;
    case Format.ASTC_RGBA_8X8: return WebGL2EXT.COMPRESSED_RGBA_ASTC_8x8_KHR;
    case Format.ASTC_RGBA_10X5: return WebGL2EXT.COMPRESSED_RGBA_ASTC_10x5_KHR;
    case Format.ASTC_RGBA_10X6: return WebGL2EXT.COMPRESSED_RGBA_ASTC_10x6_KHR;
    case Format.ASTC_RGBA_10X8: return WebGL2EXT.COMPRESSED_RGBA_ASTC_10x8_KHR;
    case Format.ASTC_RGBA_10X10: return WebGL2EXT.COMPRESSED_RGBA_ASTC_10x10_KHR;
    case Format.ASTC_RGBA_12X10: return WebGL2EXT.COMPRESSED_RGBA_ASTC_12x10_KHR;
    case Format.ASTC_RGBA_12X12: return WebGL2EXT.COMPRESSED_RGBA_ASTC_12x12_KHR;
    case Format.ASTC_SRGBA_4X4: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR;
    case Format.ASTC_SRGBA_5X4: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_5x4_KHR;
    case Format.ASTC_SRGBA_5X5: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_5x5_KHR;
    case Format.ASTC_SRGBA_6X5: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_6x5_KHR;
    case Format.ASTC_SRGBA_6X6: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_6x6_KHR;
    case Format.ASTC_SRGBA_8X5: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_8x5_KHR;
    case Format.ASTC_SRGBA_8X6: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_8x6_KHR;
    case Format.ASTC_SRGBA_8X8: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_8x8_KHR;
    case Format.ASTC_SRGBA_10X5: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_10x5_KHR;
    case Format.ASTC_SRGBA_10X6: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_10x6_KHR;
    case Format.ASTC_SRGBA_10X8: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_10x8_KHR;
    case Format.ASTC_SRGBA_10X10: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_10x10_KHR;
    case Format.ASTC_SRGBA_12X10: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_12x10_KHR;
    case Format.ASTC_SRGBA_12X12: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR;
    default: {
        console.error('Unsupported Format, convert to WebGL internal format failed.');
        return gl.RGBA;
    }
    }
}
/**
 * Maps a GFX texture Format to the unsized WebGL pixel `format` argument of
 * texture upload calls (channel layout only; the sized internal format comes
 * from GFXFormatToWebGLInternalFormat). Compressed formats resolve to
 * extension constants. Unknown formats log an error and fall back to
 * `gl.RGBA`.
 */
export function GFXFormatToWebGLFormat (format: Format, gl: WebGL2RenderingContext): GLenum {
    switch (format) {
    case Format.A8: return gl.ALPHA;
    case Format.L8: return gl.LUMINANCE;
    case Format.LA8: return gl.LUMINANCE_ALPHA;
    case Format.R8:
    case Format.R8SN: return gl.RED;
    case Format.R8UI:
    case Format.R8I: return gl.RED;
    case Format.RG8:
    case Format.RG8SN:
    case Format.RG8UI:
    case Format.RG8I: return gl.RG;
    case Format.RGB8:
    case Format.RGB8SN:
    case Format.RGB8UI:
    case Format.RGB8I: return gl.RGB;
    case Format.BGRA8:
    case Format.RGBA8:
    case Format.RGBA8SN:
    case Format.RGBA8UI:
    case Format.RGBA8I: return gl.RGBA;
    case Format.R16UI:
    case Format.R16I:
    case Format.R16F: return gl.RED;
    case Format.RG16UI:
    case Format.RG16I:
    case Format.RG16F: return gl.RG;
    case Format.RGB16UI:
    case Format.RGB16I:
    case Format.RGB16F: return gl.RGB;
    case Format.RGBA16UI:
    case Format.RGBA16I:
    case Format.RGBA16F: return gl.RGBA;
    case Format.R32UI:
    case Format.R32I:
    case Format.R32F: return gl.RED;
    case Format.RG32UI:
    case Format.RG32I:
    case Format.RG32F: return gl.RG;
    case Format.RGB32UI:
    case Format.RGB32I:
    case Format.RGB32F: return gl.RGB;
    case Format.RGBA32UI:
    case Format.RGBA32I:
    case Format.RGBA32F: return gl.RGBA;
    case Format.RGB10A2: return gl.RGBA;
    case Format.R11G11B10F: return gl.RGB;
    case Format.R5G6B5: return gl.RGB;
    case Format.RGB5A1: return gl.RGBA;
    case Format.RGBA4: return gl.RGBA;
    case Format.SRGB8: return gl.RGB;
    case Format.SRGB8_A8: return gl.RGBA;
    case Format.DEPTH: return gl.DEPTH_COMPONENT;
    case Format.DEPTH_STENCIL: return gl.DEPTH_STENCIL;
    case Format.BC1: return WebGL2EXT.COMPRESSED_RGB_S3TC_DXT1_EXT;
    case Format.BC1_ALPHA: return WebGL2EXT.COMPRESSED_RGBA_S3TC_DXT1_EXT;
    case Format.BC1_SRGB: return WebGL2EXT.COMPRESSED_SRGB_S3TC_DXT1_EXT;
    case Format.BC1_SRGB_ALPHA: return WebGL2EXT.COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT;
    case Format.BC2: return WebGL2EXT.COMPRESSED_RGBA_S3TC_DXT3_EXT;
    case Format.BC2_SRGB: return WebGL2EXT.COMPRESSED_SRGB_ALPHA_S3TC_DXT3_EXT;
    case Format.BC3: return WebGL2EXT.COMPRESSED_RGBA_S3TC_DXT5_EXT;
    case Format.BC3_SRGB: return WebGL2EXT.COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT;
    case Format.ETC_RGB8: return WebGL2EXT.COMPRESSED_RGB_ETC1_WEBGL;
    case Format.ETC2_RGB8: return WebGL2EXT.COMPRESSED_RGB8_ETC2;
    case Format.ETC2_SRGB8: return WebGL2EXT.COMPRESSED_SRGB8_ETC2;
    case Format.ETC2_RGB8_A1: return WebGL2EXT.COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2;
    case Format.ETC2_SRGB8_A1: return WebGL2EXT.COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2;
    case Format.ETC2_RGBA8: return WebGL2EXT.COMPRESSED_RGBA8_ETC2_EAC;
    case Format.ETC2_SRGB8_A8: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ETC2_EAC;
    case Format.EAC_R11: return WebGL2EXT.COMPRESSED_R11_EAC;
    case Format.EAC_R11SN: return WebGL2EXT.COMPRESSED_SIGNED_R11_EAC;
    case Format.EAC_RG11: return WebGL2EXT.COMPRESSED_RG11_EAC;
    case Format.EAC_RG11SN: return WebGL2EXT.COMPRESSED_SIGNED_RG11_EAC;
    case Format.PVRTC_RGB2: return WebGL2EXT.COMPRESSED_RGB_PVRTC_2BPPV1_IMG;
    case Format.PVRTC_RGBA2: return WebGL2EXT.COMPRESSED_RGBA_PVRTC_2BPPV1_IMG;
    case Format.PVRTC_RGB4: return WebGL2EXT.COMPRESSED_RGB_PVRTC_4BPPV1_IMG;
    case Format.PVRTC_RGBA4: return WebGL2EXT.COMPRESSED_RGBA_PVRTC_4BPPV1_IMG;
    case Format.ASTC_RGBA_4X4: return WebGL2EXT.COMPRESSED_RGBA_ASTC_4x4_KHR;
    case Format.ASTC_RGBA_5X4: return WebGL2EXT.COMPRESSED_RGBA_ASTC_5x4_KHR;
    case Format.ASTC_RGBA_5X5: return WebGL2EXT.COMPRESSED_RGBA_ASTC_5x5_KHR;
    case Format.ASTC_RGBA_6X5: return WebGL2EXT.COMPRESSED_RGBA_ASTC_6x5_KHR;
    case Format.ASTC_RGBA_6X6: return WebGL2EXT.COMPRESSED_RGBA_ASTC_6x6_KHR;
    case Format.ASTC_RGBA_8X5: return WebGL2EXT.COMPRESSED_RGBA_ASTC_8x5_KHR;
    case Format.ASTC_RGBA_8X6: return WebGL2EXT.COMPRESSED_RGBA_ASTC_8x6_KHR;
    case Format.ASTC_RGBA_8X8: return WebGL2EXT.COMPRESSED_RGBA_ASTC_8x8_KHR;
    case Format.ASTC_RGBA_10X5: return WebGL2EXT.COMPRESSED_RGBA_ASTC_10x5_KHR;
    case Format.ASTC_RGBA_10X6: return WebGL2EXT.COMPRESSED_RGBA_ASTC_10x6_KHR;
    case Format.ASTC_RGBA_10X8: return WebGL2EXT.COMPRESSED_RGBA_ASTC_10x8_KHR;
    case Format.ASTC_RGBA_10X10: return WebGL2EXT.COMPRESSED_RGBA_ASTC_10x10_KHR;
    case Format.ASTC_RGBA_12X10: return WebGL2EXT.COMPRESSED_RGBA_ASTC_12x10_KHR;
    case Format.ASTC_RGBA_12X12: return WebGL2EXT.COMPRESSED_RGBA_ASTC_12x12_KHR;
    case Format.ASTC_SRGBA_4X4: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR;
    case Format.ASTC_SRGBA_5X4: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_5x4_KHR;
    case Format.ASTC_SRGBA_5X5: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_5x5_KHR;
    case Format.ASTC_SRGBA_6X5: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_6x5_KHR;
    case Format.ASTC_SRGBA_6X6: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_6x6_KHR;
    case Format.ASTC_SRGBA_8X5: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_8x5_KHR;
    case Format.ASTC_SRGBA_8X6: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_8x6_KHR;
    case Format.ASTC_SRGBA_8X8: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_8x8_KHR;
    case Format.ASTC_SRGBA_10X5: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_10x5_KHR;
    case Format.ASTC_SRGBA_10X6: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_10x6_KHR;
    case Format.ASTC_SRGBA_10X8: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_10x8_KHR;
    case Format.ASTC_SRGBA_10X10: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_10x10_KHR;
    case Format.ASTC_SRGBA_12X10: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_12x10_KHR;
    case Format.ASTC_SRGBA_12X12: return WebGL2EXT.COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR;
    default: {
        console.error('Unsupported Format, convert to WebGL format failed.');
        return gl.RGBA;
    }
    }
}
/**
 * Maps a GFX uniform/attribute Type to the matching WebGL type enum.
 * NOTE(review): the default branch returns Type.UNKNOWN (a GFX enum value)
 * rather than a GL constant — callers presumably treat it as invalid since
 * both are numbers; confirm before relying on the fallback.
 */
function GFXTypeToWebGLType (type: Type, gl: WebGL2RenderingContext): GLenum {
    switch (type) {
    case Type.BOOL: return gl.BOOL;
    case Type.BOOL2: return gl.BOOL_VEC2;
    case Type.BOOL3: return gl.BOOL_VEC3;
    case Type.BOOL4: return gl.BOOL_VEC4;
    case Type.INT: return gl.INT;
    case Type.INT2: return gl.INT_VEC2;
    case Type.INT3: return gl.INT_VEC3;
    case Type.INT4: return gl.INT_VEC4;
    case Type.UINT: return gl.UNSIGNED_INT;
    case Type.FLOAT: return gl.FLOAT;
    case Type.FLOAT2: return gl.FLOAT_VEC2;
    case Type.FLOAT3: return gl.FLOAT_VEC3;
    case Type.FLOAT4: return gl.FLOAT_VEC4;
    case Type.MAT2: return gl.FLOAT_MAT2;
    case Type.MAT2X3: return gl.FLOAT_MAT2x3;
    case Type.MAT2X4: return gl.FLOAT_MAT2x4;
    case Type.MAT3X2: return gl.FLOAT_MAT3x2;
    case Type.MAT3: return gl.FLOAT_MAT3;
    case Type.MAT3X4: return gl.FLOAT_MAT3x4;
    case Type.MAT4X2: return gl.FLOAT_MAT4x2;
    case Type.MAT4X3: return gl.FLOAT_MAT4x3;
    case Type.MAT4: return gl.FLOAT_MAT4;
    case Type.SAMPLER2D: return gl.SAMPLER_2D;
    case Type.SAMPLER2D_ARRAY: return gl.SAMPLER_2D_ARRAY;
    case Type.SAMPLER3D: return gl.SAMPLER_3D;
    case Type.SAMPLER_CUBE: return gl.SAMPLER_CUBE;
    default: {
        console.error('Unsupported GLType, convert to GL type failed.');
        return Type.UNKNOWN;
    }
    }
}
/**
 * Maps a WebGL uniform/attribute type enum (as reported by the active-uniform
 * reflection API) back to the corresponding GFX Type. Unsupported types log
 * an error and yield Type.UNKNOWN.
 */
function WebGLTypeToGFXType (glType: GLenum, gl: WebGL2RenderingContext): Type {
    switch (glType) {
    case gl.BOOL: return Type.BOOL;
    case gl.BOOL_VEC2: return Type.BOOL2;
    case gl.BOOL_VEC3: return Type.BOOL3;
    case gl.BOOL_VEC4: return Type.BOOL4;
    case gl.INT: return Type.INT;
    case gl.INT_VEC2: return Type.INT2;
    case gl.INT_VEC3: return Type.INT3;
    case gl.INT_VEC4: return Type.INT4;
    case gl.UNSIGNED_INT: return Type.UINT;
    case gl.UNSIGNED_INT_VEC2: return Type.UINT2;
    case gl.UNSIGNED_INT_VEC3: return Type.UINT3;
    case gl.UNSIGNED_INT_VEC4: return Type.UINT4;
    case gl.FLOAT: return Type.FLOAT;
    case gl.FLOAT_VEC2: return Type.FLOAT2;
    case gl.FLOAT_VEC3: return Type.FLOAT3;
    case gl.FLOAT_VEC4: return Type.FLOAT4;
    case gl.FLOAT_MAT2: return Type.MAT2;
    case gl.FLOAT_MAT2x3: return Type.MAT2X3;
    case gl.FLOAT_MAT2x4: return Type.MAT2X4;
    case gl.FLOAT_MAT3x2: return Type.MAT3X2;
    case gl.FLOAT_MAT3: return Type.MAT3;
    case gl.FLOAT_MAT3x4: return Type.MAT3X4;
    case gl.FLOAT_MAT4x2: return Type.MAT4X2;
    case gl.FLOAT_MAT4x3: return Type.MAT4X3;
    case gl.FLOAT_MAT4: return Type.MAT4;
    case gl.SAMPLER_2D: return Type.SAMPLER2D;
    case gl.SAMPLER_2D_ARRAY: return Type.SAMPLER2D_ARRAY;
    case gl.SAMPLER_3D: return Type.SAMPLER3D;
    case gl.SAMPLER_CUBE: return Type.SAMPLER_CUBE;
    default: {
        console.error('Unsupported GLType, convert to Type failed.');
        return Type.UNKNOWN;
    }
    }
}
/**
 * Returns the size in bytes of one element of the given WebGL uniform or
 * attribute type (e.g. FLOAT_MAT4 -> 64). Sampler types count as 4 bytes
 * (a single int binding slot).
 *
 * Fix: the return type was declared as `Type` although the function returns
 * a byte count; it is now `number`.
 *
 * @returns byte size, or 0 (with an error log) for unknown types.
 */
function WebGLGetTypeSize (glType: GLenum, gl: WebGL2RenderingContext): number {
    switch (glType) {
    case gl.BOOL: return 4;
    case gl.BOOL_VEC2: return 8;
    case gl.BOOL_VEC3: return 12;
    case gl.BOOL_VEC4: return 16;
    case gl.INT: return 4;
    case gl.INT_VEC2: return 8;
    case gl.INT_VEC3: return 12;
    case gl.INT_VEC4: return 16;
    case gl.UNSIGNED_INT: return 4;
    case gl.UNSIGNED_INT_VEC2: return 8;
    case gl.UNSIGNED_INT_VEC3: return 12;
    case gl.UNSIGNED_INT_VEC4: return 16;
    case gl.FLOAT: return 4;
    case gl.FLOAT_VEC2: return 8;
    case gl.FLOAT_VEC3: return 12;
    case gl.FLOAT_VEC4: return 16;
    case gl.FLOAT_MAT2: return 16;
    case gl.FLOAT_MAT2x3: return 24;
    case gl.FLOAT_MAT2x4: return 32;
    case gl.FLOAT_MAT3x2: return 24;
    case gl.FLOAT_MAT3: return 36;
    case gl.FLOAT_MAT3x4: return 48;
    case gl.FLOAT_MAT4x2: return 32;
    case gl.FLOAT_MAT4x3: return 48;
    case gl.FLOAT_MAT4: return 64;
    case gl.SAMPLER_2D: return 4;
    case gl.SAMPLER_2D_ARRAY: return 4;
    case gl.SAMPLER_2D_ARRAY_SHADOW: return 4;
    case gl.SAMPLER_3D: return 4;
    case gl.SAMPLER_CUBE: return 4;
    case gl.INT_SAMPLER_2D: return 4;
    case gl.INT_SAMPLER_2D_ARRAY: return 4;
    case gl.INT_SAMPLER_3D: return 4;
    case gl.INT_SAMPLER_CUBE: return 4;
    case gl.UNSIGNED_INT_SAMPLER_2D: return 4;
    case gl.UNSIGNED_INT_SAMPLER_2D_ARRAY: return 4;
    case gl.UNSIGNED_INT_SAMPLER_3D: return 4;
    case gl.UNSIGNED_INT_SAMPLER_CUBE: return 4;
    default: {
        console.error('Unsupported GLType, get type failed.');
        return 0;
    }
    }
}
/**
 * Returns how many column vectors (i.e. attribute locations) a WebGL matrix
 * type occupies; every non-matrix type occupies a single component/location.
 *
 * Fix: the return type was declared as `Type` although the function returns
 * a plain count; it is now `number`.
 */
function WebGLGetComponentCount (glType: GLenum, gl: WebGL2RenderingContext): number {
    switch (glType) {
    case gl.FLOAT_MAT2: return 2;
    case gl.FLOAT_MAT2x3: return 2;
    case gl.FLOAT_MAT2x4: return 2;
    case gl.FLOAT_MAT3x2: return 3;
    case gl.FLOAT_MAT3: return 3;
    case gl.FLOAT_MAT3x4: return 3;
    case gl.FLOAT_MAT4x2: return 4;
    case gl.FLOAT_MAT4x3: return 4;
    case gl.FLOAT_MAT4: return 4;
    default: {
        return 1;
    }
    }
}
// Lookup table: GFX comparison-function enum (presumed index order — confirm
// against the ComparisonFunc enum) -> WebGL compare constant.
const WebGLCmpFuncs: GLenum[] = [
    0x0200, // WebGLRenderingContext.NEVER,
    0x0201, // WebGLRenderingContext.LESS,
    0x0202, // WebGLRenderingContext.EQUAL,
    0x0203, // WebGLRenderingContext.LEQUAL,
    0x0204, // WebGLRenderingContext.GREATER,
    0x0205, // WebGLRenderingContext.NOTEQUAL,
    0x0206, // WebGLRenderingContext.GEQUAL,
    0x0207, // WebGLRenderingContext.ALWAYS,
];
// Lookup table: GFX stencil-op enum -> WebGL stencil operation constant.
const WebGLStencilOps: GLenum[] = [
    0x0000, // WebGLRenderingContext.ZERO,
    0x1E00, // WebGLRenderingContext.KEEP,
    0x1E01, // WebGLRenderingContext.REPLACE,
    0x1E02, // WebGLRenderingContext.INCR,
    0x1E03, // WebGLRenderingContext.DECR,
    0x150A, // WebGLRenderingContext.INVERT,
    0x8507, // WebGLRenderingContext.INCR_WRAP,
    0x8508, // WebGLRenderingContext.DECR_WRAP,
];
// Lookup table: GFX blend-op enum -> WebGL blend equation constant
// (MIN/MAX are WebGL2-only).
const WebGLBlendOps: GLenum[] = [
    0x8006, // WebGLRenderingContext.FUNC_ADD,
    0x800A, // WebGLRenderingContext.FUNC_SUBTRACT,
    0x800B, // WebGLRenderingContext.FUNC_REVERSE_SUBTRACT,
    0x8007, // WebGL2RenderingContext.MIN,
    0x8008, // WebGL2RenderingContext.MAX,
];
// Lookup table: GFX blend-factor enum -> WebGL blend factor constant.
const WebGLBlendFactors: GLenum[] = [
    0x0000, // WebGLRenderingContext.ZERO,
    0x0001, // WebGLRenderingContext.ONE,
    0x0302, // WebGLRenderingContext.SRC_ALPHA,
    0x0304, // WebGLRenderingContext.DST_ALPHA,
    0x0303, // WebGLRenderingContext.ONE_MINUS_SRC_ALPHA,
    0x0305, // WebGLRenderingContext.ONE_MINUS_DST_ALPHA,
    0x0300, // WebGLRenderingContext.SRC_COLOR,
    0x0306, // WebGLRenderingContext.DST_COLOR,
    0x0301, // WebGLRenderingContext.ONE_MINUS_SRC_COLOR,
    0x0307, // WebGLRenderingContext.ONE_MINUS_DST_COLOR,
    0x0308, // WebGLRenderingContext.SRC_ALPHA_SATURATE,
    0x8001, // WebGLRenderingContext.CONSTANT_COLOR,
    0x8002, // WebGLRenderingContext.ONE_MINUS_CONSTANT_COLOR,
    0x8003, // WebGLRenderingContext.CONSTANT_ALPHA,
    0x8004, // WebGLRenderingContext.ONE_MINUS_CONSTANT_ALPHA,
];
/**
 * Kinds of commands recorded into a WebGL2 command package.
 * COUNT is the number of real command kinds (used for pool sizing, presumably).
 */
export enum WebGL2Cmd {
    BEGIN_RENDER_PASS,
    END_RENDER_PASS,
    BIND_STATES,
    DRAW,
    UPDATE_BUFFER,
    COPY_BUFFER_TO_TEXTURE,
    COUNT,
}
/**
 * Base class for pooled, ref-counted WebGL2 command records.
 *
 * Fix: the abstract `clear` signature previously had no return type
 * (implicitly `any`); it is now explicitly `void`.
 */
export abstract class WebGL2CmdObject {
    // Discriminator identifying the concrete command kind.
    public cmdType: WebGL2Cmd;

    // Pool reference count; managed by the command allocator.
    public refCount = 0;

    constructor (type: WebGL2Cmd) {
        this.cmdType = type;
    }

    // Releases per-use references so the object can be safely pooled.
    public abstract clear (): void;
}
/** Recorded begin-render-pass command: target pass/framebuffer plus clear values. */
export class WebGL2CmdBeginRenderPass extends WebGL2CmdObject {
    public gpuRenderPass: IWebGL2GPURenderPass | null = null;

    public gpuFramebuffer: IWebGL2GPUFramebuffer | null = null;

    public renderArea = new Rect();

    public clearColors: Color[] = [];

    public clearDepth = 1.0;

    public clearStencil = 0;

    constructor () {
        super(WebGL2Cmd.BEGIN_RENDER_PASS);
    }

    public clear () {
        // NOTE(review): gpuRenderPass is intentionally (?) not reset here,
        // unlike gpuFramebuffer — the reference survives pooling; confirm
        // that recording always overwrites it before use.
        this.gpuFramebuffer = null;
        this.clearColors.length = 0;
    }
}
/** Recorded bind-states command: pipeline, IA, descriptor sets and dynamic state. */
export class WebGL2CmdBindStates extends WebGL2CmdObject {
    public gpuPipelineState: IWebGL2GPUPipelineState | null = null;

    public gpuInputAssembler: IWebGL2GPUInputAssembler | null = null;

    public gpuDescriptorSets: IWebGL2GPUDescriptorSet[] = [];

    public dynamicOffsets: number[] = [];

    // Not reset in clear(): overwritten wholesale when the command is re-recorded.
    public dynamicStates: DynamicStates = new DynamicStates();

    constructor () {
        super(WebGL2Cmd.BIND_STATES);
    }

    public clear () {
        this.gpuPipelineState = null;
        this.gpuInputAssembler = null;
        this.gpuDescriptorSets.length = 0;
        this.dynamicOffsets.length = 0;
    }
}
/** Recorded draw command. */
export class WebGL2CmdDraw extends WebGL2CmdObject {
    public drawInfo = new DrawInfo();

    constructor () {
        super(WebGL2Cmd.DRAW);
    }

    public clear () {
        // drawInfo holds no external references and is overwritten on reuse.
    }
}
/** Recorded buffer-update command: target GPU buffer plus source data range. */
export class WebGL2CmdUpdateBuffer extends WebGL2CmdObject {
    public gpuBuffer: IWebGL2GPUBuffer | null = null;

    public buffer: BufferSource | null = null;

    public offset = 0;

    public size = 0;

    constructor () {
        super(WebGL2Cmd.UPDATE_BUFFER);
    }

    public clear () {
        // Drop references so pooled commands don't keep buffers alive.
        this.gpuBuffer = null;
        this.buffer = null;
    }
}
/** Recorded buffer-to-texture copy command: target texture, source views, regions. */
export class WebGL2CmdCopyBufferToTexture extends WebGL2CmdObject {
    public gpuTexture: IWebGL2GPUTexture | null = null;

    public buffers: ArrayBufferView[] = [];

    public regions: BufferTextureCopy[] = [];

    constructor () {
        super(WebGL2Cmd.COPY_BUFFER_TO_TEXTURE);
    }

    public clear () {
        // Drop references so pooled commands don't keep texture/data alive.
        this.gpuTexture = null;
        this.buffers.length = 0;
        this.regions.length = 0;
    }
}
/**
 * Container for one recorded command stream: `cmds` is the ordered queue of
 * command kinds, and each typed list holds the corresponding payload objects.
 */
export class WebGL2CmdPackage {
    public cmds: CachedArray<WebGL2Cmd> = new CachedArray(1);

    public beginRenderPassCmds: CachedArray<WebGL2CmdBeginRenderPass> = new CachedArray(1);

    public bindStatesCmds: CachedArray<WebGL2CmdBindStates> = new CachedArray(1);

    public drawCmds: CachedArray<WebGL2CmdDraw> = new CachedArray(1);

    public updateBufferCmds: CachedArray<WebGL2CmdUpdateBuffer> = new CachedArray(1);

    public copyBufferToTextureCmds: CachedArray<WebGL2CmdCopyBufferToTexture> = new CachedArray(1);

    /**
     * Returns every queued command object to its pool on the given allocator
     * and empties all lists, including the merged `cmds` queue.
     */
    public clearCmds (allocator: WebGL2CommandAllocator) {
        // Free one typed command list back into its pool, skipping empty lists.
        const recycle = <T>(pool: { freeCmds (cmds: CachedArray<T>): void }, list: CachedArray<T>) => {
            if (list.length) {
                pool.freeCmds(list);
                list.clear();
            }
        };

        recycle(allocator.beginRenderPassCmdPool, this.beginRenderPassCmds);
        recycle(allocator.bindStatesCmdPool, this.bindStatesCmds);
        recycle(allocator.drawCmdPool, this.drawCmds);
        recycle(allocator.updateBufferCmdPool, this.updateBufferCmds);
        recycle(allocator.copyBufferToTextureCmdPool, this.copyBufferToTextureCmds);

        this.cmds.clear();
    }
}
/**
 * Creates the GL buffer object backing `gpuBuffer` and reserves
 * `gpuBuffer.size` bytes of uninitialized storage for it.
 *
 * The GL target is derived from the usage bits (vertex/index/uniform);
 * indirect and transfer usages are CPU-side only and get `gl.NONE`.
 * HOST-visible memory maps to DYNAMIC_DRAW, otherwise STATIC_DRAW.
 */
export function WebGL2CmdFuncCreateBuffer (device: WebGL2Device, gpuBuffer: IWebGL2GPUBuffer) {
    const { gl } = device;
    const cache = device.stateCache;
    const glUsage: GLenum = gpuBuffer.memUsage & MemoryUsageBit.HOST ? gl.DYNAMIC_DRAW : gl.STATIC_DRAW;

    if (gpuBuffer.usage & BufferUsageBit.VERTEX) {
        gpuBuffer.glTarget = gl.ARRAY_BUFFER;
        const glBuffer = gl.createBuffer();

        if (glBuffer) {
            gpuBuffer.glBuffer = glBuffer;

            if (gpuBuffer.size > 0) {
                // Unbind any VAO first: buffer bindings made while a VAO is
                // bound would be captured into that VAO's state.
                if (device.extensions.useVAO) {
                    if (cache.glVAO) {
                        gl.bindVertexArray(null);
                        cache.glVAO = null;
                    }
                }
                gfxStateCache.gpuInputAssembler = null;

                if (device.stateCache.glArrayBuffer !== gpuBuffer.glBuffer) {
                    gl.bindBuffer(gl.ARRAY_BUFFER, gpuBuffer.glBuffer);
                    device.stateCache.glArrayBuffer = gpuBuffer.glBuffer;
                }

                // Allocate storage, then unbind and invalidate the cache entry.
                gl.bufferData(gl.ARRAY_BUFFER, gpuBuffer.size, glUsage);
                gl.bindBuffer(gl.ARRAY_BUFFER, null);
                device.stateCache.glArrayBuffer = null;
            }
        }
    } else if (gpuBuffer.usage & BufferUsageBit.INDEX) {
        gpuBuffer.glTarget = gl.ELEMENT_ARRAY_BUFFER;
        const glBuffer = gl.createBuffer();

        if (glBuffer) {
            gpuBuffer.glBuffer = glBuffer;

            if (gpuBuffer.size > 0) {
                // Same VAO precaution as the vertex path: ELEMENT_ARRAY_BUFFER
                // binding is part of VAO state.
                if (device.extensions.useVAO) {
                    if (cache.glVAO) {
                        gl.bindVertexArray(null);
                        cache.glVAO = null;
                    }
                }
                gfxStateCache.gpuInputAssembler = null;

                if (device.stateCache.glElementArrayBuffer !== gpuBuffer.glBuffer) {
                    gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, gpuBuffer.glBuffer);
                    device.stateCache.glElementArrayBuffer = gpuBuffer.glBuffer;
                }

                gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, gpuBuffer.size, glUsage);
                gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
                device.stateCache.glElementArrayBuffer = null;
            }
        }
    } else if (gpuBuffer.usage & BufferUsageBit.UNIFORM) {
        gpuBuffer.glTarget = gl.UNIFORM_BUFFER;
        const glBuffer = gl.createBuffer();

        // NOTE(review): when size === 0 the freshly created glBuffer is neither
        // stored on gpuBuffer nor deleted (unlike the vertex/index paths, which
        // always store it) — confirm zero-sized uniform buffers cannot occur.
        if (glBuffer && gpuBuffer.size > 0) {
            gpuBuffer.glBuffer = glBuffer;

            if (device.stateCache.glUniformBuffer !== gpuBuffer.glBuffer) {
                gl.bindBuffer(gl.UNIFORM_BUFFER, gpuBuffer.glBuffer);
                device.stateCache.glUniformBuffer = gpuBuffer.glBuffer;
            }

            gl.bufferData(gl.UNIFORM_BUFFER, gpuBuffer.size, glUsage);
            gl.bindBuffer(gl.UNIFORM_BUFFER, null);
            device.stateCache.glUniformBuffer = null;
        }
    } else if (gpuBuffer.usage & BufferUsageBit.INDIRECT) {
        // CPU-side only: indirect draw data is consumed from JS, no GL object.
        gpuBuffer.glTarget = gl.NONE;
    } else if (gpuBuffer.usage & BufferUsageBit.TRANSFER_DST) {
        gpuBuffer.glTarget = gl.NONE;
    } else if (gpuBuffer.usage & BufferUsageBit.TRANSFER_SRC) {
        gpuBuffer.glTarget = gl.NONE;
    } else {
        console.error('Unsupported BufferType, create buffer failed.');
        gpuBuffer.glTarget = gl.NONE;
    }
}
export function WebGL2CmdFuncDestroyBuffer (device: WebGL2Device, gpuBuffer: IWebGL2GPUBuffer) {
const { gl } = device;
const cache = device.stateCache;
if (gpuBuffer.glBuffer) {
// Firefox 75+ implicitly unbind whatever buffer there was on the slot sometimes
// can be reproduced in the static batching scene at https://github.com/cocos-creator/test-cases-3d
switch (gpuBuffer.glTarget) {
case gl.ARRAY_BUFFER:
if (device.extensions.useVAO) {
if (cache.glVAO) {
gl.bindVertexArray(null);
device.stateCache.glVAO = null;
}
}
gfxStateCache.gpuInputAssembler = null;
gl.bindBuffer(gl.ARRAY_BUFFER, null);
device.stateCache.glArrayBuffer = null;
break;
case gl.ELEMENT_ARRAY_BUFFER:
if (device.extensions.useVAO) {
if (cache.glVAO) {
gl.bindVertexArray(null);
device.stateCache.glVAO = null;
}
}
gfxStateCache.gpuInputAssembler = null;
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
device.stateCache.glElementArrayBuffer = null;
break;
case gl.UNIFORM_BUFFER:
gl.bindBuffer(gl.UNIFORM_BUFFER, null);
device.stateCache.glUniformBuffer = null;
break;
default:
}
gl.deleteBuffer(gpuBuffer.glBuffer);
gpuBuffer.glBuffer = null;
}
}
/**
 * Re-allocates the GL data store of `gpuBuffer` to its current `size`,
 * re-uploading the CPU-side copy (`gpuBuffer.buffer`) when one exists.
 * Host-visible memory gets DYNAMIC_DRAW usage; everything else STATIC_DRAW.
 * Leaves the touched binding point unbound and the state cache invalidated.
 */
export function WebGL2CmdFuncResizeBuffer (device: WebGL2Device, gpuBuffer: IWebGL2GPUBuffer) {
    const { gl } = device;
    const cache = device.stateCache;
    const glUsage: GLenum = gpuBuffer.memUsage & MemoryUsageBit.HOST ? gl.DYNAMIC_DRAW : gl.STATIC_DRAW;
    if (gpuBuffer.usage & BufferUsageBit.VERTEX) {
        // a bound VAO would capture the following buffer binding; detach it first
        if (device.extensions.useVAO) {
            if (cache.glVAO) {
                gl.bindVertexArray(null);
                cache.glVAO = null;
            }
        }
        gfxStateCache.gpuInputAssembler = null;
        if (cache.glArrayBuffer !== gpuBuffer.glBuffer) {
            gl.bindBuffer(gl.ARRAY_BUFFER, gpuBuffer.glBuffer);
        }
        // prefer uploading the retained CPU copy so existing content survives the resize
        if (gpuBuffer.buffer) {
            gl.bufferData(gl.ARRAY_BUFFER, gpuBuffer.buffer, glUsage);
        } else {
            gl.bufferData(gl.ARRAY_BUFFER, gpuBuffer.size, glUsage);
        }
        // leave the slot clean so later VAO setup starts from a known state
        gl.bindBuffer(gl.ARRAY_BUFFER, null);
        cache.glArrayBuffer = null;
    } else if (gpuBuffer.usage & BufferUsageBit.INDEX) {
        if (device.extensions.useVAO) {
            if (cache.glVAO) {
                gl.bindVertexArray(null);
                cache.glVAO = null;
            }
        }
        gfxStateCache.gpuInputAssembler = null;
        if (device.stateCache.glElementArrayBuffer !== gpuBuffer.glBuffer) {
            gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, gpuBuffer.glBuffer);
        }
        if (gpuBuffer.buffer) {
            gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, gpuBuffer.buffer, glUsage);
        } else {
            gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, gpuBuffer.size, glUsage);
        }
        gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
        device.stateCache.glElementArrayBuffer = null;
    } else if (gpuBuffer.usage & BufferUsageBit.UNIFORM) {
        if (device.stateCache.glUniformBuffer !== gpuBuffer.glBuffer) {
            gl.bindBuffer(gl.UNIFORM_BUFFER, gpuBuffer.glBuffer);
        }
        // note: uniform buffers are re-allocated without re-uploading gpuBuffer.buffer
        gl.bufferData(gl.UNIFORM_BUFFER, gpuBuffer.size, glUsage);
        gl.bindBuffer(gl.UNIFORM_BUFFER, null);
        device.stateCache.glUniformBuffer = null;
    } else if ((gpuBuffer.usage & BufferUsageBit.INDIRECT)
        || (gpuBuffer.usage & BufferUsageBit.TRANSFER_DST)
        || (gpuBuffer.usage & BufferUsageBit.TRANSFER_SRC)) {
        // CPU-side / transfer-only buffers have no GL storage to resize
        gpuBuffer.glTarget = gl.NONE;
    } else {
        console.error('Unsupported BufferType, create buffer failed.');
        gpuBuffer.glTarget = gl.NONE;
    }
}
/**
 * Uploads `size` bytes from `buffer` into `gpuBuffer` starting at `offset`.
 * INDIRECT buffers are CPU-side only: their draw infos are copied into the
 * indirect cache and GL is never touched.
 */
export function WebGL2CmdFuncUpdateBuffer (device: WebGL2Device, gpuBuffer: IWebGL2GPUBuffer, buffer: BufferSource, offset: number, size: number) {
    if (gpuBuffer.usage & BufferUsageBit.INDIRECT) {
        gpuBuffer.indirects.clearDraws();
        const drawInfos = (buffer as IndirectBuffer).drawInfos;
        for (let i = 0; i < drawInfos.length; ++i) {
            // for indirect buffers `offset` is a draw-info index, not a byte offset
            gpuBuffer.indirects.setDrawInfo(offset + i, drawInfos[i]);
        }
    } else {
        const buff = buffer as ArrayBuffer;
        const { gl } = device;
        const cache = device.stateCache;
        switch (gpuBuffer.glTarget) {
        case gl.ARRAY_BUFFER: {
            // a bound VAO would capture this binding; detach it first
            if (device.extensions.useVAO) {
                if (cache.glVAO) {
                    gl.bindVertexArray(null);
                    cache.glVAO = null;
                }
            }
            gfxStateCache.gpuInputAssembler = null;
            if (cache.glArrayBuffer !== gpuBuffer.glBuffer) {
                gl.bindBuffer(gl.ARRAY_BUFFER, gpuBuffer.glBuffer);
                cache.glArrayBuffer = gpuBuffer.glBuffer;
            }
            // whole-source upload avoids the copy slice() makes
            if (size === buff.byteLength) {
                gl.bufferSubData(gpuBuffer.glTarget, offset, buff);
            } else {
                gl.bufferSubData(gpuBuffer.glTarget, offset, buff.slice(0, size));
            }
            break;
        }
        case gl.ELEMENT_ARRAY_BUFFER: {
            if (device.extensions.useVAO) {
                if (cache.glVAO) {
                    gl.bindVertexArray(null);
                    cache.glVAO = null;
                }
            }
            gfxStateCache.gpuInputAssembler = null;
            if (cache.glElementArrayBuffer !== gpuBuffer.glBuffer) {
                gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, gpuBuffer.glBuffer);
                cache.glElementArrayBuffer = gpuBuffer.glBuffer;
            }
            if (size === buff.byteLength) {
                gl.bufferSubData(gpuBuffer.glTarget, offset, buff);
            } else {
                gl.bufferSubData(gpuBuffer.glTarget, offset, buff.slice(0, size));
            }
            break;
        }
        case gl.UNIFORM_BUFFER: {
            if (cache.glUniformBuffer !== gpuBuffer.glBuffer) {
                gl.bindBuffer(gl.UNIFORM_BUFFER, gpuBuffer.glBuffer);
                cache.glUniformBuffer = gpuBuffer.glBuffer;
            }
            if (size === buff.byteLength) {
                gl.bufferSubData(gpuBuffer.glTarget, offset, buff);
            } else {
                // typed-array view avoids copying; assumes `size` is a multiple of 4
                gl.bufferSubData(gpuBuffer.glTarget, offset, new Float32Array(buff, 0, size / 4));
            }
            break;
        }
        default: {
            console.error('Unsupported BufferType, update buffer failed.');
        }
        }
    }
}
/**
 * Creates the GL texture (or, for multisampled 2D targets, renderbuffer)
 * object backing `gpuTexture`, allocating immutable storage via texStorage2D
 * for uncompressed formats and per-mip compressedTexImage2D placeholders for
 * compressed ones.
 *
 * Fix: the cube-map branch compared against `maxCubeMapTextureSize` but
 * reported `maxTextureSize` in the error — it now reports the limit it checks.
 */
export function WebGL2CmdFuncCreateTexture (device: WebGL2Device, gpuTexture: IWebGL2GPUTexture) {
    const { gl } = device;
    gpuTexture.glInternalFmt = GFXFormatToWebGLInternalFormat(gpuTexture.format, gl);
    gpuTexture.glFormat = GFXFormatToWebGLFormat(gpuTexture.format, gl);
    gpuTexture.glType = GFXFormatToWebGLType(gpuTexture.format, gl);
    let w = gpuTexture.width;
    let h = gpuTexture.height;
    switch (gpuTexture.type) {
    case TextureType.TEX2D: {
        gpuTexture.glTarget = gl.TEXTURE_2D;
        // swapchain textures are owned by the default framebuffer; nothing to create
        if (gpuTexture.isSwapchainTexture) break;
        const maxSize = Math.max(w, h);
        if (maxSize > device.capabilities.maxTextureSize) {
            errorID(9100, maxSize, device.capabilities.maxTextureSize);
        }
        if (gpuTexture.samples === SampleCount.ONE) {
            gpuTexture.glTexture = gl.createTexture();
            if (gpuTexture.size > 0) {
                const glTexUnit = device.stateCache.glTexUnits[device.stateCache.texUnit];
                if (glTexUnit.glTexture !== gpuTexture.glTexture) {
                    gl.bindTexture(gl.TEXTURE_2D, gpuTexture.glTexture);
                    glTexUnit.glTexture = gpuTexture.glTexture;
                }
                if (FormatInfos[gpuTexture.format].isCompressed) {
                    // allocate each mip with zero-filled placeholder data
                    for (let i = 0; i < gpuTexture.mipLevel; ++i) {
                        const imgSize = FormatSize(gpuTexture.format, w, h, 1);
                        const view: Uint8Array = new Uint8Array(imgSize);
                        gl.compressedTexImage2D(gl.TEXTURE_2D, i, gpuTexture.glInternalFmt, w, h, 0, view);
                        w = Math.max(1, w >> 1);
                        h = Math.max(1, h >> 1);
                    }
                } else {
                    // immutable storage for the whole mip chain
                    gl.texStorage2D(gl.TEXTURE_2D, gpuTexture.mipLevel, gpuTexture.glInternalFmt, w, h);
                }
            }
        } else {
            // MSAA targets must use renderbuffers in WebGL2
            gpuTexture.glRenderbuffer = gl.createRenderbuffer();
            if (gpuTexture.size > 0) {
                if (device.stateCache.glRenderbuffer !== gpuTexture.glRenderbuffer) {
                    gl.bindRenderbuffer(gl.RENDERBUFFER, gpuTexture.glRenderbuffer);
                    device.stateCache.glRenderbuffer = gpuTexture.glRenderbuffer;
                }
                gl.renderbufferStorageMultisample(gl.RENDERBUFFER, gpuTexture.samples,
                    gpuTexture.glInternalFmt, gpuTexture.width, gpuTexture.height);
            }
        }
        break;
    }
    case TextureType.CUBE: {
        gpuTexture.glTarget = gl.TEXTURE_CUBE_MAP;
        const maxSize = Math.max(w, h);
        if (maxSize > device.capabilities.maxCubeMapTextureSize) {
            // report the cube-map limit actually being exceeded
            errorID(9100, maxSize, device.capabilities.maxCubeMapTextureSize);
        }
        gpuTexture.glTexture = gl.createTexture();
        if (gpuTexture.size > 0) {
            const glTexUnit = device.stateCache.glTexUnits[device.stateCache.texUnit];
            if (glTexUnit.glTexture !== gpuTexture.glTexture) {
                gl.bindTexture(gl.TEXTURE_CUBE_MAP, gpuTexture.glTexture);
                glTexUnit.glTexture = gpuTexture.glTexture;
            }
            if (FormatInfos[gpuTexture.format].isCompressed) {
                for (let i = 0; i < gpuTexture.mipLevel; ++i) {
                    const imgSize = FormatSize(gpuTexture.format, w, h, 1);
                    const view: Uint8Array = new Uint8Array(imgSize);
                    for (let f = 0; f < 6; ++f) {
                        gl.compressedTexImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + f, i, gpuTexture.glInternalFmt, w, h, 0, view);
                    }
                    w = Math.max(1, w >> 1);
                    h = Math.max(1, h >> 1);
                }
            } else {
                // texStorage2D on a cube target allocates all six faces
                gl.texStorage2D(gl.TEXTURE_CUBE_MAP, gpuTexture.mipLevel, gpuTexture.glInternalFmt, w, h);
            }
        }
        break;
    }
    default: {
        console.error('Unsupported TextureType, create texture failed.');
        gpuTexture.type = TextureType.TEX2D;
        gpuTexture.glTarget = gl.TEXTURE_2D;
    }
    }
}
/**
 * Frees the GL objects held by `gpuTexture`. Both the texture object and the
 * renderbuffer (used for multisampled targets) are checked and released.
 */
export function WebGL2CmdFuncDestroyTexture (device: WebGL2Device, gpuTexture: IWebGL2GPUTexture) {
    const { gl } = device;
    const { glTexture, glRenderbuffer } = gpuTexture;
    if (glTexture) {
        gl.deleteTexture(glTexture);
        gpuTexture.glTexture = null;
    }
    if (glRenderbuffer) {
        gl.deleteRenderbuffer(glRenderbuffer);
        gpuTexture.glRenderbuffer = null;
    }
}
/**
 * Resizes the storage of `gpuTexture` to its current width/height.
 * Compressed formats re-specify each mip in place; uncompressed formats use
 * immutable storage (texStorage2D) and therefore must be destroyed and
 * re-created. No-op when the texture has no backing storage (size === 0).
 *
 * Fix: the cube-map branch compared against `maxCubeMapTextureSize` but
 * reported `maxTextureSize` in the error — it now reports the limit it checks.
 * Also removed a dead self-assignment of `gpuTexture.type` in the CUBE case.
 */
export function WebGL2CmdFuncResizeTexture (device: WebGL2Device, gpuTexture: IWebGL2GPUTexture) {
    if (!gpuTexture.size) return;
    const { gl } = device;
    let w = gpuTexture.width;
    let h = gpuTexture.height;
    switch (gpuTexture.type) {
    case TextureType.TEX2D: {
        gpuTexture.glTarget = gl.TEXTURE_2D;
        const maxSize = Math.max(w, h);
        if (maxSize > device.capabilities.maxTextureSize) {
            errorID(9100, maxSize, device.capabilities.maxTextureSize);
        }
        if (gpuTexture.samples === SampleCount.ONE) {
            const glTexUnit = device.stateCache.glTexUnits[device.stateCache.texUnit];
            if (glTexUnit.glTexture !== gpuTexture.glTexture) {
                gl.bindTexture(gl.TEXTURE_2D, gpuTexture.glTexture);
                glTexUnit.glTexture = gpuTexture.glTexture;
            }
            if (FormatInfos[gpuTexture.format].isCompressed) {
                // re-specify each mip with zero-filled placeholder data
                for (let i = 0; i < gpuTexture.mipLevel; ++i) {
                    const imgSize = FormatSize(gpuTexture.format, w, h, 1);
                    const view: Uint8Array = new Uint8Array(imgSize);
                    gl.compressedTexImage2D(gl.TEXTURE_2D, i, gpuTexture.glInternalFmt, w, h, 0, view);
                    w = Math.max(1, w >> 1);
                    h = Math.max(1, h >> 1);
                }
            } else {
                // immutable storage can't be resized in place — recreate
                WebGL2CmdFuncDestroyTexture(device, gpuTexture);
                WebGL2CmdFuncCreateTexture(device, gpuTexture);
            }
        } else if (gpuTexture.glRenderbuffer) {
            if (device.stateCache.glRenderbuffer !== gpuTexture.glRenderbuffer) {
                gl.bindRenderbuffer(gl.RENDERBUFFER, gpuTexture.glRenderbuffer);
                device.stateCache.glRenderbuffer = gpuTexture.glRenderbuffer;
            }
            gl.renderbufferStorageMultisample(gl.RENDERBUFFER, gpuTexture.samples,
                gpuTexture.glInternalFmt, gpuTexture.width, gpuTexture.height);
        }
        break;
    }
    case TextureType.CUBE: {
        gpuTexture.glTarget = gl.TEXTURE_CUBE_MAP;
        const maxSize = Math.max(w, h);
        if (maxSize > device.capabilities.maxCubeMapTextureSize) {
            // report the cube-map limit actually being exceeded
            errorID(9100, maxSize, device.capabilities.maxCubeMapTextureSize);
        }
        const glTexUnit = device.stateCache.glTexUnits[device.stateCache.texUnit];
        if (glTexUnit.glTexture !== gpuTexture.glTexture) {
            gl.bindTexture(gl.TEXTURE_CUBE_MAP, gpuTexture.glTexture);
            glTexUnit.glTexture = gpuTexture.glTexture;
        }
        if (FormatInfos[gpuTexture.format].isCompressed) {
            for (let f = 0; f < 6; ++f) {
                // each face restarts from the full-resolution mip 0
                w = gpuTexture.width;
                h = gpuTexture.height;
                for (let i = 0; i < gpuTexture.mipLevel; ++i) {
                    const imgSize = FormatSize(gpuTexture.format, w, h, 1);
                    const view: Uint8Array = new Uint8Array(imgSize);
                    gl.compressedTexImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + f, i, gpuTexture.glInternalFmt, w, h, 0, view);
                    w = Math.max(1, w >> 1);
                    h = Math.max(1, h >> 1);
                }
            }
        } else {
            // immutable storage can't be resized in place — recreate
            WebGL2CmdFuncDestroyTexture(device, gpuTexture);
            WebGL2CmdFuncCreateTexture(device, gpuTexture);
        }
        break;
    }
    default: {
        console.error('Unsupported TextureType, create texture failed.');
        gpuTexture.type = TextureType.TEX2D;
        gpuTexture.glTarget = gl.TEXTURE_2D;
    }
    }
}
/**
 * Creates a GL sampler object from the GFX sampler description, caching the
 * translated GL filter/wrap enums on `gpuSampler`.
 * ANISOTROPIC is treated like LINEAR for filter selection; LOD range is fixed
 * to [0, 1000].
 */
export function WebGL2CmdFuncCreateSampler (device: WebGL2Device, gpuSampler: IWebGL2GPUSampler) {
    const { gl } = device;
    const glSampler = gl.createSampler();
    if (!glSampler) { return; }
    const minLinear = gpuSampler.minFilter === Filter.LINEAR || gpuSampler.minFilter === Filter.ANISOTROPIC;
    const mipLinear = gpuSampler.mipFilter === Filter.LINEAR || gpuSampler.mipFilter === Filter.ANISOTROPIC;
    // pick the combined min/mip filter: mip mode selects the MIPMAP variant,
    // min mode selects the LINEAR/NEAREST base
    if (mipLinear) {
        gpuSampler.glMinFilter = minLinear ? gl.LINEAR_MIPMAP_LINEAR : gl.NEAREST_MIPMAP_LINEAR;
    } else if (gpuSampler.mipFilter === Filter.POINT) {
        gpuSampler.glMinFilter = minLinear ? gl.LINEAR_MIPMAP_NEAREST : gl.NEAREST_MIPMAP_NEAREST;
    } else {
        // no mip filtering at all
        gpuSampler.glMinFilter = minLinear ? gl.LINEAR : gl.NEAREST;
    }
    gpuSampler.glMagFilter = (gpuSampler.magFilter === Filter.LINEAR || gpuSampler.magFilter === Filter.ANISOTROPIC)
        ? gl.LINEAR : gl.NEAREST;
    gpuSampler.glWrapS = WebGLWraps[gpuSampler.addressU];
    gpuSampler.glWrapT = WebGLWraps[gpuSampler.addressV];
    gpuSampler.glWrapR = WebGLWraps[gpuSampler.addressW];
    gpuSampler.glSampler = glSampler;
    gl.samplerParameteri(glSampler, gl.TEXTURE_MIN_FILTER, gpuSampler.glMinFilter);
    gl.samplerParameteri(glSampler, gl.TEXTURE_MAG_FILTER, gpuSampler.glMagFilter);
    gl.samplerParameteri(glSampler, gl.TEXTURE_WRAP_S, gpuSampler.glWrapS);
    gl.samplerParameteri(glSampler, gl.TEXTURE_WRAP_T, gpuSampler.glWrapT);
    gl.samplerParameteri(glSampler, gl.TEXTURE_WRAP_R, gpuSampler.glWrapR);
    gl.samplerParameterf(glSampler, gl.TEXTURE_MIN_LOD, 0);
    gl.samplerParameterf(glSampler, gl.TEXTURE_MAX_LOD, 1000);
}
/** Frees the GL sampler object held by `gpuSampler`, if any. */
export function WebGL2CmdFuncDestroySampler (device: WebGL2Device, gpuSampler: IWebGL2GPUSampler) {
    const { glSampler } = gpuSampler;
    if (glSampler) {
        device.gl.deleteSampler(glSampler);
        gpuSampler.glSampler = null;
    }
}
/**
 * Creates the GL framebuffer object for `gpuFramebuffer`, attaching all color
 * textures/renderbuffers plus the optional depth-stencil attachment, then
 * validates completeness. If any color texture belongs to the swapchain the
 * default framebuffer is used instead (isOffscreen = false, nothing created).
 * The framebuffer's logical size is clamped to the smallest attachment.
 */
export function WebGL2CmdFuncCreateFramebuffer (device: WebGL2Device, gpuFramebuffer: IWebGL2GPUFramebuffer) {
    for (let i = 0; i < gpuFramebuffer.gpuColorTextures.length; ++i) {
        const tex = gpuFramebuffer.gpuColorTextures[i];
        if (tex.isSwapchainTexture) {
            // rendering to the swapchain goes through the default FBO; bail out
            gpuFramebuffer.isOffscreen = false;
            return;
        }
    }
    const { gl } = device;
    const attachments: GLenum[] = [];
    const glFramebuffer = gl.createFramebuffer();
    if (glFramebuffer) {
        gpuFramebuffer.glFramebuffer = glFramebuffer;
        if (device.stateCache.glFramebuffer !== gpuFramebuffer.glFramebuffer) {
            gl.bindFramebuffer(gl.FRAMEBUFFER, gpuFramebuffer.glFramebuffer);
        }
        for (let i = 0; i < gpuFramebuffer.gpuColorTextures.length; ++i) {
            const colorTexture = gpuFramebuffer.gpuColorTextures[i];
            if (colorTexture) {
                // single-sampled targets attach the texture, MSAA targets the renderbuffer
                if (colorTexture.glTexture) {
                    gl.framebufferTexture2D(
                        gl.FRAMEBUFFER,
                        gl.COLOR_ATTACHMENT0 + i,
                        colorTexture.glTarget,
                        colorTexture.glTexture,
                        0,
                    ); // level should be 0.
                } else {
                    gl.framebufferRenderbuffer(
                        gl.FRAMEBUFFER,
                        gl.COLOR_ATTACHMENT0 + i,
                        gl.RENDERBUFFER,
                        colorTexture.glRenderbuffer,
                    );
                }
                attachments.push(gl.COLOR_ATTACHMENT0 + i);
                gpuFramebuffer.width = Math.min(gpuFramebuffer.width, colorTexture.width);
                gpuFramebuffer.height = Math.min(gpuFramebuffer.height, colorTexture.height);
            }
        }
        const dst = gpuFramebuffer.gpuDepthStencilTexture;
        if (dst) {
            // combined formats attach to DEPTH_STENCIL, depth-only to DEPTH
            const glAttachment = FormatInfos[dst.format].hasStencil ? gl.DEPTH_STENCIL_ATTACHMENT : gl.DEPTH_ATTACHMENT;
            if (dst.glTexture) {
                gl.framebufferTexture2D(
                    gl.FRAMEBUFFER,
                    glAttachment,
                    dst.glTarget,
                    dst.glTexture,
                    0,
                ); // level must be 0
            } else {
                gl.framebufferRenderbuffer(
                    gl.FRAMEBUFFER,
                    glAttachment,
                    gl.RENDERBUFFER,
                    dst.glRenderbuffer,
                );
            }
            gpuFramebuffer.width = Math.min(gpuFramebuffer.width, dst.width);
            gpuFramebuffer.height = Math.min(gpuFramebuffer.height, dst.height);
        }
        // declare which color attachments fragment outputs map to (MRT)
        gl.drawBuffers(attachments);
        const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
        if (status !== gl.FRAMEBUFFER_COMPLETE) {
            switch (status) {
            case gl.FRAMEBUFFER_INCOMPLETE_ATTACHMENT: {
                console.error('glCheckFramebufferStatus() - FRAMEBUFFER_INCOMPLETE_ATTACHMENT');
                break;
            }
            case gl.FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT: {
                console.error('glCheckFramebufferStatus() - FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT');
                break;
            }
            case gl.FRAMEBUFFER_INCOMPLETE_DIMENSIONS: {
                console.error('glCheckFramebufferStatus() - FRAMEBUFFER_INCOMPLETE_DIMENSIONS');
                break;
            }
            case gl.FRAMEBUFFER_UNSUPPORTED: {
                console.error('glCheckFramebufferStatus() - FRAMEBUFFER_UNSUPPORTED');
                break;
            }
            default:
            }
        }
        // restore whatever FBO was bound before (the cache was never updated above)
        if (device.stateCache.glFramebuffer !== gpuFramebuffer.glFramebuffer) {
            gl.bindFramebuffer(gl.FRAMEBUFFER, device.stateCache.glFramebuffer);
        }
    }
}
/** Frees the GL framebuffer object held by `gpuFramebuffer`, if any. */
export function WebGL2CmdFuncDestroyFramebuffer (device: WebGL2Device, gpuFramebuffer: IWebGL2GPUFramebuffer) {
    const { glFramebuffer } = gpuFramebuffer;
    if (glFramebuffer) {
        device.gl.deleteFramebuffer(glFramebuffer);
        gpuFramebuffer.glFramebuffer = null;
    }
}
/**
 * Compiles and links the GL program for `gpuShader`, then reflects its
 * interface: vertex inputs, uniform blocks (bound to their mapped binding
 * points), and sampler textures (assigned texture units via a device-wide
 * name → unit cache so identical sampler names share units across shaders).
 *
 * Fix: on compile failure the cleanup loop indexed `gpuStages[k]` (the
 * current stage) instead of `gpuStages[l]` (the loop variable), repeatedly
 * deleting one shader and leaking the rest. It now iterates correctly.
 */
export function WebGL2CmdFuncCreateShader (device: WebGL2Device, gpuShader: IWebGL2GPUShader) {
    const { gl } = device;
    // compile each stage
    for (let k = 0; k < gpuShader.gpuStages.length; k++) {
        const gpuStage = gpuShader.gpuStages[k];
        let glShaderType: GLenum = 0;
        let shaderTypeStr = '';
        let lineNumber = 1;
        switch (gpuStage.type) {
        case ShaderStageFlagBit.VERTEX: {
            shaderTypeStr = 'VertexShader';
            glShaderType = gl.VERTEX_SHADER;
            break;
        }
        case ShaderStageFlagBit.FRAGMENT: {
            shaderTypeStr = 'FragmentShader';
            glShaderType = gl.FRAGMENT_SHADER;
            break;
        }
        default: {
            console.error('Unsupported ShaderType.');
            return;
        }
        }
        const glShader = gl.createShader(glShaderType);
        if (glShader) {
            gpuStage.glShader = glShader;
            gl.shaderSource(gpuStage.glShader, `#version 300 es\n${gpuStage.source}`);
            gl.compileShader(gpuStage.glShader);
            if (!gl.getShaderParameter(gpuStage.glShader, gl.COMPILE_STATUS)) {
                console.error(`${shaderTypeStr} in '${gpuShader.name}' compilation failed.`);
                // dump the source with line numbers for easier debugging
                console.error('Shader source dump:', gpuStage.source.replace(/^|\n/g, () => `\n${lineNumber++} `));
                console.error(gl.getShaderInfoLog(gpuStage.glShader));
                // release every shader compiled so far before giving up
                for (let l = 0; l < gpuShader.gpuStages.length; l++) {
                    const stage = gpuShader.gpuStages[l];
                    if (stage.glShader) {
                        gl.deleteShader(stage.glShader);
                        stage.glShader = null;
                    }
                }
                return;
            }
        }
    }
    const glProgram = gl.createProgram();
    if (!glProgram) {
        return;
    }
    gpuShader.glProgram = glProgram;
    // link program
    for (let k = 0; k < gpuShader.gpuStages.length; k++) {
        const gpuStage = gpuShader.gpuStages[k];
        gl.attachShader(gpuShader.glProgram, gpuStage.glShader!);
    }
    gl.linkProgram(gpuShader.glProgram);
    // detach & delete immediately — the linked program keeps what it needs
    for (let k = 0; k < gpuShader.gpuStages.length; k++) {
        const gpuStage = gpuShader.gpuStages[k];
        if (gpuStage.glShader) {
            gl.detachShader(gpuShader.glProgram, gpuStage.glShader);
            gl.deleteShader(gpuStage.glShader);
            gpuStage.glShader = null;
        }
    }
    if (gl.getProgramParameter(gpuShader.glProgram, gl.LINK_STATUS)) {
        debug(`Shader '${gpuShader.name}' compilation succeeded.`);
    } else {
        console.error(`Failed to link shader '${gpuShader.name}'.`);
        console.error(gl.getProgramInfoLog(gpuShader.glProgram));
        return;
    }
    // parse inputs
    const activeAttribCount = gl.getProgramParameter(gpuShader.glProgram, gl.ACTIVE_ATTRIBUTES);
    gpuShader.glInputs = new Array<IWebGL2GPUInput>(activeAttribCount);
    for (let i = 0; i < activeAttribCount; ++i) {
        const attribInfo = gl.getActiveAttrib(gpuShader.glProgram, i);
        if (attribInfo) {
            let varName: string;
            // strip the '[0]' suffix GL appends to array attribute names
            const nameOffset = attribInfo.name.indexOf('[');
            if (nameOffset !== -1) {
                varName = attribInfo.name.substr(0, nameOffset);
            } else {
                varName = attribInfo.name;
            }
            const glLoc = gl.getAttribLocation(gpuShader.glProgram, varName);
            const type = WebGLTypeToGFXType(attribInfo.type, gl);
            const stride = WebGLGetTypeSize(attribInfo.type, gl);
            gpuShader.glInputs[i] = {
                name: varName,
                type,
                stride,
                count: attribInfo.size,
                size: stride * attribInfo.size,
                glType: attribInfo.type,
                glLoc,
            };
        }
    }
    // create uniform blocks
    const activeBlockCount = gl.getProgramParameter(gpuShader.glProgram, gl.ACTIVE_UNIFORM_BLOCKS);
    let blockName: string;
    let blockIdx: number;
    let blockSize: number;
    let block: UniformBlock | null;
    if (activeBlockCount) {
        gpuShader.glBlocks = new Array<IWebGL2GPUUniformBlock>(activeBlockCount);
        for (let b = 0; b < activeBlockCount; ++b) {
            blockName = gl.getActiveUniformBlockName(gpuShader.glProgram, b)!;
            const nameOffset = blockName.indexOf('[');
            if (nameOffset !== -1) {
                blockName = blockName.substr(0, nameOffset);
            }
            // match the reflected block against the declared GFX blocks by name
            block = null;
            for (let k = 0; k < gpuShader.blocks.length; k++) {
                if (gpuShader.blocks[k].name === blockName) {
                    block = gpuShader.blocks[k];
                    break;
                }
            }
            if (!block) {
                error(`Block '${blockName}' does not bound`);
            } else {
                // active-block enumeration order doubles as the block index
                blockIdx = b;
                blockSize = gl.getActiveUniformBlockParameter(gpuShader.glProgram, blockIdx, gl.UNIFORM_BLOCK_DATA_SIZE);
                const glBinding = block.binding + (device.bindingMappingInfo.bufferOffsets[block.set] || 0);
                gl.uniformBlockBinding(gpuShader.glProgram, blockIdx, glBinding);
                gpuShader.glBlocks[b] = {
                    set: block.set,
                    binding: block.binding,
                    idx: blockIdx,
                    name: blockName,
                    size: blockSize,
                    glBinding,
                };
            }
        }
    }
    // WebGL doesn't support Framebuffer Fetch: expose subpass inputs as plain sampler textures
    for (let i = 0; i < gpuShader.subpassInputs.length; ++i) {
        const subpassInput = gpuShader.subpassInputs[i];
        gpuShader.samplerTextures.push(new UniformSamplerTexture(
            subpassInput.set, subpassInput.binding, subpassInput.name, Type.SAMPLER2D, subpassInput.count,
        ));
    }
    // create uniform sampler textures
    if (gpuShader.samplerTextures.length > 0) {
        gpuShader.glSamplerTextures = new Array<IWebGL2GPUUniformSamplerTexture>(gpuShader.samplerTextures.length);
        for (let i = 0; i < gpuShader.samplerTextures.length; ++i) {
            const sampler = gpuShader.samplerTextures[i];
            gpuShader.glSamplerTextures[i] = {
                set: sampler.set,
                binding: sampler.binding,
                name: sampler.name,
                type: sampler.type,
                count: sampler.count,
                units: [],
                glUnits: null!,
                glType: GFXTypeToWebGLType(sampler.type, gl),
                glLoc: null!,
            };
        }
    }
    // texture unit index mapping optimization
    const glActiveSamplers: IWebGL2GPUUniformSamplerTexture[] = [];
    const glActiveSamplerLocations: WebGLUniformLocation[] = [];
    const texUnitCacheMap = device.stateCache.texUnitCacheMap;
    // number of blocks occupying the flexible set, used to offset sampler bindings there
    let flexibleSetBaseOffset = 0;
    for (let i = 0; i < gpuShader.blocks.length; ++i) {
        if (gpuShader.blocks[i].set === device.bindingMappingInfo.flexibleSet) {
            flexibleSetBaseOffset++;
        }
    }
    let arrayOffset = 0;
    for (let i = 0; i < gpuShader.samplerTextures.length; ++i) {
        const sampler = gpuShader.samplerTextures[i];
        const glLoc = gl.getUniformLocation(gpuShader.glProgram, sampler.name);
        // WeChat just returns { id: -1 } for non-existing names instead of null
        if (glLoc && (glLoc as any).id !== -1) {
            glActiveSamplers.push(gpuShader.glSamplerTextures[i]);
            glActiveSamplerLocations.push(glLoc);
        }
        if (texUnitCacheMap[sampler.name] === undefined) {
            let binding = sampler.binding + device.bindingMappingInfo.samplerOffsets[sampler.set] + arrayOffset;
            if (sampler.set === device.bindingMappingInfo.flexibleSet) { binding -= flexibleSetBaseOffset; }
            texUnitCacheMap[sampler.name] = binding % device.capabilities.maxTextureUnits;
            arrayOffset += sampler.count - 1;
        }
    }
    if (glActiveSamplers.length) {
        const usedTexUnits: boolean[] = [];
        // try to reuse existing mappings first
        for (let i = 0; i < glActiveSamplers.length; ++i) {
            const glSampler = glActiveSamplers[i];
            let cachedUnit = texUnitCacheMap[glSampler.name];
            if (cachedUnit !== undefined) {
                glSampler.glLoc = glActiveSamplerLocations[i];
                for (let t = 0; t < glSampler.count; ++t) {
                    while (usedTexUnits[cachedUnit]) {
                        cachedUnit = (cachedUnit + 1) % device.capabilities.maxTextureUnits;
                    }
                    glSampler.units.push(cachedUnit);
                    usedTexUnits[cachedUnit] = true;
                }
            }
        }
        // fill in the rest sequentially
        let unitIdx = 0;
        for (let i = 0; i < glActiveSamplers.length; ++i) {
            const glSampler = glActiveSamplers[i];
            if (!glSampler.glLoc) {
                glSampler.glLoc = glActiveSamplerLocations[i];
                while (usedTexUnits[unitIdx]) { unitIdx++; }
                for (let t = 0; t < glSampler.count; ++t) {
                    while (usedTexUnits[unitIdx]) {
                        unitIdx = (unitIdx + 1) % device.capabilities.maxTextureUnits;
                    }
                    if (texUnitCacheMap[glSampler.name] === undefined) {
                        texUnitCacheMap[glSampler.name] = unitIdx;
                    }
                    glSampler.units.push(unitIdx);
                    usedTexUnits[unitIdx] = true;
                }
            }
        }
        // upload the unit assignments; temporarily switch programs if needed
        if (device.stateCache.glProgram !== gpuShader.glProgram) {
            gl.useProgram(gpuShader.glProgram);
        }
        for (let k = 0; k < glActiveSamplers.length; k++) {
            const glSampler = glActiveSamplers[k];
            glSampler.glUnits = new Int32Array(glSampler.units);
            gl.uniform1iv(glSampler.glLoc, glSampler.glUnits);
        }
        if (device.stateCache.glProgram !== gpuShader.glProgram) {
            gl.useProgram(device.stateCache.glProgram);
        }
    }
    // keep only the samplers the linker kept alive
    gpuShader.glSamplerTextures = glActiveSamplers;
}
/** Frees the linked GL program held by `gpuShader`, if any. */
export function WebGL2CmdFuncDestroyShader (device: WebGL2Device, gpuShader: IWebGL2GPUShader) {
    const { glProgram } = gpuShader;
    if (glProgram) {
        device.gl.deleteProgram(glProgram);
        gpuShader.glProgram = null;
    }
}
/**
 * Translates the input assembler's vertex attribute descriptions into
 * GL-level attribute records used at draw time. Per-stream byte offsets are
 * accumulated in declaration order (interleaved layout). No GL objects are
 * created here — vertex buffers are only referenced.
 */
export function WebGL2CmdFuncCreateInputAssember (device: WebGL2Device, gpuInputAssembler: IWebGL2GPUInputAssembler) {
    const { gl } = device;
    const { attributes } = gpuInputAssembler;
    const streamOffsets = [0, 0, 0, 0, 0, 0, 0, 0];
    gpuInputAssembler.glAttribs = new Array<IWebGL2Attrib>(attributes.length);
    for (let i = 0; i < attributes.length; ++i) {
        const attrib = attributes[i];
        const stream = attrib.stream !== undefined ? attrib.stream : 0;
        const vertexBuffer = gpuInputAssembler.gpuVertexBuffers[stream];
        const glType = GFXFormatToWebGLType(attrib.format, gl);
        const info = FormatInfos[attrib.format];
        gpuInputAssembler.glAttribs[i] = {
            name: attrib.name,
            glBuffer: vertexBuffer.glBuffer,
            glType,
            size: info.size,
            count: info.count,
            stride: vertexBuffer.stride,
            componentCount: WebGLGetComponentCount(glType, gl),
            isNormalized: attrib.isNormalized !== undefined ? attrib.isNormalized : false,
            isInstanced: attrib.isInstanced !== undefined ? attrib.isInstanced : false,
            offset: streamOffsets[stream],
        };
        streamOffsets[stream] += info.size;
    }
}
/** Deletes every VAO cached on `gpuInputAssembler` and clears the cache map. */
export function WebGL2CmdFuncDestroyInputAssembler (device: WebGL2Device, gpuInputAssembler: IWebGL2GPUInputAssembler) {
    const { gl } = device;
    for (const glVAO of gpuInputAssembler.glVAOs.values()) {
        gl.deleteVertexArray(glVAO);
    }
    gpuInputAssembler.glVAOs.clear();
}
// GFX-level transient state shared across the command functions in this file,
// layered above the raw GL binding cache in device.stateCache.
interface IWebGL2StateCache {
    gpuPipelineState: IWebGL2GPUPipelineState | null;
    gpuInputAssembler: IWebGL2GPUInputAssembler | null;
    glPrimitive: number;
    invalidateAttachments: GLenum[];
}
// Module-level singleton; gpuInputAssembler is nulled whenever a command
// disturbs the VAO/buffer bindings (see the buffer create/update functions),
// and invalidateAttachments is a scratch list reused by BeginRenderPass.
const gfxStateCache: IWebGL2StateCache = {
    gpuPipelineState: null,
    gpuInputAssembler: null,
    glPrimitive: 0,
    invalidateAttachments: [],
};
/**
 * Binds the target framebuffer, sets viewport/scissor to the render area,
 * then applies each attachment's load op: LOAD keeps contents (GL default),
 * CLEAR clears (forcing write masks on and restoring them afterwards), and
 * DISCARD invalidates the attachment. No-op unless both a framebuffer and a
 * render pass are provided.
 */
export function WebGL2CmdFuncBeginRenderPass (
    device: WebGL2Device,
    gpuRenderPass: IWebGL2GPURenderPass | null,
    gpuFramebuffer: IWebGL2GPUFramebuffer | null,
    renderArea: Rect,
    clearColors: Color[],
    clearDepth: number,
    clearStencil: number,
) {
    const { gl } = device;
    const cache = device.stateCache;
    let clears: GLbitfield = 0;
    if (gpuFramebuffer && gpuRenderPass) {
        if (cache.glFramebuffer !== gpuFramebuffer.glFramebuffer) {
            gl.bindFramebuffer(gl.FRAMEBUFFER, gpuFramebuffer.glFramebuffer);
            cache.glFramebuffer = gpuFramebuffer.glFramebuffer;
        }
        if (cache.viewport.left !== renderArea.x
            || cache.viewport.top !== renderArea.y
            || cache.viewport.width !== renderArea.width
            || cache.viewport.height !== renderArea.height) {
            gl.viewport(renderArea.x, renderArea.y, renderArea.width, renderArea.height);
            cache.viewport.left = renderArea.x;
            cache.viewport.top = renderArea.y;
            cache.viewport.width = renderArea.width;
            cache.viewport.height = renderArea.height;
        }
        // scissor covers the whole framebuffer, not just the render area
        if (cache.scissorRect.x !== 0
            || cache.scissorRect.y !== 0
            || cache.scissorRect.width !== gpuFramebuffer.width
            || cache.scissorRect.height !== gpuFramebuffer.height) {
            gl.scissor(0, 0, gpuFramebuffer.width, gpuFramebuffer.height);
            cache.scissorRect.x = 0;
            cache.scissorRect.y = 0;
            cache.scissorRect.width = gpuFramebuffer.width;
            cache.scissorRect.height = gpuFramebuffer.height;
        }
        gfxStateCache.invalidateAttachments.length = 0;
        for (let j = 0; j < clearColors.length; ++j) {
            const colorAttachment = gpuRenderPass.colorAttachments[j];
            if (colorAttachment.format !== Format.UNKNOWN) {
                switch (colorAttachment.loadOp) {
                case LoadOp.LOAD: break; // GL default behavior
                case LoadOp.CLEAR: {
                    // clearing writes through the color mask; force it fully on
                    if (cache.bs.targets[0].blendColorMask !== ColorMask.ALL) {
                        gl.colorMask(true, true, true, true);
                    }
                    if (!gpuFramebuffer.isOffscreen) {
                        // on-screen FBO has a single color attachment
                        const clearColor = clearColors[0];
                        gl.clearColor(clearColor.x, clearColor.y, clearColor.z, clearColor.w);
                        clears |= gl.COLOR_BUFFER_BIT;
                    } else {
                        // MRT: clear each draw buffer individually
                        _f32v4[0] = clearColors[j].x;
                        _f32v4[1] = clearColors[j].y;
                        _f32v4[2] = clearColors[j].z;
                        _f32v4[3] = clearColors[j].w;
                        gl.clearBufferfv(gl.COLOR, j, _f32v4);
                    }
                    break;
                }
                case LoadOp.DISCARD: {
                    // invalidate the framebuffer
                    gfxStateCache.invalidateAttachments.push(gl.COLOR_ATTACHMENT0 + j);
                    break;
                }
                default:
                }
            }
        } // if (curGPURenderPass)
        if (gpuRenderPass.depthStencilAttachment) {
            if (gpuRenderPass.depthStencilAttachment.format !== Format.UNKNOWN) {
                switch (gpuRenderPass.depthStencilAttachment.depthLoadOp) {
                case LoadOp.LOAD: break; // GL default behavior
                case LoadOp.CLEAR: {
                    // depth clears require the depth write mask to be enabled
                    if (!cache.dss.depthWrite) {
                        gl.depthMask(true);
                    }
                    gl.clearDepth(clearDepth);
                    clears |= gl.DEPTH_BUFFER_BIT;
                    break;
                }
                case LoadOp.DISCARD: {
                    // invalidate the framebuffer
                    gfxStateCache.invalidateAttachments.push(gl.DEPTH_ATTACHMENT);
                    break;
                }
                default:
                }
                if (FormatInfos[gpuRenderPass.depthStencilAttachment.format].hasStencil) {
                    switch (gpuRenderPass.depthStencilAttachment.stencilLoadOp) {
                    case LoadOp.LOAD: break; // GL default behavior
                    case LoadOp.CLEAR: {
                        // stencil clears require both face write masks to be open
                        if (!cache.dss.stencilWriteMaskFront) {
                            gl.stencilMaskSeparate(gl.FRONT, 0xffff);
                        }
                        if (!cache.dss.stencilWriteMaskBack) {
                            gl.stencilMaskSeparate(gl.BACK, 0xffff);
                        }
                        gl.clearStencil(clearStencil);
                        clears |= gl.STENCIL_BUFFER_BIT;
                        break;
                    }
                    case LoadOp.DISCARD: {
                        // invalidate the framebuffer
                        gfxStateCache.invalidateAttachments.push(gl.STENCIL_ATTACHMENT);
                        break;
                    }
                    default:
                    }
                }
            }
        } // if (curGPURenderPass.depthStencilAttachment)
        if (gpuFramebuffer.glFramebuffer && gfxStateCache.invalidateAttachments.length) {
            gl.invalidateFramebuffer(gl.FRAMEBUFFER, gfxStateCache.invalidateAttachments);
        }
        if (clears) {
            gl.clear(clears);
        }
        // restore states forced on above, so they match the cached pipeline state again
        if (clears & gl.COLOR_BUFFER_BIT) {
            const colorMask = cache.bs.targets[0].blendColorMask;
            if (colorMask !== ColorMask.ALL) {
                const r = (colorMask & ColorMask.R) !== ColorMask.NONE;
                const g = (colorMask & ColorMask.G) !== ColorMask.NONE;
                const b = (colorMask & ColorMask.B) !== ColorMask.NONE;
                const a = (colorMask & ColorMask.A) !== ColorMask.NONE;
                gl.colorMask(r, g, b, a);
            }
        }
        if ((clears & gl.DEPTH_BUFFER_BIT)
            && !cache.dss.depthWrite) {
            gl.depthMask(false);
        }
        if (clears & gl.STENCIL_BUFFER_BIT) {
            if (!cache.dss.stencilWriteMaskFront) {
                gl.stencilMaskSeparate(gl.FRONT, 0);
            }
            if (!cache.dss.stencilWriteMaskBack) {
                gl.stencilMaskSeparate(gl.BACK, 0);
            }
        }
    } // if (gpuFramebuffer)
}
/**
 * Binds a pipeline state object, descriptor sets, input assembler and dynamic
 * states to the GL context. Every GL call is guarded by a comparison against
 * `device.stateCache` (and the module-level `gfxStateCache`) so redundant
 * state submissions are skipped.
 *
 * @param device            The WebGL2 device (provides `gl` and the state cache).
 * @param gpuPipelineState  Pipeline state to bind, or null to leave the current one.
 * @param gpuInputAssembler Vertex/index buffer bindings, or null.
 * @param gpuDescriptorSets Descriptor sets, indexed by set index.
 * @param dynamicOffsets    Dynamic UBO offsets, resolved through the pipeline
 *                          layout's `dynamicOffsetIndices`.
 * @param dynamicStates     Dynamic state values consumed for the flags listed in
 *                          `gpuPipelineState.dynamicStates`.
 */
export function WebGL2CmdFuncBindStates (
    device: WebGL2Device,
    gpuPipelineState: IWebGL2GPUPipelineState | null,
    gpuInputAssembler: IWebGL2GPUInputAssembler | null,
    gpuDescriptorSets: IWebGL2GPUDescriptorSet[],
    dynamicOffsets: number[],
    dynamicStates: DynamicStates,
) {
    const { gl } = device;
    const cache = device.stateCache;
    const gpuShader = gpuPipelineState && gpuPipelineState.gpuShader;

    // Set when a different GL program is bound below; forces the input
    // assembler to be re-bound even if it is unchanged (attrib locations differ).
    let isShaderChanged = false;

    // bind pipeline
    if (gpuPipelineState && gfxStateCache.gpuPipelineState !== gpuPipelineState) {
        gfxStateCache.gpuPipelineState = gpuPipelineState;
        gfxStateCache.glPrimitive = gpuPipelineState.glPrimitive;

        if (gpuShader) {
            const { glProgram } = gpuShader;
            if (cache.glProgram !== glProgram) {
                gl.useProgram(glProgram);
                cache.glProgram = glProgram;
                isShaderChanged = true;
            }
        }

        // rasterizer state
        const { rs } = gpuPipelineState;
        if (rs) {
            if (cache.rs.cullMode !== rs.cullMode) {
                switch (rs.cullMode) {
                case CullMode.NONE: {
                    gl.disable(gl.CULL_FACE);
                    break;
                }
                case CullMode.FRONT: {
                    gl.enable(gl.CULL_FACE);
                    gl.cullFace(gl.FRONT);
                    break;
                }
                case CullMode.BACK: {
                    gl.enable(gl.CULL_FACE);
                    gl.cullFace(gl.BACK);
                    break;
                }
                default:
                }
                device.stateCache.rs.cullMode = rs.cullMode;
            }

            const isFrontFaceCCW = rs.isFrontFaceCCW; // front-face winding order
            if (device.stateCache.rs.isFrontFaceCCW !== isFrontFaceCCW) {
                gl.frontFace(isFrontFaceCCW ? gl.CCW : gl.CW);
                device.stateCache.rs.isFrontFaceCCW = isFrontFaceCCW;
            }

            if ((device.stateCache.rs.depthBias !== rs.depthBias)
                || (device.stateCache.rs.depthBiasSlop !== rs.depthBiasSlop)) {
                gl.polygonOffset(rs.depthBias, rs.depthBiasSlop);
                device.stateCache.rs.depthBias = rs.depthBias;
                device.stateCache.rs.depthBiasSlop = rs.depthBiasSlop;
            }

            if (device.stateCache.rs.lineWidth !== rs.lineWidth) {
                gl.lineWidth(rs.lineWidth);
                device.stateCache.rs.lineWidth = rs.lineWidth;
            }
        } // rasterizater state

        // depth-stencil state
        const { dss } = gpuPipelineState;
        if (dss) {
            if (cache.dss.depthTest !== dss.depthTest) {
                if (dss.depthTest) {
                    gl.enable(gl.DEPTH_TEST);
                } else {
                    gl.disable(gl.DEPTH_TEST);
                }
                cache.dss.depthTest = dss.depthTest;
            }

            if (cache.dss.depthWrite !== dss.depthWrite) {
                gl.depthMask(dss.depthWrite);
                cache.dss.depthWrite = dss.depthWrite;
            }

            if (cache.dss.depthFunc !== dss.depthFunc) {
                gl.depthFunc(WebGLCmpFuncs[dss.depthFunc]);
                cache.dss.depthFunc = dss.depthFunc;
            }

            // front
            // GL has a single STENCIL_TEST toggle shared by both faces, so the
            // enable flag is derived from front OR back.
            if ((cache.dss.stencilTestFront !== dss.stencilTestFront)
                || (cache.dss.stencilTestBack !== dss.stencilTestBack)) {
                if (dss.stencilTestFront || dss.stencilTestBack) {
                    gl.enable(gl.STENCIL_TEST);
                } else {
                    gl.disable(gl.STENCIL_TEST);
                }
                cache.dss.stencilTestFront = dss.stencilTestFront;
                cache.dss.stencilTestBack = dss.stencilTestBack;
            }

            if ((cache.dss.stencilFuncFront !== dss.stencilFuncFront)
                || (cache.dss.stencilRefFront !== dss.stencilRefFront)
                || (cache.dss.stencilReadMaskFront !== dss.stencilReadMaskFront)) {
                gl.stencilFuncSeparate(
                    gl.FRONT,
                    WebGLCmpFuncs[dss.stencilFuncFront],
                    dss.stencilRefFront,
                    dss.stencilReadMaskFront,
                );

                cache.dss.stencilFuncFront = dss.stencilFuncFront;
                cache.dss.stencilRefFront = dss.stencilRefFront;
                cache.dss.stencilReadMaskFront = dss.stencilReadMaskFront;
            }

            if ((cache.dss.stencilFailOpFront !== dss.stencilFailOpFront)
                || (cache.dss.stencilZFailOpFront !== dss.stencilZFailOpFront)
                || (cache.dss.stencilPassOpFront !== dss.stencilPassOpFront)) {
                gl.stencilOpSeparate(
                    gl.FRONT,
                    WebGLStencilOps[dss.stencilFailOpFront],
                    WebGLStencilOps[dss.stencilZFailOpFront],
                    WebGLStencilOps[dss.stencilPassOpFront],
                );

                cache.dss.stencilFailOpFront = dss.stencilFailOpFront;
                cache.dss.stencilZFailOpFront = dss.stencilZFailOpFront;
                cache.dss.stencilPassOpFront = dss.stencilPassOpFront;
            }

            if (cache.dss.stencilWriteMaskFront !== dss.stencilWriteMaskFront) {
                gl.stencilMaskSeparate(gl.FRONT, dss.stencilWriteMaskFront);
                cache.dss.stencilWriteMaskFront = dss.stencilWriteMaskFront;
            }

            // back
            if ((cache.dss.stencilFuncBack !== dss.stencilFuncBack)
                || (cache.dss.stencilRefBack !== dss.stencilRefBack)
                || (cache.dss.stencilReadMaskBack !== dss.stencilReadMaskBack)) {
                gl.stencilFuncSeparate(
                    gl.BACK,
                    WebGLCmpFuncs[dss.stencilFuncBack],
                    dss.stencilRefBack,
                    dss.stencilReadMaskBack,
                );

                cache.dss.stencilFuncBack = dss.stencilFuncBack;
                cache.dss.stencilRefBack = dss.stencilRefBack;
                cache.dss.stencilReadMaskBack = dss.stencilReadMaskBack;
            }

            if ((cache.dss.stencilFailOpBack !== dss.stencilFailOpBack)
                || (cache.dss.stencilZFailOpBack !== dss.stencilZFailOpBack)
                || (cache.dss.stencilPassOpBack !== dss.stencilPassOpBack)) {
                gl.stencilOpSeparate(
                    gl.BACK,
                    WebGLStencilOps[dss.stencilFailOpBack],
                    WebGLStencilOps[dss.stencilZFailOpBack],
                    WebGLStencilOps[dss.stencilPassOpBack],
                );

                cache.dss.stencilFailOpBack = dss.stencilFailOpBack;
                cache.dss.stencilZFailOpBack = dss.stencilZFailOpBack;
                cache.dss.stencilPassOpBack = dss.stencilPassOpBack;
            }

            if (cache.dss.stencilWriteMaskBack !== dss.stencilWriteMaskBack) {
                gl.stencilMaskSeparate(gl.BACK, dss.stencilWriteMaskBack);
                cache.dss.stencilWriteMaskBack = dss.stencilWriteMaskBack;
            }
        } // depth-stencil state

        // blend state
        const { bs } = gpuPipelineState;
        if (bs) {
            if (cache.bs.isA2C !== bs.isA2C) {
                if (bs.isA2C) {
                    gl.enable(gl.SAMPLE_ALPHA_TO_COVERAGE);
                } else {
                    gl.disable(gl.SAMPLE_ALPHA_TO_COVERAGE);
                }

                cache.bs.isA2C = bs.isA2C;
            }

            if ((cache.bs.blendColor.x !== bs.blendColor.x)
                || (cache.bs.blendColor.y !== bs.blendColor.y)
                || (cache.bs.blendColor.z !== bs.blendColor.z)
                || (cache.bs.blendColor.w !== bs.blendColor.w)) {
                gl.blendColor(bs.blendColor.x, bs.blendColor.y, bs.blendColor.z, bs.blendColor.w);

                cache.bs.blendColor.x = bs.blendColor.x;
                cache.bs.blendColor.y = bs.blendColor.y;
                cache.bs.blendColor.z = bs.blendColor.z;
                cache.bs.blendColor.w = bs.blendColor.w;
            }

            // Only render target 0 is handled here; per-target blend state for
            // the other attachments is not applied in this path.
            const target0 = bs.targets[0];
            const target0Cache = cache.bs.targets[0];

            if (target0Cache.blend !== target0.blend) {
                if (target0.blend) {
                    gl.enable(gl.BLEND);
                } else {
                    gl.disable(gl.BLEND);
                }

                target0Cache.blend = target0.blend;
            }

            if ((target0Cache.blendEq !== target0.blendEq)
                || (target0Cache.blendAlphaEq !== target0.blendAlphaEq)) {
                gl.blendEquationSeparate(WebGLBlendOps[target0.blendEq], WebGLBlendOps[target0.blendAlphaEq]);
                target0Cache.blendEq = target0.blendEq;
                target0Cache.blendAlphaEq = target0.blendAlphaEq;
            }

            if ((target0Cache.blendSrc !== target0.blendSrc)
                || (target0Cache.blendDst !== target0.blendDst)
                || (target0Cache.blendSrcAlpha !== target0.blendSrcAlpha)
                || (target0Cache.blendDstAlpha !== target0.blendDstAlpha)) {
                gl.blendFuncSeparate(
                    WebGLBlendFactors[target0.blendSrc],
                    WebGLBlendFactors[target0.blendDst],
                    WebGLBlendFactors[target0.blendSrcAlpha],
                    WebGLBlendFactors[target0.blendDstAlpha],
                );

                target0Cache.blendSrc = target0.blendSrc;
                target0Cache.blendDst = target0.blendDst;
                target0Cache.blendSrcAlpha = target0.blendSrcAlpha;
                target0Cache.blendDstAlpha = target0.blendDstAlpha;
            }

            if (target0Cache.blendColorMask !== target0.blendColorMask) {
                gl.colorMask(
                    (target0.blendColorMask & ColorMask.R) !== ColorMask.NONE,
                    (target0.blendColorMask & ColorMask.G) !== ColorMask.NONE,
                    (target0.blendColorMask & ColorMask.B) !== ColorMask.NONE,
                    (target0.blendColorMask & ColorMask.A) !== ColorMask.NONE,
                );

                target0Cache.blendColorMask = target0.blendColorMask;
            }
        } // blend state
    } // bind pipeline

    // bind descriptor sets
    if (gpuPipelineState && gpuPipelineState.gpuPipelineLayout && gpuShader) {
        const blockLen = gpuShader.glBlocks.length;
        const { dynamicOffsetIndices } = gpuPipelineState.gpuPipelineLayout;

        // Uniform buffer bindings: one UBO binding point per shader block.
        for (let j = 0; j < blockLen; j++) {
            const glBlock = gpuShader.glBlocks[j];
            const gpuDescriptorSet = gpuDescriptorSets[glBlock.set];
            const descriptorIndex = gpuDescriptorSet && gpuDescriptorSet.descriptorIndices[glBlock.binding];
            const gpuDescriptor = descriptorIndex >= 0 && gpuDescriptorSet.gpuDescriptors[descriptorIndex];

            if (!gpuDescriptor || !gpuDescriptor.gpuBuffer) {
                error(`Buffer binding '${glBlock.name}' at set ${glBlock.set} binding ${glBlock.binding} is not bounded`);
                continue;
            }

            const dynamicOffsetIndexSet = dynamicOffsetIndices[glBlock.set];
            const dynamicOffsetIndex = dynamicOffsetIndexSet && dynamicOffsetIndexSet[glBlock.binding];
            let offset = gpuDescriptor.gpuBuffer.glOffset;

            if (dynamicOffsetIndex >= 0) { offset += dynamicOffsets[dynamicOffsetIndex]; }

            if (cache.glBindUBOs[glBlock.glBinding] !== gpuDescriptor.gpuBuffer.glBuffer
                || cache.glBindUBOOffsets[glBlock.glBinding] !== offset) {
                // Non-zero offsets require bindBufferRange; bindBufferBase binds
                // the whole buffer at offset 0.
                if (offset) {
                    gl.bindBufferRange(gl.UNIFORM_BUFFER, glBlock.glBinding, gpuDescriptor.gpuBuffer.glBuffer,
                        offset, gpuDescriptor.gpuBuffer.size);
                } else {
                    gl.bindBufferBase(gl.UNIFORM_BUFFER, glBlock.glBinding, gpuDescriptor.gpuBuffer.glBuffer);
                }
                cache.glUniformBuffer = cache.glBindUBOs[glBlock.glBinding] = gpuDescriptor.gpuBuffer.glBuffer;
                cache.glBindUBOOffsets[glBlock.glBinding] = offset;
            }
        }

        // Sampler-texture bindings: each shader sampler may span several
        // texture units (array bindings).
        const samplerLen = gpuShader.glSamplerTextures.length;
        for (let i = 0; i < samplerLen; i++) {
            const glSampler = gpuShader.glSamplerTextures[i];
            const gpuDescriptorSet = gpuDescriptorSets[glSampler.set];
            let descriptorIndex = gpuDescriptorSet && gpuDescriptorSet.descriptorIndices[glSampler.binding];
            let gpuDescriptor = descriptorIndex >= 0 && gpuDescriptorSet.gpuDescriptors[descriptorIndex];

            for (let l = 0; l < glSampler.units.length; l++) {
                const texUnit = glSampler.units[l];

                const glTexUnit = cache.glTexUnits[texUnit];

                if (!gpuDescriptor || !gpuDescriptor.gpuTexture || !gpuDescriptor.gpuSampler) {
                    error(`Sampler binding '${glSampler.name}' at set ${glSampler.set} binding ${glSampler.binding} index ${l} is not bounded`);
                    continue;
                }

                if (gpuDescriptor.gpuTexture
                    && gpuDescriptor.gpuTexture.size > 0) {
                    const { gpuTexture } = gpuDescriptor;
                    if (glTexUnit.glTexture !== gpuTexture.glTexture) {
                        if (cache.texUnit !== texUnit) {
                            gl.activeTexture(gl.TEXTURE0 + texUnit);
                            cache.texUnit = texUnit;
                        }
                        // Fall back to the device's placeholder texture when the
                        // descriptor's texture has no GL object.
                        if (gpuTexture.glTexture) {
                            gl.bindTexture(gpuTexture.glTarget, gpuTexture.glTexture);
                        } else {
                            gl.bindTexture(gpuTexture.glTarget, device.nullTex2D.gpuTexture.glTexture);
                        }
                        glTexUnit.glTexture = gpuTexture.glTexture;
                    }

                    const { gpuSampler } = gpuDescriptor;
                    if (cache.glSamplerUnits[texUnit] !== gpuSampler.glSampler) {
                        gl.bindSampler(texUnit, gpuSampler.glSampler);
                        cache.glSamplerUnits[texUnit] = gpuSampler.glSampler;
                    }
                }

                // Advance to the descriptor backing the next unit of this
                // array binding.
                gpuDescriptor = gpuDescriptorSet.gpuDescriptors[++descriptorIndex];
            }
        }
    } // bind descriptor sets

    // bind vertex/index buffer
    if (gpuInputAssembler && gpuShader
        && (isShaderChanged || gfxStateCache.gpuInputAssembler !== gpuInputAssembler)) {
        gfxStateCache.gpuInputAssembler = gpuInputAssembler;

        if (device.extensions.useVAO) {
            // check vao
            // VAOs are cached per (input assembler, program) pair because
            // attribute locations depend on the program.
            let glVAO = gpuInputAssembler.glVAOs.get(gpuShader.glProgram!);
            if (!glVAO) {
                glVAO = gl.createVertexArray()!;
                gpuInputAssembler.glVAOs.set(gpuShader.glProgram!, glVAO);

                gl.bindVertexArray(glVAO);
                gl.bindBuffer(gl.ARRAY_BUFFER, null);
                gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
                cache.glArrayBuffer = null;
                cache.glElementArrayBuffer = null;

                let glAttrib: IWebGL2Attrib | null;
                for (let j = 0; j < gpuShader.glInputs.length; j++) {
                    const glInput = gpuShader.glInputs[j];
                    glAttrib = null;

                    for (let k = 0; k < gpuInputAssembler.glAttribs.length; k++) {
                        const attrib = gpuInputAssembler.glAttribs[k];
                        if (attrib.name === glInput.name) {
                            glAttrib = attrib;
                            break;
                        }
                    }

                    if (glAttrib) {
                        if (cache.glArrayBuffer !== glAttrib.glBuffer) {
                            gl.bindBuffer(gl.ARRAY_BUFFER, glAttrib.glBuffer);
                            cache.glArrayBuffer = glAttrib.glBuffer;
                        }

                        // Attributes wider than one location (e.g. matrices)
                        // occupy consecutive locations.
                        for (let c = 0; c < glAttrib.componentCount; ++c) {
                            const glLoc = glInput.glLoc + c;
                            const attribOffset = glAttrib.offset + glAttrib.size * c;

                            gl.enableVertexAttribArray(glLoc);
                            cache.glCurrentAttribLocs[glLoc] = true;

                            gl.vertexAttribPointer(glLoc, glAttrib.count, glAttrib.glType, glAttrib.isNormalized, glAttrib.stride, attribOffset);
                            gl.vertexAttribDivisor(glLoc, glAttrib.isInstanced ? 1 : 0);
                        }
                    }
                }

                const gpuBuffer = gpuInputAssembler.gpuIndexBuffer;
                if (gpuBuffer) {
                    gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, gpuBuffer.glBuffer);
                }

                gl.bindVertexArray(null);
                gl.bindBuffer(gl.ARRAY_BUFFER, null);
                gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
                cache.glArrayBuffer = null;
                cache.glElementArrayBuffer = null;
            }

            if (cache.glVAO !== glVAO) {
                gl.bindVertexArray(glVAO);
                cache.glVAO = glVAO;
            }
        } else {
            // No VAO support: set up attribute pointers manually, then disable
            // any locations left over from the previous input assembler.
            for (let a = 0; a < device.capabilities.maxVertexAttributes; ++a) {
                cache.glCurrentAttribLocs[a] = false;
            }

            for (let j = 0; j < gpuShader.glInputs.length; j++) {
                const glInput = gpuShader.glInputs[j];
                let glAttrib: IWebGL2Attrib | null = null;

                for (let k = 0; k < gpuInputAssembler.glAttribs.length; k++) {
                    const attrib = gpuInputAssembler.glAttribs[k];
                    if (attrib.name === glInput.name) {
                        glAttrib = attrib;
                        break;
                    }
                }

                if (glAttrib) {
                    if (cache.glArrayBuffer !== glAttrib.glBuffer) {
                        gl.bindBuffer(gl.ARRAY_BUFFER, glAttrib.glBuffer);
                        cache.glArrayBuffer = glAttrib.glBuffer;
                    }

                    for (let c = 0; c < glAttrib.componentCount; ++c) {
                        const glLoc = glInput.glLoc + c;
                        const attribOffset = glAttrib.offset + glAttrib.size * c;

                        if (!cache.glEnabledAttribLocs[glLoc] && glLoc >= 0) {
                            gl.enableVertexAttribArray(glLoc);
                            cache.glEnabledAttribLocs[glLoc] = true;
                        }
                        cache.glCurrentAttribLocs[glLoc] = true;

                        gl.vertexAttribPointer(glLoc, glAttrib.count, glAttrib.glType, glAttrib.isNormalized, glAttrib.stride, attribOffset);
                        gl.vertexAttribDivisor(glLoc, glAttrib.isInstanced ? 1 : 0);
                    }
                }
            } // for

            const gpuBuffer = gpuInputAssembler.gpuIndexBuffer;
            if (gpuBuffer) {
                if (cache.glElementArrayBuffer !== gpuBuffer.glBuffer) {
                    gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, gpuBuffer.glBuffer);
                    cache.glElementArrayBuffer = gpuBuffer.glBuffer;
                }
            }

            for (let a = 0; a < device.capabilities.maxVertexAttributes; ++a) {
                if (cache.glEnabledAttribLocs[a] !== cache.glCurrentAttribLocs[a]) {
                    gl.disableVertexAttribArray(a);
                    cache.glEnabledAttribLocs[a] = false;
                }
            }
        }
    } // bind vertex/index buffer

    // update dynamic states
    if (gpuPipelineState && gpuPipelineState.dynamicStates.length) {
        const dsLen = gpuPipelineState.dynamicStates.length;
        for (let k = 0; k < dsLen; k++) {
            const dynamicState = gpuPipelineState.dynamicStates[k];
            switch (dynamicState) {
            case DynamicStateFlagBit.LINE_WIDTH: {
                if (cache.rs.lineWidth !== dynamicStates.lineWidth) {
                    gl.lineWidth(dynamicStates.lineWidth);
                    cache.rs.lineWidth = dynamicStates.lineWidth;
                }
                break;
            }
            case DynamicStateFlagBit.DEPTH_BIAS: {
                if (cache.rs.depthBias !== dynamicStates.depthBiasConstant
                    || cache.rs.depthBiasSlop !== dynamicStates.depthBiasSlope) {
                    gl.polygonOffset(dynamicStates.depthBiasConstant, dynamicStates.depthBiasSlope);
                    cache.rs.depthBias = dynamicStates.depthBiasConstant;
                    cache.rs.depthBiasSlop = dynamicStates.depthBiasSlope;
                }
                break;
            }
            case DynamicStateFlagBit.BLEND_CONSTANTS: {
                const blendConstant = dynamicStates.blendConstant;
                if ((cache.bs.blendColor.x !== blendConstant.x)
                    || (cache.bs.blendColor.y !== blendConstant.y)
                    || (cache.bs.blendColor.z !== blendConstant.z)
                    || (cache.bs.blendColor.w !== blendConstant.w)) {
                    gl.blendColor(blendConstant.x, blendConstant.y, blendConstant.z, blendConstant.w);
                    cache.bs.blendColor.copy(blendConstant);
                }
                break;
            }
            case DynamicStateFlagBit.STENCIL_WRITE_MASK: {
                const front = dynamicStates.stencilStatesFront;
                const back = dynamicStates.stencilStatesBack;
                if (cache.dss.stencilWriteMaskFront !== front.writeMask) {
                    gl.stencilMaskSeparate(gl.FRONT, front.writeMask);
                    cache.dss.stencilWriteMaskFront = front.writeMask;
                }
                if (cache.dss.stencilWriteMaskBack !== back.writeMask) {
                    gl.stencilMaskSeparate(gl.BACK, back.writeMask);
                    cache.dss.stencilWriteMaskBack = back.writeMask;
                }
                break;
            }
            case DynamicStateFlagBit.STENCIL_COMPARE_MASK: {
                const front = dynamicStates.stencilStatesFront;
                const back = dynamicStates.stencilStatesBack;
                // The compare func itself is not dynamic here; reuse the cached one.
                if (cache.dss.stencilRefFront !== front.reference
                    || cache.dss.stencilReadMaskFront !== front.compareMask) {
                    gl.stencilFuncSeparate(gl.FRONT, WebGLCmpFuncs[cache.dss.stencilFuncFront], front.reference, front.compareMask);
                    cache.dss.stencilRefFront = front.reference;
                    cache.dss.stencilReadMaskFront = front.compareMask;
                }
                if (cache.dss.stencilRefBack !== back.reference
                    || cache.dss.stencilReadMaskBack !== back.compareMask) {
                    gl.stencilFuncSeparate(gl.BACK, WebGLCmpFuncs[cache.dss.stencilFuncBack], back.reference, back.compareMask);
                    cache.dss.stencilRefBack = back.reference;
                    cache.dss.stencilReadMaskBack = back.compareMask;
                }
                break;
            }
            default:
            } // switch
        } // for
    } // update dynamic states
}
/**
 * Issues draw call(s) for the input assembler currently tracked in
 * `gfxStateCache`. Handles, in order of precedence:
 * indirect draw lists (optionally batched through the WEBGL_multi_draw
 * extension), instanced draws, indexed draws, and plain array draws.
 *
 * @param device   The WebGL2 device.
 * @param drawInfo Direct-draw parameters (counts, first index/vertex,
 *                 instance count); ignored for indirect draws.
 */
export function WebGL2CmdFuncDraw (device: WebGL2Device, drawInfo: DrawInfo) {
    const { gl } = device;
    const { gpuInputAssembler, glPrimitive } = gfxStateCache;
    const md = device.extensions.WEBGL_multi_draw;

    if (gpuInputAssembler) {
        const indexBuffer = gpuInputAssembler.gpuIndexBuffer;
        if (gpuInputAssembler.gpuIndirectBuffer) {
            const { indirects } = gpuInputAssembler.gpuIndirectBuffer;
            if (indirects.drawByIndex) {
                // Convert element offsets to byte offsets once per submission.
                for (let j = 0; j < indirects.drawCount; j++) {
                    indirects.byteOffsets[j] = indirects.offsets[j] * indexBuffer!.stride;
                }
                if (md) {
                    // Extension path: submit the whole indirect list in one call.
                    if (indirects.instancedDraw) {
                        md.multiDrawElementsInstancedWEBGL(glPrimitive,
                            indirects.counts, 0,
                            gpuInputAssembler.glIndexType,
                            indirects.byteOffsets, 0,
                            indirects.instances, 0,
                            indirects.drawCount);
                    } else {
                        md.multiDrawElementsWEBGL(glPrimitive,
                            indirects.counts, 0,
                            gpuInputAssembler.glIndexType,
                            indirects.byteOffsets, 0,
                            indirects.drawCount);
                    }
                } else {
                    // Fallback: one GL call per recorded draw.
                    for (let j = 0; j < indirects.drawCount; j++) {
                        if (indirects.instances[j] > 1) {
                            gl.drawElementsInstanced(glPrimitive, indirects.counts[j],
                                gpuInputAssembler.glIndexType, indirects.byteOffsets[j], indirects.instances[j]);
                        } else {
                            gl.drawElements(glPrimitive, indirects.counts[j], gpuInputAssembler.glIndexType, indirects.byteOffsets[j]);
                        }
                    }
                }
            } else if (md) {
                if (indirects.instancedDraw) {
                    md.multiDrawArraysInstancedWEBGL(glPrimitive,
                        indirects.offsets, 0,
                        indirects.counts, 0,
                        indirects.instances, 0,
                        indirects.drawCount);
                } else {
                    md.multiDrawArraysWEBGL(glPrimitive,
                        indirects.offsets, 0,
                        indirects.counts, 0,
                        indirects.drawCount);
                }
            } else {
                for (let j = 0; j < indirects.drawCount; j++) {
                    if (indirects.instances[j] > 1) {
                        gl.drawArraysInstanced(glPrimitive, indirects.offsets[j], indirects.counts[j], indirects.instances[j]);
                    } else {
                        gl.drawArrays(glPrimitive, indirects.offsets[j], indirects.counts[j]);
                    }
                }
            }
        } else if (drawInfo.instanceCount) {
            // Direct instanced draw.
            if (indexBuffer) {
                if (drawInfo.indexCount > 0) {
                    const offset = drawInfo.firstIndex * indexBuffer.stride;
                    gl.drawElementsInstanced(glPrimitive, drawInfo.indexCount,
                        gpuInputAssembler.glIndexType, offset, drawInfo.instanceCount);
                }
            } else if (drawInfo.vertexCount > 0) {
                gl.drawArraysInstanced(glPrimitive, drawInfo.firstVertex, drawInfo.vertexCount, drawInfo.instanceCount);
            }
        } else if (indexBuffer) {
            // Direct indexed draw.
            if (drawInfo.indexCount > 0) {
                const offset = drawInfo.firstIndex * indexBuffer.stride;
                gl.drawElements(glPrimitive, drawInfo.indexCount, gpuInputAssembler.glIndexType, offset);
            }
        } else if (drawInfo.vertexCount > 0) {
            // Direct non-indexed draw.
            gl.drawArrays(glPrimitive, drawInfo.firstVertex, drawInfo.vertexCount);
        }
    }
}
// Per-command-type consumption counters (one slot per WebGL2Cmd value),
// shared scratch state reset at the start of each WebGL2CmdFuncExecuteCmds call.
const cmdIds = new Array<number>(WebGL2Cmd.COUNT);
/**
 * Replays every recorded command in `cmdPackage` against the GL context.
 * Commands of each type live in their own typed array inside the package;
 * the shared `cmdIds` counters track how many of each type have been
 * consumed so far, mapping the flat command stream back to those arrays.
 */
export function WebGL2CmdFuncExecuteCmds (device: WebGL2Device, cmdPackage: WebGL2CmdPackage) {
    cmdIds.fill(0);

    for (let i = 0; i < cmdPackage.cmds.length; ++i) {
        const cmd = cmdPackage.cmds.array[i];
        const typeIdx = cmdIds[cmd]++;

        switch (cmd) {
        case WebGL2Cmd.BEGIN_RENDER_PASS: {
            const beginCmd = cmdPackage.beginRenderPassCmds.array[typeIdx];
            WebGL2CmdFuncBeginRenderPass(device, beginCmd.gpuRenderPass, beginCmd.gpuFramebuffer, beginCmd.renderArea,
                beginCmd.clearColors, beginCmd.clearDepth, beginCmd.clearStencil);
            break;
        }
        // WebGL2Cmd.END_RENDER_PASS intentionally has no case here:
        // WebGL 2.0 doesn't support store operations on attachments, and
        // StoreOp.Store is the default GL behavior anyway.
        case WebGL2Cmd.BIND_STATES: {
            const bindCmd = cmdPackage.bindStatesCmds.array[typeIdx];
            WebGL2CmdFuncBindStates(device, bindCmd.gpuPipelineState, bindCmd.gpuInputAssembler,
                bindCmd.gpuDescriptorSets, bindCmd.dynamicOffsets, bindCmd.dynamicStates);
            break;
        }
        case WebGL2Cmd.DRAW: {
            const drawCmd = cmdPackage.drawCmds.array[typeIdx];
            WebGL2CmdFuncDraw(device, drawCmd.drawInfo);
            break;
        }
        case WebGL2Cmd.UPDATE_BUFFER: {
            const updateCmd = cmdPackage.updateBufferCmds.array[typeIdx];
            WebGL2CmdFuncUpdateBuffer(device, updateCmd.gpuBuffer as IWebGL2GPUBuffer, updateCmd.buffer as BufferSource, updateCmd.offset, updateCmd.size);
            break;
        }
        case WebGL2Cmd.COPY_BUFFER_TO_TEXTURE: {
            const copyCmd = cmdPackage.copyBufferToTextureCmds.array[typeIdx];
            WebGL2CmdFuncCopyBuffersToTexture(device, copyCmd.buffers, copyCmd.gpuTexture as IWebGL2GPUTexture, copyCmd.regions);
            break;
        }
        default:
        }
    }
}
/**
 * Uploads DOM image sources (image/canvas/video frames) into a GPU texture via
 * `texSubImage2D`, consuming one source per region — and per cube face for
 * cube maps. Regenerates mipmaps afterwards if the texture requests them.
 *
 * @param device     The WebGL2 device.
 * @param texImages  Image sources, consumed in region (and face) order.
 * @param gpuTexture Destination texture (must already be created).
 * @param regions    Destination sub-regions, one upload each.
 */
export function WebGL2CmdFuncCopyTexImagesToTexture (
    device: WebGL2Device,
    texImages: TexImageSource[],
    gpuTexture: IWebGL2GPUTexture,
    regions: BufferTextureCopy[],
) {
    const { gl } = device;
    // Bind the destination texture on the currently active unit (cached).
    const glTexUnit = device.stateCache.glTexUnits[device.stateCache.texUnit];
    if (glTexUnit.glTexture !== gpuTexture.glTexture) {
        gl.bindTexture(gpuTexture.glTarget, gpuTexture.glTexture);
        glTexUnit.glTexture = gpuTexture.glTexture;
    }

    let n = 0; // running index into texImages
    let f = 0; // cube-map face index

    switch (gpuTexture.glTarget) {
    case gl.TEXTURE_2D: {
        for (let k = 0; k < regions.length; k++) {
            const region = regions[k];
            gl.texSubImage2D(gl.TEXTURE_2D, region.texSubres.mipLevel,
                region.texOffset.x, region.texOffset.y,
                gpuTexture.glFormat, gpuTexture.glType, texImages[n++]);
        }
        break;
    }
    case gl.TEXTURE_CUBE_MAP: {
        for (let k = 0; k < regions.length; k++) {
            const region = regions[k];
            const fcount = region.texSubres.baseArrayLayer + region.texSubres.layerCount;
            for (f = region.texSubres.baseArrayLayer; f < fcount; ++f) {
                gl.texSubImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + f, region.texSubres.mipLevel,
                    region.texOffset.x, region.texOffset.y,
                    gpuTexture.glFormat, gpuTexture.glType, texImages[n++]);
            }
        }
        break;
    }
    default: {
        // Fixed message: this path copies tex images (the old text said
        // "copy buffer to texture", copy-pasted from the buffer variant).
        console.error('Unsupported GL texture type, copy tex images to texture failed.');
    }
    }

    if (gpuTexture.flags & TextureFlagBit.GEN_MIPMAP) {
        gl.generateMipmap(gpuTexture.glTarget);
    }
}
/**
 * Uploads CPU-side buffers into a GPU texture, one buffer per region (and per
 * cube face), choosing between `texSubImage2D`, `compressedTexSubImage2D`,
 * and — for ETC1, which has no sub-image upload — a full
 * `compressedTexImage2D` re-specification of the level.
 *
 * @param device     The WebGL2 device.
 * @param buffers    Source pixel buffers, consumed in region (and face) order.
 * @param gpuTexture Destination texture (must already be created).
 * @param regions    Destination sub-regions, one upload each.
 */
export function WebGL2CmdFuncCopyBuffersToTexture (
    device: WebGL2Device,
    buffers: ArrayBufferView[],
    gpuTexture: IWebGL2GPUTexture,
    regions: BufferTextureCopy[],
) {
    const { gl } = device;
    // Bind the destination texture on the currently active unit (cached).
    const glTexUnit = device.stateCache.glTexUnits[device.stateCache.texUnit];
    if (glTexUnit.glTexture !== gpuTexture.glTexture) {
        gl.bindTexture(gpuTexture.glTarget, gpuTexture.glTexture);
        glTexUnit.glTexture = gpuTexture.glTexture;
    }

    let n = 0; // running index into buffers
    let w = 1; // current region width
    let h = 1; // current region height
    let f = 0; // cube-map face index

    const fmtInfo: FormatInfo = FormatInfos[gpuTexture.format];
    const { isCompressed } = fmtInfo;
    switch (gpuTexture.glTarget) {
    case gl.TEXTURE_2D: {
        for (let k = 0; k < regions.length; k++) {
            const region = regions[k];
            w = region.texExtent.width;
            h = region.texExtent.height;
            const pixels = buffers[n++];
            if (!isCompressed) {
                gl.texSubImage2D(gl.TEXTURE_2D, region.texSubres.mipLevel,
                    region.texOffset.x, region.texOffset.y, w, h,
                    gpuTexture.glFormat, gpuTexture.glType, pixels);
            } else if (gpuTexture.glInternalFmt !== WebGL2EXT.COMPRESSED_RGB_ETC1_WEBGL) {
                gl.compressedTexSubImage2D(gl.TEXTURE_2D, region.texSubres.mipLevel,
                    region.texOffset.x, region.texOffset.y, w, h,
                    gpuTexture.glFormat, pixels);
            } else { // WEBGL_compressed_texture_etc1
                // ETC1 cannot be partially updated; re-specify the whole level.
                gl.compressedTexImage2D(gl.TEXTURE_2D, region.texSubres.mipLevel,
                    gpuTexture.glInternalFmt, w, h, 0, pixels);
            }
        }
        break;
    }
    case gl.TEXTURE_CUBE_MAP: {
        for (let k = 0; k < regions.length; k++) {
            const region = regions[k];
            const fcount = region.texSubres.baseArrayLayer + region.texSubres.layerCount;
            for (f = region.texSubres.baseArrayLayer; f < fcount; ++f) {
                w = region.texExtent.width;
                h = region.texExtent.height;
                const pixels = buffers[n++];
                if (!isCompressed) {
                    gl.texSubImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + f, region.texSubres.mipLevel,
                        region.texOffset.x, region.texOffset.y, w, h,
                        gpuTexture.glFormat, gpuTexture.glType, pixels);
                } else if (gpuTexture.glInternalFmt !== WebGL2EXT.COMPRESSED_RGB_ETC1_WEBGL) {
                    gl.compressedTexSubImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + f, region.texSubres.mipLevel,
                        region.texOffset.x, region.texOffset.y, w, h,
                        gpuTexture.glFormat, pixels);
                } else { // WEBGL_compressed_texture_etc1
                    gl.compressedTexImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + f, region.texSubres.mipLevel,
                        gpuTexture.glInternalFmt, w, h, 0, pixels);
                }
            }
        }
        break;
    }
    default: {
        console.error('Unsupported GL texture type, copy buffer to texture failed.');
    }
    }

    if (gpuTexture.flags & TextureFlagBit.GEN_MIPMAP) {
        gl.generateMipmap(gpuTexture.glTarget);
    }
}
/**
 * Reads texture contents back into CPU buffers by attaching the requested mip
 * level to a temporary framebuffer and calling `readPixels` for each region.
 * Only 2D textures are supported. The temporary framebuffer is unbound and
 * deleted before returning, and the cached framebuffer binding is invalidated.
 *
 * @param device     The WebGL2 device.
 * @param gpuTexture Source texture to read from.
 * @param buffers    Destination buffers, one per region.
 * @param regions    Source sub-regions to read, one per buffer.
 */
export function WebGL2CmdFuncCopyTextureToBuffers (
    device: WebGL2Device,
    gpuTexture: IWebGL2GPUTexture,
    buffers: ArrayBufferView[],
    regions: BufferTextureCopy[],
) {
    const { gl } = device;
    const cache = device.stateCache;

    // Temporary FBO used purely as a readback target.
    const tmpFramebuffer = gl.createFramebuffer();
    gl.bindFramebuffer(gl.FRAMEBUFFER, tmpFramebuffer);

    switch (gpuTexture.glTarget) {
    case gl.TEXTURE_2D: {
        for (let i = 0; i < regions.length; i++) {
            const region = regions[i];
            gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gpuTexture.glTarget, gpuTexture.glTexture, region.texSubres.mipLevel);
            const { x, y } = region.texOffset;
            const { width, height } = region.texExtent;
            gl.readPixels(x, y, width, height, gpuTexture.glFormat, gpuTexture.glType, buffers[i]);
        }
        break;
    }
    default: {
        console.error('Unsupported GL texture type, copy texture to buffers failed.');
    }
    }

    // Restore the default framebuffer and drop the temporary one; the cache
    // entry must be invalidated since we bypassed the cached binding.
    gl.bindFramebuffer(gl.FRAMEBUFFER, null);
    cache.glFramebuffer = null;
    gl.deleteFramebuffer(tmpFramebuffer);
}
export function WebGL2CmdFuncBlitFramebuffer (
device: WebGL2Device,
src: IWebGL2GPUFramebuffer,
dst: IWebGL2GPUFramebuffer,
srcRect: Rect,
dstRect: Rect,
filter: Filter,
) {
const { gl } = device;
if (device.stateCache.glReadFramebuffer !== src.glFramebuffer) {
gl.bindFramebuffer(gl.READ_FRAMEBUFFER, src.glFramebuffer);
device.stateCache.glReadFramebuffer = src.glFramebuffer;
}
const rebindFBO = (dst.glFramebuffer !== device.stateCache.glFramebuffer);
if (rebindFBO) {
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, dst.glFramebuffer);
}
let mask = 0;
if (src.gpuColorTextures.length > 0) {
mask |= gl.COLOR_BUFFER_BIT;
}
if (src.gpuDepthStencilTexture) {
mask |= gl.DEPTH_BUFFER_BIT;
if (FormatInfos[src.gpuDepthStencilTexture.format].hasStencil) {
mask |= gl.STENCIL_BUFFER_BIT;
}
}
const glFilter = (filter === Filter.LINEAR || filter === Filter.ANISOTROPIC) ? gl.LINEAR : gl.NEAREST;
gl.blitFramebuffer(
srcRect.x, srcRect.y, srcRect.x + srcRect.width, srcRect.y + srcRect.height,
dstRect.x, dstRect.y, dstRect.x + dstRect.width, dstRect.y + dstRect.height,
mask, glFilter,
);
if (rebindFBO) {
gl.bindFramebuffer(gl.FRAMEBUFFER, device.stateCache.glFramebuffer);
}
} | the_stack |
import { assert } from "chai";
import { getProgram } from "../../../src";
import { RUNTIME_ERRORS } from "../../../src/errors/errors-list";
import { Interpreter } from "../../../src/interpreter/interpreter";
import {
Add, Addr, Addw, And, AppGlobalDel, AppGlobalGet, AppGlobalGetEx,
AppGlobalPut, AppLocalDel, AppLocalGet, AppLocalGetEx, AppLocalPut,
AppOptedIn, Arg, Assert, Balance, BitwiseAnd, BitwiseNot, BitwiseOr, BitwiseXor,
Branch, BranchIfNotZero, BranchIfZero, Btoi, Byte, Bytec, Callsub,
Concat, Cover, Dig, Div, DivModw, Dup, Dup2, EcdsaPkDecompress, EcdsaPkRecover,
EcdsaVerify,
Ed25519verify, EqualTo, Err, Exp, Expw,
Extract, Extract3, ExtractUint16, ExtractUint32, ExtractUint64, Gaid, Gaids,
GetAssetDef, GetAssetHolding, GetBit, GetByte, Gload, Gloads, Global, GreaterThan,
GreaterThanEqualTo, Gtxn, Gtxna, Gtxns, Gtxnsa, Int, Intc, Itob, ITxnBegin, ITxnField,
ITxnSubmit, Keccak256, Label, Len, LessThan,
LessThanEqualTo, Load, Loads, MinBalance, Mod, Mul, Mulw, Not, NotEqualTo,
Or, Pop, Pragma, PushBytes, PushInt, Retsub,
Return, Select, SetBit, SetByte, Sha256, Sha512_256, Shl, Shr, Sqrt,
Store, Stores, Sub, Substring, Substring3, Swap, Txn, Txna, Uncover
} from "../../../src/interpreter/opcode-list";
import { MAX_UINT64, MaxTEALVersion, MIN_UINT64 } from "../../../src/lib/constants";
import { opcodeFromSentence, parser, wordsFromLine } from "../../../src/parser/parser";
import { Runtime } from "../../../src/runtime";
import { ExecutionMode } from "../../../src/types";
import { useFixture } from "../../helpers/integration";
import { expectRuntimeError } from "../../helpers/runtime-errors";
// NOTE: base64 strings are not validated during parsing; they are only verified at decode time
describe("Parser", function () {
  // Unit tests for wordsFromLine: tokenizes one line of TEAL source into
  // whitespace-separated words, stripping `//` comments (except inside
  // base64/base32 parentheses, where `//` can be part of the payload).
  describe("Extract words from line", () => {
    it("should return correct words for addr", function () {
      let res = wordsFromLine("addr KAGKGFFKGKGFGLFFBSLFBJKSFB");
      const expected = ["addr", "KAGKGFFKGKGFGLFFBSLFBJKSFB"];

      assert.deepEqual(res, expected);

      // comments (with or without surrounding spaces) must be dropped
      res = wordsFromLine("addr KAGKGFFKGKGFGLFFBSLFBJKSFB//comment here");
      assert.deepEqual(res, expected);

      res = wordsFromLine("addr KAGKGFFKGKGFGLFFBSLFBJKSFB //comment here");
      assert.deepEqual(res, expected);

      res = wordsFromLine("addr KAGKGFFKGKGFGLFFBSLFBJKSFB//comment here");
      assert.deepEqual(res, expected);

      // leading/trailing whitespace is ignored as well
      res = wordsFromLine("  addr KAGKGFFKGKGFGLFFBSLFBJKSFB//comment here       ");
      assert.deepEqual(res, expected);
    });

    it("should return correct words for byte base64", () => {
      let res = wordsFromLine("byte base64 BKBDKSKDK");
      let expected = ["byte", "base64", "BKBDKSKDK"];

      assert.deepEqual(res, expected);

      // parenthesized form stays a single token
      res = wordsFromLine("byte base64(BKBDKSKDK)");
      expected = ["byte", "base64(BKBDKSKDK)"];

      assert.deepEqual(res, expected);

      res = wordsFromLine("byte base64(BKBDKSKD/K)");
      expected = ["byte", "base64(BKBDKSKD/K)"];

      assert.deepEqual(res, expected);

      res = wordsFromLine("byte base64(BKBDKSKDK//KBBJSKJB)");
      expected = ["byte", "base64(BKBDKSKDK//KBBJSKJB)"];

      assert.deepEqual(res, expected);

      // Ignore `//` present in () because it may be a valid base64, but ignore outer comments
      res = wordsFromLine("byte base64(BKBDKSKDK//KBBJSKJB) // comment here");
      expected = ["byte", "base64(BKBDKSKDK//KBBJSKJB)"];

      assert.deepEqual(res, expected);
    });

    it("should return correct words for byte base32", () => {
      let res = wordsFromLine("byte     base32       BKBDKSKDK//commenthere");
      let expected = ["byte", "base32", "BKBDKSKDK"];

      assert.deepEqual(res, expected);

      res = wordsFromLine("      byte  base32(BKBDKSKDK) //comment");
      expected = ["byte", "base32(BKBDKSKDK)"];

      assert.deepEqual(res, expected);

      // b32 shorthand is tokenized the same way as base32
      res = wordsFromLine("byte b32(BKBDKSKDK)");
      expected = ["byte", "b32(BKBDKSKDK)"];

      assert.deepEqual(res, expected);

      res = wordsFromLine("byte b32 BKBDKSKDK//comment");
      expected = ["byte", "b32", "BKBDKSKDK"];

      assert.deepEqual(res, expected);
    });

    it("should return correct words for byte string literal", () => {
      // quoted literals keep embedded spaces and escaped quotes intact
      let res = wordsFromLine('byte "STRING LITERAL"');
      let expected = ["byte", "\"STRING LITERAL\""];

      assert.deepEqual(res, expected);

      res = wordsFromLine('byte "STRING \\"NESTED STRING\\" END"');
      expected = ["byte", "\"STRING \\\"NESTED STRING\\\" END\""];

      assert.deepEqual(res, expected);
    });

    it("should return correct words for int", () => {
      let res = wordsFromLine("int 123");
      const expected = ["int", "123"];

      assert.deepEqual(res, expected);

      res = wordsFromLine("int 123//comment here");
      assert.deepEqual(res, expected);

      res = wordsFromLine("       int       123       //comment here");
      assert.deepEqual(res, expected);

      res = wordsFromLine("int 123 //comment here");
      assert.deepEqual(res, expected);
    });

    it("should return correct words for operators", () => {
      let res = wordsFromLine("+");
      let expected = ["+"];

      assert.deepEqual(res, expected);

      res = wordsFromLine("  +//comment here");
      assert.deepEqual(res, expected);

      res = wordsFromLine("+ //comment here");
      assert.deepEqual(res, expected);

      res = wordsFromLine("         - //comment            here");
      expected = ["-"];
      assert.deepEqual(res, expected);

      res = wordsFromLine("- //comment here");
      assert.deepEqual(res, expected);

      res = wordsFromLine("/ //comment here");
      expected = ["/"];
      assert.deepEqual(res, expected);

      res = wordsFromLine("* //comment here");
      expected = ["*"];
      assert.deepEqual(res, expected);

      res = wordsFromLine("      *       // comment here");
      assert.deepEqual(res, expected);
    });

    // more edge cases
    // space before parentheses,
    // space after base64: base64 (xxx ), base64( xxx) ..
    it("should extract correct words from line", () => {
      // a space before '(' splits the token — these are malformed forms the
      // parser should surface as separate words
      let res = wordsFromLine("base64 (abcd)");
      let expected = ["base64", "(abcd)"];

      assert.deepEqual(res, expected);

      res = wordsFromLine("base64 (abcd )");
      expected = ["base64", "(abcd", ")"];

      assert.deepEqual(res, expected);

      res = wordsFromLine("base64( abcd)");
      expected = ["base64(", "abcd)"];

      assert.deepEqual(res, expected);

      res = wordsFromLine("base64(ab cd)");
      expected = ["base64(ab", "cd)"];

      assert.deepEqual(res, expected);

      res = wordsFromLine("base64 \"ab cd\"");
      expected = ["base64", "\"ab cd\""];

      assert.deepEqual(res, expected);
    });

    it("should extract correct words from line", () => {
      let res = wordsFromLine("arg 1//comment here");
      let expected = ["arg", "1"];

      assert.deepEqual(res, expected);

      res = wordsFromLine("arg_0// comment // comment // here");
      expected = ["arg_0"];
      assert.deepEqual(res, expected);

      // a line that is entirely a comment yields no words
      res = wordsFromLine("//comment int 2");
      expected = [];
      assert.deepEqual(res, expected);

      res = wordsFromLine("         txn             LastValid       // comment here");
      expected = ["txn", "LastValid"];
      assert.deepEqual(res, expected);

      res = wordsFromLine("     ed25519verify     // here");
      expected = ["ed25519verify"];
      assert.deepEqual(res, expected);

      // a single '/' is the division opcode, '//' starts a comment
      res = wordsFromLine("/");
      expected = ["/"];
      assert.deepEqual(res, expected);

      res = wordsFromLine("//");
      expected = [];
      assert.deepEqual(res, expected);

      res = wordsFromLine("!//");
      expected = ["!"];
      assert.deepEqual(res, expected);

      res = wordsFromLine("!=//");
      expected = ["!="];
      assert.deepEqual(res, expected);

      res = wordsFromLine("%//here");
      expected = ["%"];
      assert.deepEqual(res, expected);

      res = wordsFromLine("|//");
      expected = ["|"];
      assert.deepEqual(res, expected);
    });

    it("should extract correct stateful words", () => {
      let res = wordsFromLine("app_opted_in//comment here");
      let expected = ["app_opted_in"];
      assert.deepEqual(res, expected);

      res = wordsFromLine("          app_local_get     // comment here");
      expected = ["app_local_get"];
      assert.deepEqual(res, expected);

      res = wordsFromLine("          app_global_get_ex     // comment here");
      expected = ["app_global_get_ex"];
      assert.deepEqual(res, expected);

      res = wordsFromLine("          balance     // comment here");
      expected = ["balance"];
      assert.deepEqual(res, expected);
    });
  });
describe("Opcode Objects from words", () => {
let interpreter: Interpreter;
beforeEach(function () {
// Fresh interpreter per test, pinned to the highest supported TEAL version
// so every opcode exercised below is enabled.
interpreter = new Interpreter();
interpreter.tealVersion = MaxTEALVersion;
});
it("should return correct opcode object for '+'", () => {
const res = opcodeFromSentence(["+"], 1, interpreter);
const expected = new Add([], 1);
assert.deepEqual(res, expected);
});
it("should throw error for wrong field length for '+'", () => {
expectRuntimeError(
() => opcodeFromSentence(["+", "+"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("should return correct opcode object for '-'", () => {
const res = opcodeFromSentence(["-"], 1, interpreter);
const expected = new Sub([], 1);
assert.deepEqual(res, expected);
});
it("should throw error for wrong field length for '-'", () => {
expectRuntimeError(
() => opcodeFromSentence(["-", "-"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("should return correct opcode object for '/'", () => {
const res = opcodeFromSentence(["/"], 1, interpreter);
const expected = new Div([], 1);
assert.deepEqual(res, expected);
});
it("should throw error for wrong field length for '/'", () => {
expectRuntimeError(
() => opcodeFromSentence(["/", "/"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("should return correct opcode object for '*'", () => {
const res = opcodeFromSentence(["*"], 1, interpreter);
const expected = new Mul([], 1);
assert.deepEqual(res, expected);
});
it("should throw error for wrong field length for '*'", () => {
expectRuntimeError(
() => opcodeFromSentence(["*", "*"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("should return correct opcode object for 'addr'", () => {
const address = "WWYNX3TKQYVEREVSW6QQP3SXSFOCE3SKUSEIVJ7YAGUPEACNI5UGI4DZCE";
const res = opcodeFromSentence(["addr", address], 1, interpreter);
const expected = new Addr([address], 1);
assert.deepEqual(res, expected);
});
it("should throw error for wrong field length for 'addr'", () => {
expectRuntimeError(
() => opcodeFromSentence(["addr"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("should throw error for invalid address for 'addr'", () => {
expectRuntimeError(
() => opcodeFromSentence(["addr", "AKGH12"], 1, interpreter),
RUNTIME_ERRORS.TEAL.INVALID_ADDR
);
});
it("should return correct opcode object for 'int'", () => {
const value = "812546821";
const res = opcodeFromSentence(["int", value], 1, interpreter);
const expected = new Int([value], 1);
assert.deepEqual(res, expected);
});
it("should throw error for wrong field length for 'int'", () => {
expectRuntimeError(
() => opcodeFromSentence(["int"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("should throw error for invalid number for 'int'", () => {
expectRuntimeError(
() => opcodeFromSentence(["int", "123A12"], 1, interpreter),
RUNTIME_ERRORS.TEAL.INVALID_TYPE
);
expectRuntimeError(
() => opcodeFromSentence(["int", String(MAX_UINT64 + 5n)], 1, interpreter),
RUNTIME_ERRORS.TEAL.UINT64_OVERFLOW
);
expectRuntimeError(
() => opcodeFromSentence(["int", String(MIN_UINT64 - 5n)], 1, interpreter),
RUNTIME_ERRORS.TEAL.INVALID_TYPE
);
});
it("should return correct label", () => {
const res = opcodeFromSentence(["label:"], 1, interpreter);
const expected = new Label(["label:"], 1);
assert.deepEqual(res, expected);
});
it("should throw error if wrong label is used", () => {
expectRuntimeError(
() => opcodeFromSentence(["substring:"], 1, interpreter),
RUNTIME_ERRORS.TEAL.INVALID_LABEL
);
});
it("should return correct objects for `txn`", () => {
let res = opcodeFromSentence(["txn", "Fee"], 1, interpreter);
let expected = new Txn(["Fee"], 1, interpreter);
assert.deepEqual(res, expected);
res = opcodeFromSentence(["txn", "Accounts", "1"], 1, interpreter);
expected = new Txn(["Accounts", "1"], 1, interpreter);
assert.deepEqual(res, expected);
res = opcodeFromSentence(["txn", "ApplicationArgs", "0"], 1, interpreter);
expected = new Txn(["ApplicationArgs", "0"], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["txn", "Fee", "Fee"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
expectRuntimeError(
() => opcodeFromSentence(["txn", "fee"], 1, interpreter),
RUNTIME_ERRORS.TEAL.UNKNOWN_TRANSACTION_FIELD
);
});
it("should return correct object for `gtxn`", () => {
let res = opcodeFromSentence(["gtxn", "0", "Fee"], 1, interpreter);
let expected = new Gtxn(["0", "Fee"], 1, interpreter);
assert.deepEqual(res, expected);
res = opcodeFromSentence(["gtxn", "0", "ApplicationArgs", "0"], 1, interpreter);
expected = new Gtxn(["0", "ApplicationArgs", "0"], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["gtxn", "1"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
expectRuntimeError(
() => opcodeFromSentence(["gtxn", "1AA", "Fee"], 1, interpreter),
RUNTIME_ERRORS.TEAL.INVALID_TYPE
);
});
it("should return correct object for `txna`", () => {
let res = opcodeFromSentence(["txna", "Accounts", "0"], 1, interpreter);
let expected = new Txna(["Accounts", "0"], 1, interpreter);
assert.deepEqual(res, expected);
res = opcodeFromSentence(["txna", "ApplicationArgs", "2"], 1, interpreter);
expected = new Txna(["ApplicationArgs", "2"], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["txna", "Fee", "2"], 1, interpreter),
RUNTIME_ERRORS.TEAL.INVALID_OP_ARG
);
expectRuntimeError(
() => opcodeFromSentence(["txna", "2"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
expectRuntimeError(
() => opcodeFromSentence(["txna", "Fee", "A"], 1, interpreter),
RUNTIME_ERRORS.TEAL.INVALID_TYPE
);
});
it("should return correct object for `gtxna`", () => {
let res = opcodeFromSentence(["gtxna", "1", "Accounts", "1"], 1, interpreter);
let expected = new Gtxna(["1", "Accounts", "1"], 1, interpreter);
assert.deepEqual(res, expected);
res = opcodeFromSentence(["gtxna", "1", "ApplicationArgs", "4"], 1, interpreter);
expected = new Gtxna(["1", "ApplicationArgs", "4"], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["gtxna", "1", "Fee", "4"], 1, interpreter),
RUNTIME_ERRORS.TEAL.INVALID_OP_ARG
);
expectRuntimeError(
() => opcodeFromSentence(["gtxna", "1", "2", "3", "4"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
expectRuntimeError(
() => opcodeFromSentence(["gtxna", "1AB", "Fee", "4"], 1, interpreter),
RUNTIME_ERRORS.TEAL.INVALID_TYPE
);
});
it("should return correct objects for `global`", () => {
let res = opcodeFromSentence(["global", "MinTxnFee"], 1, interpreter);
let expected = new Global(["MinTxnFee"], 1, interpreter);
assert.deepEqual(res, expected);
res = opcodeFromSentence(["global", "MinBalance"], 1, interpreter);
expected = new Global(["MinBalance"], 1, interpreter);
assert.deepEqual(res, expected);
res = opcodeFromSentence(["global", "MaxTxnLife"], 1, interpreter);
expected = new Global(["MaxTxnLife"], 1, interpreter);
assert.deepEqual(res, expected);
res = opcodeFromSentence(["global", "ZeroAddress"], 1, interpreter);
expected = new Global(["ZeroAddress"], 1, interpreter);
assert.deepEqual(res, expected);
res = opcodeFromSentence(["global", "GroupSize"], 1, interpreter);
expected = new Global(["GroupSize"], 1, interpreter);
assert.deepEqual(res, expected);
res = opcodeFromSentence(["global", "LogicSigVersion"], 1, interpreter);
expected = new Global(["LogicSigVersion"], 1, interpreter);
assert.deepEqual(res, expected);
res = opcodeFromSentence(["global", "Round"], 1, interpreter);
expected = new Global(["Round"], 1, interpreter);
assert.deepEqual(res, expected);
res = opcodeFromSentence(["global", "LatestTimestamp"], 1, interpreter);
expected = new Global(["LatestTimestamp"], 1, interpreter);
assert.deepEqual(res, expected);
res = opcodeFromSentence(["global", "CurrentApplicationID"], 1, interpreter);
expected = new Global(["CurrentApplicationID"], 1, interpreter);
assert.deepEqual(res, expected);
res = opcodeFromSentence(["global", "CreatorAddress"], 1, interpreter);
expected = new Global(["CreatorAddress"], 1, interpreter);
assert.deepEqual(res, expected);
res = opcodeFromSentence(["global", "GroupID"], 1, interpreter);
expected = new Global(["GroupID"], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["global", "MinTxnFee", "MinTxnFee"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
expectRuntimeError(
() => opcodeFromSentence(["global", "mintxnfee"], 1, interpreter),
RUNTIME_ERRORS.TEAL.UNKNOWN_GLOBAL_FIELD
);
expectRuntimeError(
() => opcodeFromSentence(["global", "minbalance"], 1, interpreter),
RUNTIME_ERRORS.TEAL.UNKNOWN_GLOBAL_FIELD
);
expectRuntimeError(
() => opcodeFromSentence(["global", "maxtxnlife"], 1, interpreter),
RUNTIME_ERRORS.TEAL.UNKNOWN_GLOBAL_FIELD
);
});
it("should return correct opcodes for `Balance` and `Asset` opcodes", () => {
let res = opcodeFromSentence(["balance"], 1, interpreter);
let expected = new Balance([], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["balance", "1"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
res = opcodeFromSentence(["asset_holding_get", "AssetBalance"], 1, interpreter);
expected = new GetAssetHolding(["AssetBalance"], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["asset_holding_get", "AssetBalance", "AssetFrozen"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
res = opcodeFromSentence(["asset_params_get", "AssetTotal"], 1, interpreter);
expected = new GetAssetDef(["AssetTotal"], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["asset_params_get", "AssetTotal", "123"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
expectRuntimeError(
() => opcodeFromSentence(["asset_params_get", "AssetCreator", "123"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("TEALv5: should throw error for Asset Creator if LogicSigVersion < 5", () => {
interpreter.tealVersion = 4;
expectRuntimeError(
() => opcodeFromSentence(["asset_params_get", "AssetCreator"], 1, interpreter),
RUNTIME_ERRORS.TEAL.UNKNOWN_ASSET_FIELD
);
});
it("should return correct opcodes for Stateful opcodes", () => {
let res = opcodeFromSentence(["app_opted_in"], 1, interpreter);
let expected = new AppOptedIn([], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["app_opted_in", "12", "123"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
res = opcodeFromSentence(["app_local_get"], 1, interpreter);
expected = new AppLocalGet([], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["app_local_get", "123"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
res = opcodeFromSentence(["app_local_get_ex"], 1, interpreter);
expected = new AppLocalGetEx([], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["app_local_get_ex", "22", "123"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
res = opcodeFromSentence(["app_global_get"], 1, interpreter);
expected = new AppGlobalGet([], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["app_global_get", "12", "3"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
res = opcodeFromSentence(["app_global_get_ex"], 1, interpreter);
expected = new AppGlobalGetEx([], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["app_global_get_ex", "4"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
res = opcodeFromSentence(["app_local_put"], 1, interpreter);
expected = new AppLocalPut([], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["app_local_put", "1223"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
res = opcodeFromSentence(["app_global_put"], 1, interpreter);
expected = new AppGlobalPut([], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["app_global_put", "123"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
res = opcodeFromSentence(["app_local_del"], 1, interpreter);
expected = new AppLocalDel([], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["app_local_del", "3"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
res = opcodeFromSentence(["app_global_del"], 1, interpreter);
expected = new AppGlobalDel([], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["app_global_del", "45"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
describe("should return correct opcodes for tealv3 ops", () => {
it("assert", () => {
const res = opcodeFromSentence(["assert"], 1, interpreter);
const expected = new Assert([], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["assert", "1234"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("pushint", () => {
const res = opcodeFromSentence(["pushint", "345"], 1, interpreter);
const expected = new PushInt(["345"], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["pushint", "345", "456"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
expectRuntimeError( // Int Constants(eg. NoOp) works with int x
() => opcodeFromSentence(["pushint", "NoOp"], 1, interpreter),
RUNTIME_ERRORS.TEAL.INVALID_TYPE
);
expectRuntimeError(
() => opcodeFromSentence(["pushint", (MAX_UINT64 + 10n).toString()], 1, interpreter),
RUNTIME_ERRORS.TEAL.UINT64_OVERFLOW
);
});
it("pushbytes", () => {
const res = opcodeFromSentence(["pushbytes", `"Algorand"`], 1, interpreter);
const expected = new PushBytes([`"Algorand"`], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["pushbytes", `"Algorand"`, `"Blockchain"`], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
expectRuntimeError(
() => opcodeFromSentence(["pushbytes", `0x250001000192CD0000002F6D6E742F72`], 1, interpreter),
RUNTIME_ERRORS.TEAL.UNKOWN_DECODE_TYPE
);
});
it("swap", () => {
const res = opcodeFromSentence(["swap"], 1, interpreter);
const expected = new Swap([], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["swap", "xyz"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("txn fields", () => {
let res = opcodeFromSentence(["txn", "Assets", "1"], 1, interpreter);
let expected = new Txn(["Assets", "1"], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["txn", "Assets", "0", "1"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
expectRuntimeError(
() => opcodeFromSentence(["txn", "Assets", "random-string"], 1, interpreter),
RUNTIME_ERRORS.TEAL.INVALID_TYPE
);
res = opcodeFromSentence(["txn", "Applications", "0"], 1, interpreter);
expected = new Txn(["Applications", "0"], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["txn", "Applications", "0", "11"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
expectRuntimeError(
() => opcodeFromSentence(["txn", "Applications", "random-string"], 1, interpreter),
RUNTIME_ERRORS.TEAL.INVALID_TYPE
);
res = opcodeFromSentence(["txn", "NumAssets"], 1, interpreter);
expected = new Txn(["NumAssets"], 1, interpreter);
assert.deepEqual(res, expected);
res = opcodeFromSentence(["txn", "GlobalNumUint"], 1, interpreter);
expected = new Txn(["GlobalNumUint"], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["txn", "NumAssets", "0"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
expectRuntimeError(
() => opcodeFromSentence(["txn", "GlobalNumUint", "0"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("getbit", () => {
const res = opcodeFromSentence(["getbit"], 1, interpreter);
const expected = new GetBit([], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["getbit", "1234"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("setbit", () => {
const res = opcodeFromSentence(["setbit"], 1, interpreter);
const expected = new SetBit([], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["setbit", "1234"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("getbyte", () => {
const res = opcodeFromSentence(["getbyte"], 1, interpreter);
const expected = new GetByte([], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["getbyte", "1234"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("setbyte", () => {
const res = opcodeFromSentence(["setbyte"], 1, interpreter);
const expected = new SetByte([], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["setbyte", "1234"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("dig", () => {
const res = opcodeFromSentence(["dig", "2"], 1, interpreter);
const expected = new Dig(["2"], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["dig", "xyz"], 1, interpreter),
RUNTIME_ERRORS.TEAL.INVALID_TYPE
);
expectRuntimeError(
() => opcodeFromSentence(["dig", "2", "3"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("select", () => {
const res = opcodeFromSentence(["select"], 1, interpreter);
const expected = new Select([], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["select", "xyz"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("gtxns", () => {
const res = opcodeFromSentence(["gtxns", "Amount"], 1, interpreter);
const expected = new Gtxns(["Amount"], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["gtxns", "amount"], 1, interpreter),
RUNTIME_ERRORS.TEAL.UNKNOWN_TRANSACTION_FIELD
);
// invalid because index 0 is fetched from top of stack
expectRuntimeError(
() => opcodeFromSentence(["gtxns", "0", "Amount"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("gtxnsa", () => {
const res = opcodeFromSentence(["gtxnsa", "ApplicationArgs", "0"], 1, interpreter);
const expected = new Gtxnsa(["ApplicationArgs", "0"], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["gtxnsa", "applicationargs", "0"], 1, interpreter),
RUNTIME_ERRORS.TEAL.INVALID_OP_ARG
);
// invalid because index 0 is fetched from top of stack
expectRuntimeError(
() => opcodeFromSentence(["gtxnsa", "0", "ApplicationArgs", "0"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("min_balance", () => {
const res = opcodeFromSentence(["min_balance"], 1, interpreter);
const expected = new MinBalance([], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["min_balance", "xyz"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
});
describe("should return correct opcodes for tealv4 ops", () => {
it("gload", () => {
const res = opcodeFromSentence(["gload", "0", "1"], 1, interpreter);
const expected = new Gload(["0", "1"], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["gload", "one", "1"], 1, interpreter),
RUNTIME_ERRORS.TEAL.INVALID_TYPE
);
expectRuntimeError(
() => opcodeFromSentence(["gload", "0"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("gloads", () => {
const res = opcodeFromSentence(["gloads", "0"], 1, interpreter);
const expected = new Gloads(["0"], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["gloads", "one"], 1, interpreter),
RUNTIME_ERRORS.TEAL.INVALID_TYPE
);
expectRuntimeError(
() => opcodeFromSentence(["gloads", "0", "1"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("callsub", () => {
const res = opcodeFromSentence(["callsub", "label"], 1, interpreter);
const expected = new Callsub(["label"], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["callsub", "label1", "label2"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("retsub", () => {
const res = opcodeFromSentence(["retsub"], 1, interpreter);
const expected = new Retsub([], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["retsub", "1"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("gaid", () => {
const res = opcodeFromSentence(["gaid", "2"], 1, interpreter);
const expected = new Gaid(["2"], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["gaid", "1", "2"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("gaids", () => {
const res = opcodeFromSentence(["gaids"], 1, interpreter);
const expected = new Gaids([], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["gaids", "1", "2"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("divmodw", () => {
const res = opcodeFromSentence(["divmodw"], 1, interpreter);
const expected = new DivModw([], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["divmodw", "1"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("exp", () => {
const res = opcodeFromSentence(["exp"], 1, interpreter);
const expected = new Exp([], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["exp", "1"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("expw", () => {
const res = opcodeFromSentence(["expw"], 1, interpreter);
const expected = new Expw([], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["expw", "1"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("shl", () => {
const res = opcodeFromSentence(["shl"], 1, interpreter);
const expected = new Shl([], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["shl", "1"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("shr", () => {
const res = opcodeFromSentence(["shr"], 1, interpreter);
const expected = new Shr([], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["shr", "1"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("sqrt", () => {
const res = opcodeFromSentence(["sqrt"], 1, interpreter);
const expected = new Sqrt([], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["sqrt", "1"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
});
describe("should return correct opcodes for tealv5 ops", () => {
it("extract", () => {
const res = opcodeFromSentence(["extract", "1", "2"], 1, interpreter);
const expected = new Extract(["1", "2"], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["extract", "1"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("extract3", () => {
const res = opcodeFromSentence(["extract3"], 1, interpreter);
const expected = new Extract3([], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["extract3", "1"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("extract_uint16", () => {
const res = opcodeFromSentence(["extract_uint16"], 1, interpreter);
const expected = new ExtractUint16([], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["extract_uint16", "1"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("extract_uint32", () => {
const res = opcodeFromSentence(["extract_uint32"], 1, interpreter);
const expected = new ExtractUint32([], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["extract_uint32", "1"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("extract_uint64", () => {
const res = opcodeFromSentence(["extract_uint64"], 1, interpreter);
const expected = new ExtractUint64([], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["extract_uint64", "1"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
});
describe("Tealv5: ECDSA opcodes", () => {
it("ecdsa_verify", () => {
const res = opcodeFromSentence(["ecdsa_verify", "0"], 1, interpreter);
const expected = new EcdsaVerify(["0"], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["ecdsa_verify"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("ecdsa_pk_decompress", () => {
const res = opcodeFromSentence(["ecdsa_pk_decompress", "0"], 1, interpreter);
const expected = new EcdsaPkDecompress(["0"], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["ecdsa_pk_decompress"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("ecdsa_pk_recover", () => {
const res = opcodeFromSentence(["ecdsa_pk_recover", "0"], 1, interpreter);
const expected = new EcdsaPkRecover(["0"], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["ecdsa_pk_recover"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
});
describe("should return correct opcodes for tealv5 ops", () => {
it("loads", () => {
const res = opcodeFromSentence(['loads'], 1, interpreter);
const expected = new Loads([], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["loads", "1"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("stores", () => {
const res = opcodeFromSentence(['stores'], 1, interpreter);
const expected = new Stores([], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["stores", "1"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("cover", () => {
const res = opcodeFromSentence(["cover", "1"], 1, interpreter);
const expected = new Cover(["1"], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["cover", "1", "2"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("uncover", () => {
const res = opcodeFromSentence(["uncover", "1"], 1, interpreter);
const expected = new Uncover(["1"], 1);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["uncover", "1", "2"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("itxn_begin", () => {
const res = opcodeFromSentence(["itxn_begin"], 1, interpreter);
const expected = new ITxnBegin([], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["itxn_begin", "exxtra"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("itxn_field f", () => {
let res = opcodeFromSentence(["itxn_field", "Sender"], 1, interpreter);
let expected = new ITxnField(["Sender"], 1, interpreter);
assert.deepEqual(res, expected);
res = opcodeFromSentence(["itxn_field", "FreezeAsset"], 1, interpreter);
expected = new ITxnField(["FreezeAsset"], 1, interpreter);
assert.deepEqual(res, expected);
res = opcodeFromSentence(["itxn_field", "ConfigAssetTotal"], 1, interpreter);
expected = new ITxnField(["ConfigAssetTotal"], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["itxn_field", "Sender", "Fee"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
it("itxn_submit", () => {
const res = opcodeFromSentence(["itxn_submit"], 1, interpreter);
const expected = new ITxnSubmit([], 1, interpreter);
assert.deepEqual(res, expected);
expectRuntimeError(
() => opcodeFromSentence(["itxn_submit", "exxtra"], 1, interpreter),
RUNTIME_ERRORS.TEAL.ASSERT_LENGTH
);
});
});
});
// Name of the .teal fixture exercising the crypto opcodes (used by the
// "Crypto opcodes" test below).
const cryptoFile = "test-crypto.teal";
describe("Opcodes list from TEAL file", () => {
useFixture("teal-files");
let interpreter: Interpreter;
beforeEach(function () {
interpreter = new Interpreter();
interpreter.tealVersion = 2;
});
it("Should return correct opcode list for '+'", async () => {
const file1 = "test-file-1.teal";
let res = parser(getProgram(file1), ExecutionMode.SIGNATURE, interpreter);
const expected = [new Int(["1"], 1), new Int(["3"], 2), new Add([], 3)];
assert.deepEqual(res, expected);
const expect = [new Pragma(["version", "4"], 1, interpreter), new Int(["1"], 2),
new Int(["3"], 3), new Add([], 4)];
res = parser(getProgram("test-file-2.teal"), ExecutionMode.SIGNATURE, interpreter);
assert.deepEqual(res, expect);
});
it("Should throw error if #pragma is not on 1st line", async () => {
let file = "test-pragma-1.teal";
expectRuntimeError(
() => parser(getProgram(file), ExecutionMode.SIGNATURE, interpreter),
RUNTIME_ERRORS.TEAL.PRAGMA_NOT_AT_FIRST_LINE
);
file = "test-pragma-2.teal";
expectRuntimeError(
() => parser(getProgram(file), ExecutionMode.SIGNATURE, interpreter),
RUNTIME_ERRORS.TEAL.PRAGMA_NOT_AT_FIRST_LINE
);
});
it("Should return correct opcode list for '-'", async () => {
const file = "test-file-3.teal";
const res = parser(getProgram(file), ExecutionMode.SIGNATURE, interpreter);
const expected = [
new Pragma(["version", "4"], 1, interpreter),
new Int(["5"], 2),
new Int(["3"], 3),
new Sub([], 4)
];
assert.deepEqual(res, expected);
});
it("Should return correct opcode list for '/'", async () => {
const file = "test-file-4.teal";
const res = parser(getProgram(file), ExecutionMode.SIGNATURE, interpreter);
const expected = [
new Pragma(["version", "4"], 1, interpreter),
new Int(["6"], 2),
new Int(["3"], 3),
new Div([], 6)
];
assert.deepEqual(res, expected);
});
it("Should return correct opcode list for '*'", async () => {
const file = "test-file-5.teal";
const res = parser(getProgram(file), ExecutionMode.SIGNATURE, interpreter);
const expected = [
new Pragma(["version", "4"], 1, interpreter),
new Int(["5"], 4),
new Int(["3"], 6),
new Mul([], 10)
];
assert.deepEqual(res, expected);
});
it("Should return correct opcode list for 'addr'", async () => {
const file = "test-addr.teal";
const res = parser(getProgram(file), ExecutionMode.SIGNATURE, interpreter);
const expected = [
new Pragma(["version", "4"], 1, interpreter),
new Addr(["WWYNX3TKQYVEREVSW6QQP3SXSFOCE3SKUSEIVJ7YAGUPEACNI5UGI4DZCE"], 2)
];
assert.deepEqual(res, expected);
});
it("Should return correct opcode list for 'byte'", async () => {
const file = "test-byte.teal";
const res = parser(getProgram(file), ExecutionMode.SIGNATURE, interpreter);
const byte64 = "QzYhq9JlYbn2QdOMrhyxVlNtNjeyvyJc/I8d8VAGfGc=";
const byte32 = "MFRGGZDFMY======";
const expected = [
new Byte(["b64", byte64], 1), new Byte(["b64", byte64], 2),
new Byte(["b64", byte64], 3), new Byte(["b64", byte64], 4),
new Byte(["b32", byte32], 5), new Byte(["b32", byte32], 6),
new Byte(["b32", byte32], 7), new Byte(["b32", byte32], 8)
];
assert.deepEqual(res, expected);
});
it("Should return correct opcode list for 'Len and Err'", async () => {
const file = "test-len-err.teal";
const res = parser(getProgram(file), ExecutionMode.SIGNATURE, interpreter);
const expected = [new Len([], 1), new Err([], 2)];
assert.deepEqual(res, expected);
});
it("Should return correct opcode list for 'Bitwise'", async () => {
const file = "test-bitwise.teal";
const res = parser(getProgram(file), ExecutionMode.SIGNATURE, interpreter);
const expected = [
new BitwiseOr([], 2),
new BitwiseAnd([], 4),
new BitwiseXor([], 6),
new BitwiseNot([], 7)
];
assert.deepEqual(res, expected);
});
it("Should return correct opcode list for 'Mod'", async () => {
const file = "test-mod.teal";
const res = parser(getProgram(file), ExecutionMode.SIGNATURE, interpreter);
const expected = [new Int(["6"], 1), new Int(["3"], 2), new Mod([], 3)];
assert.deepEqual(res, expected);
});
// These opcodes read interpreter state at construction/validation time, so the
// tests seed that state (runtime args, constant blocks, scratch) before parsing.
it("Should return correct opcode list for 'Arg'", async () => {
const file = "test-arg.teal";
// Arg validates its index against ctx.args, so provide one (empty) argument
interpreter.runtime = new Runtime([]);
interpreter.runtime.ctx.args = [new Uint8Array(0)];
const res = parser(getProgram(file), ExecutionMode.SIGNATURE, interpreter);
const expected = [new Arg(["0"], 1, interpreter)];
assert.deepEqual(res, expected);
});
it("Should return correct opcode list for 'Intc and Bytec'", async () => {
const file = "test-int-bytec.teal";
// intc/bytec index into the interpreter's constant blocks
interpreter.intcblock = [1n];
interpreter.bytecblock = [new Uint8Array(0)];
const res = parser(getProgram(file), ExecutionMode.SIGNATURE, interpreter);
const expected = [new Intc(["0"], 1, interpreter), new Bytec(["0"], 2, interpreter)];
assert.deepEqual(res, expected);
});
it("Should return correct opcode list for 'Store and Load'", async () => {
const file = "test-store-load.teal";
// store/load address slot 0 of the scratch space
interpreter.scratch = [1n];
const res = parser(getProgram(file), ExecutionMode.SIGNATURE, interpreter);
const expected = [new Store(["0"], 1, interpreter), new Load(["0"], 2, interpreter)];
assert.deepEqual(res, expected);
});
// cryptoFile (shared fixture) holds the four hashing/signature opcodes, one per line
it("Should return correct opcode list for 'Crypto opcodes'", async () => {
const res = parser(getProgram(cryptoFile), ExecutionMode.SIGNATURE, interpreter);
const expected = [
new Sha256([], 1),
new Keccak256([], 2),
new Sha512_256([], 3),
new Ed25519verify([], 4)
];
assert.deepEqual(res, expected);
});
// FIX: test title typo "comparsions" -> "comparisons"
it("Should return correct opcode list for 'comparisons'", async () => {
const file = "test-compare.teal";
const res = parser(getProgram(file), ExecutionMode.SIGNATURE, interpreter);
// one comparison/logical opcode per fixture line, in source order
const expected = [
new LessThan([], 1),
new GreaterThan([], 2),
new LessThanEqualTo([], 3),
new GreaterThanEqualTo([], 4),
new And([], 5),
new Or([], 6),
new EqualTo([], 7),
new NotEqualTo([], 8),
new Not([], 9)
];
assert.deepEqual(res, expected);
});
// Mixed stack-manipulation / conversion opcodes; #pragma consumes line 1
it("Should return correct opcode list for 'all others'", async () => {
const file = "test-others.teal";
const res = parser(getProgram(file), ExecutionMode.SIGNATURE, interpreter);
const expected = [
new Pragma(["version", "4"], 1, interpreter),
new Itob([], 2),
new Btoi([], 3),
new Mulw([], 4),
new Addw([], 5),
new Pop([], 6),
new Dup([], 7),
new Dup2([], 8),
new Concat([], 9),
new Substring(["0", "4"], 10),
new Substring3([], 11)
];
assert.deepEqual(res, expected);
});
// Branch opcodes keep their target label as a string argument
it("should return correct opcode list for 'b, bz, bnz'", async () => {
const file = "test-branch.teal";
const res = parser(getProgram(file), ExecutionMode.SIGNATURE, interpreter);
const expected = [
new Branch(["label1"], 2, interpreter),
new BranchIfZero(["label2"], 3, interpreter),
new BranchIfNotZero(["label3"], 4, interpreter)
];
assert.deepEqual(res, expected);
});
it("should return correct opcode list for 'return'", async () => {
const file = "test-return.teal";
const res = parser(getProgram(file), ExecutionMode.SIGNATURE, interpreter);
const expected = [new Return([], 2, interpreter)];
assert.deepEqual(res, expected);
});
// Label definitions keep the trailing ':' in their stored argument
it("should return correct opcode list for 'Label'", async () => {
const file = "test-label.teal";
const res = parser(getProgram(file), ExecutionMode.SIGNATURE, interpreter);
const expected = [new Label(["label:"], 2)];
assert.deepEqual(res, expected);
});
// Each supported `global` field parses to a Global opcode carrying the field name
it("should return correct opcode list for 'global'", async () => {
const file = "test-global.teal";
const res = parser(getProgram(file), ExecutionMode.SIGNATURE, interpreter);
const expected = [
new Global(["MinTxnFee"], 3, interpreter),
new Global(["MinBalance"], 4, interpreter),
new Global(["MaxTxnLife"], 5, interpreter),
new Global(["ZeroAddress"], 6, interpreter),
new Global(["GroupSize"], 7, interpreter),
new Global(["LogicSigVersion"], 8, interpreter),
new Global(["Round"], 9, interpreter),
new Global(["LatestTimestamp"], 10, interpreter),
new Global(["CurrentApplicationID"], 11, interpreter)
];
assert.deepEqual(res, expected);
});
// Stateful (application-mode) opcodes require ExecutionMode.APPLICATION;
// parsing them in SIGNATURE mode would be rejected
it("should return correct opcode list for `Stateful`", async () => {
const file = "test-stateful.teal";
const res = parser(getProgram(file), ExecutionMode.APPLICATION, interpreter);
const expected = [
new Pragma(["version", "4"], 1, interpreter),
new Balance([], 4, interpreter),
new GetAssetHolding(["AssetBalance"], 5, interpreter),
new GetAssetDef(["AssetTotal"], 6, interpreter),
new AppOptedIn([], 8, interpreter),
new AppLocalGet([], 9, interpreter),
new AppLocalGetEx([], 10, interpreter),
new AppGlobalGet([], 11, interpreter),
new AppGlobalGetEx([], 12, interpreter),
new AppLocalPut([], 13, interpreter),
new AppGlobalPut([], 14, interpreter),
new AppLocalDel([], 15, interpreter),
new AppGlobalDel([], 16, interpreter)
];
assert.deepEqual(res, expected);
});
});
// Gas (opcode cost) accounting: constructing an opcode adds its cost to
// interpreter.gas, and parsing a whole file accumulates the total.
describe("Gas cost of Opcodes from TEAL file", () => {
useFixture("teal-files");
let interpreter: Interpreter;
// fresh interpreter per test so accumulated gas never leaks across cases
beforeEach(function () {
interpreter = new Interpreter();
});
it("Should return correct gas cost for 'Crypto opcodes' for tealversion 1", async () => {
interpreter.tealVersion = 1; // by default the version is also 1
// FIX: opcodeFromSentence is called for its side effect of adding the
// opcode's cost to interpreter.gas; the previous `let op = ...` binding was
// never read (and needed an eslint suppression), so it is dropped.
opcodeFromSentence(["sha256"], 1, interpreter);
assert.equal(interpreter.gas, 7);
interpreter.gas = 0;
opcodeFromSentence(["keccak256"], 2, interpreter);
assert.equal(interpreter.gas, 26);
interpreter.gas = 0;
opcodeFromSentence(["sha512_256"], 3, interpreter);
assert.equal(interpreter.gas, 9);
interpreter.gas = 0;
opcodeFromSentence(["ed25519verify"], 4, interpreter);
assert.equal(interpreter.gas, 1900);
interpreter.gas = 0;
// parsing the whole file accumulates the total cost
parser(getProgram(cryptoFile), ExecutionMode.SIGNATURE, interpreter);
assert.equal(interpreter.gas, 1942); // 7 + 26 + 9 + 1900
});
it("Should return correct gas cost for 'Crypto opcodes' for tealversion 2", async () => {
interpreter.tealVersion = 2;
// FIX: dropped the unused `op` binding (opcodeFromSentence is invoked only
// for its gas-accumulation side effect), removing the eslint suppression.
opcodeFromSentence(["sha256"], 1, interpreter);
assert.equal(interpreter.gas, 35);
interpreter.gas = 0;
opcodeFromSentence(["keccak256"], 2, interpreter);
assert.equal(interpreter.gas, 130);
interpreter.gas = 0;
opcodeFromSentence(["sha512_256"], 3, interpreter);
assert.equal(interpreter.gas, 45);
interpreter.gas = 0;
opcodeFromSentence(["ed25519verify"], 4, interpreter);
assert.equal(interpreter.gas, 1900);
interpreter.gas = 0;
parser(getProgram(cryptoFile), ExecutionMode.SIGNATURE, interpreter);
assert.equal(interpreter.gas, 2110); // 35 + 130 + 45 + 1900
});
// note: crypto opcode costs are identical for TEAL versions 2 and 3
it("Should return correct gas cost for 'Crypto opcodes' for tealversion 3", async () => {
interpreter.tealVersion = 3;
// FIX: dropped the unused `op` binding (opcodeFromSentence is invoked only
// for its gas-accumulation side effect), removing the eslint suppression.
opcodeFromSentence(["sha256"], 1, interpreter);
assert.equal(interpreter.gas, 35);
interpreter.gas = 0;
opcodeFromSentence(["keccak256"], 2, interpreter);
assert.equal(interpreter.gas, 130);
interpreter.gas = 0;
opcodeFromSentence(["sha512_256"], 3, interpreter);
assert.equal(interpreter.gas, 45);
interpreter.gas = 0;
opcodeFromSentence(["ed25519verify"], 4, interpreter);
assert.equal(interpreter.gas, 1900);
interpreter.gas = 0;
parser(getProgram(cryptoFile), ExecutionMode.SIGNATURE, interpreter);
assert.equal(interpreter.gas, 2110); // 35 + 130 + 45 + 1900
});
// End-to-end: parsing whole files accumulates the sum of all opcode costs;
// gas is reset between files since it lives on the shared interpreter.
it("Should return correct gas cost for mix opcodes from teal files", async () => {
let file = "test-file-1.teal";
const mode = ExecutionMode.SIGNATURE;
parser(getProgram(file), mode, interpreter);
assert.equal(interpreter.gas, 3);
interpreter.gas = 0;
file = "test-file-3.teal";
parser(getProgram(file), mode, interpreter);
assert.equal(interpreter.gas, 3);
interpreter.gas = 0;
file = "test-file-4.teal";
parser(getProgram(file), mode, interpreter);
assert.equal(interpreter.gas, 3);
interpreter.gas = 0;
file = "test-label.teal";
parser(getProgram(file), mode, interpreter);
assert.equal(interpreter.gas, 0); // label has cost 0
interpreter.gas = 0;
file = "test-others.teal";
parser(getProgram(file), mode, interpreter);
assert.equal(interpreter.gas, 10);
interpreter.gas = 0;
file = "test-stateful.teal";
parser(getProgram(file), ExecutionMode.APPLICATION, interpreter);
assert.equal(interpreter.gas, 12);
});
// The parser enforces the protocol's per-program cost ceiling at parse time
it("Should throw error if total cost exceeds 20000", async () => {
const file = "test-max-opcost.teal"; // has cost 22800
expectRuntimeError(
() => parser(getProgram(file), ExecutionMode.SIGNATURE, interpreter),
RUNTIME_ERRORS.TEAL.MAX_COST_EXCEEDED
);
});
});
}); | the_stack |
'use strict';
import {TextDocument, TextDocumentContentChangeEvent, RemoteConsole, Position, Range, Diagnostic} from 'vscode-languageserver';
import * as vscode from 'vscode-languageserver';
import {CancellationToken} from 'vscode-jsonrpc';
import * as thmProto from './protocol';
import * as coqProto from './coqtop/coq-proto';
import * as coqParser from './parsing/coq-parser';
import * as textUtil from './util/text-util';
import {AnnotatedText, textToDisplayString} from './util/AnnotatedText';
import {CoqStateMachine, GoalResult, StateStatus} from './stm/STM';
import {FeedbackSync, DocumentFeedbackCallbacks} from './FeedbackSync';
import {SentenceCollection} from './sentence-model/SentenceCollection';
import {CoqProject} from './CoqProject';
/** vscode needs to export this class */
// Minimal mirror of the LSP TextDocumentItem structure sent on didOpen.
export interface TextDocumentItem {
uri: string;
languageId: string;
version: number;
text: string;
}
/** Sink for messages Coq emits (by level and feedback route) destined for the client view. */
export interface MessageCallback {
sendMessage(level: string, message: AnnotatedText, routeId: coqProto.RouteId) : void;
}
/** Notifies the client that the Coq session was reset. */
export interface ResetCallback {
sendReset() : void;
}
/** Delivers Ltac profiling results to the client. */
export interface LtacProfCallback {
sendLtacProfResults(results: coqProto.LtacProfResults) : void;
}
/** Notifies the client that a coqtop instance has started. */
export interface CoqtopStartCallback {
sendCoqtopStart() : void;
}
/** Notifies the client that coqtop has stopped; `message` optionally explains why. */
export interface CoqtopStopCallback {
// FIX: added the missing `: void` return annotation (implicit `any` under
// noImplicitAny; now consistent with the sibling callback interfaces).
sendCoqtopStop(reason: thmProto.CoqtopStopReason, message?: string): void;
}
/** Full set of callbacks a CoqDocument needs to report everything back to the client. */
export type DocumentCallbacks = MessageCallback & ResetCallback & LtacProfCallback & CoqtopStartCallback & CoqtopStopCallback & DocumentFeedbackCallbacks;
export class CoqDocument implements TextDocument {
// TextDocument
// TextDocument implementation: read-only views delegated to the underlying
// SentenceCollection.
// FIX: removed the stray empty statements after the getter bodies ("};") and
// the doubled semicolon in getText ("...;;").
public get uri() { return this.document.uri; }
public get languageId() { return this.document.languageId; }
public get version() { return this.document.version; }
public get lineCount() { return this.document.lineCount; }
public getText() {
return this.document.getText();
}
// Owning project: supplies settings and coqtop instances.
private project: CoqProject;
// Coq state machine; null until resetCoq() creates it (recreated on restart).
private stm: CoqStateMachine|null = null;
// Remote console for logging back to the language client.
private clientConsole: RemoteConsole;
private callbacks : MessageCallback & ResetCallback & LtacProfCallback & CoqtopStartCallback & CoqtopStopCallback;
// Parsed sentence model of the document text.
// NOTE(review): `= null` conflicts with the non-nullable declared type under
// strictNullChecks — presumably compiled without strict null checks; confirm.
private document: SentenceCollection = null;
// Feedback destined for the extension client/view
private feedback : FeedbackSync;
// Ranges currently being parsed, shown with a preliminary "parsing" highlight.
private parsingRanges : Range[] = [];
// private interactionCommands = new AsyncWorkQueue();
// private interactionLoopStatus = InteractionLoopStatus.Idle;
// we'll use this as a callback, so protect it with an arrow function so it gets the correct "this" pointer
/**
* @param project - workspace project supplying settings and coqtop instances
* @param document - initial document contents sent by the client
* @param clientConsole - remote console for logging to the client
* @param callbacks - sinks for messages/highlights/feedback to the extension
*/
constructor(project : CoqProject, document: TextDocumentItem, clientConsole: RemoteConsole, callbacks: DocumentCallbacks) {
this.clientConsole = clientConsole;
this.document = new SentenceCollection(document);
this.callbacks = callbacks;
this.project = project;
// batch feedback updates to the client (200ms throttle)
this.feedback = new FeedbackSync(callbacks, 200);
// optionally launch coqtop eagerly as soon as the script is opened
if(project.settings.coqtop.startOn === "open-script")
this.resetCoq();
}
/**
* Applies client text edits to the sentence model and (if running) the STM,
* then optionally cross-checks that all three text representations agree.
*/
public async applyTextEdits(changes: TextDocumentContentChangeEvent[], newVersion: number) {
// sort the edits such that later edits are processed first
let sortedChanges =
changes.slice().sort((change1,change2) =>
textUtil.positionIsAfter(change1.range.start, change2.range.start) ? -1 : 1)
// NOTE(review): the document receives the UNSORTED changes while the STM
// receives the sorted copy — presumably SentenceCollection orders edits
// itself; confirm this asymmetry is intentional.
this.document.applyTextChanges(newVersion, changes);
if(this.isStmRunning()) {
try {
this.stm.applyChanges(sortedChanges, newVersion, this.document.getText());
} catch (err) {
// the STM may be left inconsistent, but keep serving the document
this.clientConsole.error("STM crashed while applying text edit: " + err.toString())
}
this.updateHighlights();
this.updateDiagnostics();
}
// Optional sanity check (diagnostics setting): verify document text, the
// parsed-sentence text, and the STM's view have not drifted apart.
if(this.isStmRunning() && this.project.settings.coq.diagnostics && this.project.settings.coq.diagnostics.checkTextSynchronization) {
const documentText = this.document.getText();
const parsedSentencesText = this.document.getSentenceText();
await this.stm.flushEdits();
const stmText = this.stm.getStatesText();
// version checks guard against edits that raced in during the await
if(!documentText.startsWith(parsedSentencesText) && this.document.getDocumentVersion() === newVersion) {
console.error("Document text differs from parsed-sentences text");
console.error("On applied changes: ");
changes.forEach(change => {
console.error(" > " + textUtil.rangeToString(change.range) + " -> " + change.text);
})
}
if(!documentText.startsWith(stmText) && this.stm.getDocumentVersion() === newVersion) {
console.error("Document text differs from STM text");
console.error("On applied changes: ");
changes.forEach(change => {
console.error(" > " + textUtil.rangeToString(change.range) + " -> " + change.text);
})
}
}
}
/** @returns the parsed sentence model backing this document */
public getSentences() : SentenceCollection {
return this.document;
}
/** @returns the text of the sentence containing `pos`, truncated at `pos` */
public getSentencePrefixTextAt(pos: Position) {
return this.document.getSentencePrefixTextAt(pos);
}
/** @returns the character offset corresponding to the (line, column) position */
public offsetAt(pos: Position) : number {
return this.document.offsetAt(pos);
}
/**
* @returns the Position (line, column) for the location (character position)
*/
public positionAt(offset: number) : Position {
return this.document.positionAt(offset);
}
// private sentenceStatusToHighlightType(status: coqProto.SentenceStatus) : thmProto.HighlightType {
// switch(status) {
// case coqProto.SentenceStatus.Complete:
// return thmProto.HighlightType.Complete;
// case coqProto.SentenceStatus.Incomplete:
// return thmProto.HighlightType.Incomplete;
// case coqProto.SentenceStatus.InProgress:
// return thmProto.HighlightType.InProgress;
// case coqProto.SentenceStatus.Parsed:
// return thmProto.HighlightType.Parsing;
// case coqProto.SentenceStatus.Processed:
// return thmProto.HighlightType.Processed;
// case coqProto.SentenceStatus.ProcessingInput:
// return thmProto.HighlightType.Processing;
// }
// }
// private highlightTypeToSentenceStatus(type: thmProto.HighlightType) : coqProto.SentenceStatus {
// switch(type) {
// case thmProto.HighlightType.Complete:
// return coqProto.SentenceStatus.Complete;
// case thmProto.HighlightType.Incomplete:
// return coqProto.SentenceStatus.Incomplete;
// case thmProto.HighlightType.InProgress:
// return coqProto.SentenceStatus.InProgress;
// case thmProto.HighlightType.Parsing:
// return coqProto.SentenceStatus.Parsed;
// case thmProto.HighlightType.Processed:
// return coqProto.SentenceStatus.Processed;
// case thmProto.HighlightType.Processing:
// return coqProto.SentenceStatus.ProcessingInput;
// default:
// throw `Cannot convert ${thmProto.HighlightType[type]} to a SentenceStatus`
// }
// }
// private highlightSentence(sentence: Range, type: thmProto.HighlightType) : thmProto.Highlight {
// // if(type===undefined)
// // type = this.sentenceStatusToHighlightType(sentence.status);
// return { style: type, range: sentence };
// }
/** Maps an STM sentence status onto the highlight style shown in the editor. */
private sentenceToHighlightType(status: StateStatus) : thmProto.HighlightType {
const statusToHighlight: { [s: number]: thmProto.HighlightType } = {
[StateStatus.Axiom]: thmProto.HighlightType.Axiom,
[StateStatus.Error]: thmProto.HighlightType.StateError,
[StateStatus.Parsing]: thmProto.HighlightType.Parsing,
[StateStatus.Processing]: thmProto.HighlightType.Processing,
[StateStatus.Incomplete]: thmProto.HighlightType.Incomplete,
[StateStatus.Processed]: thmProto.HighlightType.Processed
};
return statusToHighlight[status];
}
/** creates the current highlights from scratch */
private createHighlights() : thmProto.Highlights {
// one range bucket per HighlightType (six styles)
const highlights : thmProto.Highlights =
{ ranges: [ [], [], [], [], [], [] ] };
if(!this.isStmRunning())
return highlights;
for(let sent of this.stm.getSentences()) {
const ranges = highlights.ranges[this.sentenceToHighlightType(sent.status)];
// coalesce adjacent sentences with the same style into one range
if(ranges.length > 0 && textUtil.positionIsEqual(ranges[ranges.length-1].end, sent.range.start))
ranges[ranges.length-1].end = sent.range.end;
else {
ranges.push(Range.create(sent.range.start,sent.range.end));
}
}
return highlights;
}
// STM callback: a sentence's status changed; rebuild highlights (throttled).
private onCoqStateStatusUpdate(range: Range, status: StateStatus) {
this.updateHighlights();
}
// STM callback: a sentence was removed; currently a no-op (highlights are
// rebuilt from scratch elsewhere).
private onClearSentence(range: Range) {
// this.updateHighlights();
}
/**
* Schedules a highlight refresh through the feedback throttler.
* @param now - when true, bypass throttling and push immediately
*/
private updateHighlights(now = false) {
this.feedback.updateHighlights(() => {
const highlights = this.createHighlights();
// overlay the in-flight parsing ranges on top of the STM-derived ones
const parsingRanges = highlights.ranges[thmProto.HighlightType.Parsing];
Array.prototype.push.apply(parsingRanges, this.parsingRanges);
return highlights;
}, now);
}
// STM callback: a sentence failed; refresh highlights and diagnostics.
private onCoqStateError(sentenceRange: Range, errorRange: Range, message: AnnotatedText) {
this.updateHighlights();
this.updateDiagnostics()
// this.addDiagnostic(
// { message: message
// , range: errorRange
// , severity: DiagnosticSeverity.Error
// });
}
// STM callback: forward a Coq message to the client, with the level rendered
// as its enum name (e.g. "Error", "Notice").
private onCoqMessage(level: coqProto.MessageLevel, message: AnnotatedText, routeId: coqProto.RouteId) {
this.callbacks.sendMessage(coqProto.MessageLevel[level], message, routeId);
}
// STM callback: forward Ltac profiling results to the client.
private onCoqStateLtacProf(range: Range, results: coqProto.LtacProfResults) {
this.callbacks.sendLtacProfResults(results);
}
// STM callback: coqtop terminated. Always tell the client it stopped.
// NOTE(review): the restart + reset notification only fires when an error
// message accompanies the stop — presumably a clean/user-requested quit
// should not auto-restart; confirm.
private async onCoqDied(reason: thmProto.CoqtopStopReason, error?: string) {
this.callbacks.sendCoqtopStop(reason, error);
if(error) {
this.resetCoq();
this.callbacks.sendReset();
}
}
/**
* (Re)starts the Coq state machine: shuts down any existing STM (without
* awaiting completion) and creates a fresh one wired to this document's
* callbacks. coqtop itself is created lazily via the supplied factory.
*/
public async resetCoq() {
if(this.isStmRunning())
this.stm.shutdown(); // Don't bother awaiting
this.stm = new CoqStateMachine(
this.project,
() => {
// factory invoked when the STM actually needs a coqtop instance
this.callbacks.sendCoqtopStart();
return this.project.createCoqTopInstance(this.uri);
}, {
sentenceStatusUpdate: (x1,x2) => this.onCoqStateStatusUpdate(x1,x2),
clearSentence: (x1) => this.onClearSentence(x1),
updateStmFocus: (x1) => this.onUpdateStmFocus(x1),
error: (x1,x2,x3) => this.onCoqStateError(x1,x2,x3),
message: (x1,x2,x3) => this.onCoqMessage(x1,x2,x3),
ltacProfResults: (x1,x2) => this.onCoqStateLtacProf(x1,x2),
coqDied: (reason: thmProto.CoqtopStopReason, error?: string) => this.onCoqDied(reason, error),
});
}
// STM callback: the focused (interpretation) position moved; relay it to the
// client through the (throttled, non-immediate) feedback channel.
private onUpdateStmFocus(focus: Position) {
this.feedback.updateFocus(focus, false);
}
// private async cancellableOperation<T>(operation: Thenable<T>) : Promise<T> {
// return await Promise.race<T>(
// [ operation
// , this.cancelProcessing.event.then(() => Promise.reject<T>('operation cancelled'))
// ]);
// }
/** generates a list of contiguous commands
* @param begin - where to start parsing commands
* @param end - if specified, stop at the last command that does not extend past this position
* @param highlight - if true, give each yielded command a preliminary "parsing" highlight
*/
private *commandSequenceGenerator(begin: Position, end?: Position, highlight: boolean = false) : IterableIterator<{text: string, range: Range}> {
const documentText = this.document.getText();
let endOffset : number;
if(end === undefined)
endOffset = documentText.length;
else
endOffset = Math.min(this.offsetAt(end), documentText.length);
let currentOffset = this.offsetAt(begin);
if(currentOffset >= endOffset)
return;
while(true) {
// FIX: the original used substr(currentOffset, endOffset), which treats
// endOffset as a *length*, so the parser could be handed text extending
// past `end`; substring(currentOffset, endOffset) bounds the slice
// correctly (and avoids the deprecated substr).
const commandLength = coqParser.parseSentenceLength(documentText.substring(currentOffset, endOffset))
const nextOffset = currentOffset + commandLength;
// NOTE(review): with the bounded slice, `nextOffset > endOffset` can only
// hold when no sentence was parsed (commandLength <= 0); confirm whether
// this disjunct was meant to be a stopping condition instead.
if(commandLength > 0 || nextOffset > endOffset) {
let result =
{ text: documentText.substring(currentOffset, nextOffset)
, range: Range.create(this.positionAt(currentOffset),this.positionAt(nextOffset))
};
yield result;
// only highlight if the command was accepted (i.e. another is going to be requested; i.e. after yield)
if (highlight) {// Preliminary "parsing" highlight
this.parsingRanges.push(result.range);
this.updateHighlights(true);
}
} else
return;
currentOffset = nextOffset;
}
}
/** Returns a factory over contiguous commands; see commandSequenceGenerator. */
private commandSequence(highlight = false) {
// FIX: annotated the closure parameters (previously implicit `any`)
return (begin: Position, end?: Position) => this.commandSequenceGenerator(begin, end, highlight);
}
// /**
// * @param currentSentence: where to start parsing the next sentence
// * @param maxOffset: do not parse past maxOffset
// * @returns the next parsed sentence OR else null if parsing exceeds @maxOffset
// */
// private async plainStepForward(maxOffset?: number) : Promise<StepResult> {
// const start = this.stm.getFocusedPosition();
// const startOffset = this.offsetAt(start);
// const docText = this.documentText;
// const sentenceLength = coqParser.parseSentence(this.documentText.substr(startOffset,maxOffset));
// if(sentenceLength == -1)
// return StepResult.NoMoreCommands;
// const stopPos = startOffset + sentenceLength;
// if(maxOffset!==undefined && stopPos > maxOffset)
// return StepResult.ExceedsMaxOffset;
// const range = Range.create(start,this.positionAt(stopPos));
// let command = docText.substring(startOffset, stopPos);
// // Preliminary "parsing" highlight
// const parsingHighlights = [
// { style: thmProto.HighlightType.Parsing, textBegin: startOffset, textEnd: stopPos }
// ];
// this.callbacks.sendHighlightUpdates(parsingHighlights);
// try {
// const unfocused = await this.stm.stepForward(command, range, this.version, true);
// return unfocused ? StepResult.Unfocused : StepResult.Focused;
// } catch(err) {
// const error = <CommandParseError>err;
// const highlights = [
// { style: thmProto.HighlightType.Clear, textBegin: startOffset, textEnd: stopPos }
// // { style: thmProto.HighlightType.SyntaxError, textBegin: errorEnd, textEnd: errorEnd },
// ];
// this.callbacks.sendHighlightUpdates(highlights);
// this.addDiagnostic({
// message: error.message,
// range: error.range,
// severity: DiagnosticSeverity.Error
// });
// throw error;
// }
// }
// private async addDiagnostic(diagnostic: Diagnostic) {
// const diag = diagnostic;
// diag.message = await richppToMarkdown(diag.message);
// this.diagnostics.push(diag);
// this.callbacks.sendDiagnostics(this.diagnostics);
// }
// private removeDiagnosticsContaining(pos: Position, sendUpdate?: boolean) {
// this.diagnostics = this.diagnostics
// .filter((d) => !textUtil.rangeContains(d.range, pos));
// if(sendUpdate === undefined || sendUpdate===true)
// this.callbacks.sendDiagnostics(this.diagnostics);
// }
// private removeDiagnosticsIntersecting(range: Range, sendUpdate?: boolean) {
// this.diagnostics = this.diagnostics
// .filter((d) => !textUtil.rangeTouches(d.range, range));
// if(sendUpdate === undefined || sendUpdate===true)
// this.callbacks.sendDiagnostics(this.diagnostics);
// }
// private shiftDiagnostics(delta: textUtil.RangeDelta) {
// for(let idx = 0; idx < this.diagnostics.length; ++idx) {
// this.diagnostics[idx].range = textUtil.rangeTranslate(this.diagnostics[idx].range, delta);
// }
// }
// private clearSentenceHighlight(sentence: Sentence, endSentence?: Sentence) {
// this.callbacks.sendHighlightUpdates([{
// style: thmProto.HighlightType.Clear,
// textBegin: sentence.textBegin,
// textEnd: endSentence ? endSentence.textEnd : sentence.textEnd
// }]);
// }
// private clearSentenceHighlightAfter(sentence: Sentence, endSentence?: Sentence) {
// this.callbacks.sendHighlightUpdates([{
// style: thmProto.HighlightType.Clear,
// textBegin: sentence.textEnd,
// textEnd: endSentence ? endSentence.textEnd : sentence.textEnd
// }]);
// }
// /** Interpret to point
// * Tell Coq to process the proof script up to the given point
// * This may not fully process everything, or it may rewind the state.
// */
// private async interpretToPoint(position: Position) : Promise<thmProto.CoqTopGoalResult> {
// try {
// do {
// const focus = this.stm.getFocusedPosition();
// const focusOffset = this.offsetAt(focus);
// const offset = this.offsetAt(position);
// if(textUtil.positionIsAfterOrEqual(position, focus)) {
// // We need to step forward to reach the location.
// // We might be focused in the middle of a proof, so even if there is a
// // closer state we can jump to, we cannot call coqEditAt just yet.
// // (Or else we will get a Coq anomally :/ )
// for(let command of this.commandSequence(focus,offset)) {
// const focusChanged = this.stm.stepForward(command.text, command.range, this.version, true);
// if(focusChanged)
// break;
// }
// // At this point, either we have reached the location we're looking for,
// // or else the proof has become unfocused (the current state might be
// // anywhere) and we will need to call coqEditAt to get closer to the location.
// const closestSentence = this.sentences.findPrecedingSentence(location);
// // Are we at the closest sentence?
// if(forwardSentence.stateId !== closestSentence.stateId) {
// // No; jump there
// await this.jumpToLocation(closestSentence);
// }
// // We can now step forward directly to the location
// return await this.interpretToEnd(location);
// } else {
// // Our desired location is above us; we'll have to jump there
// const closestSentence = this.sentences.findPrecedingSentence(location);
// await this.jumpToLocation(closestSentence);
// return await this.rawGetGoal();
// }
// }
// } catch(error) {
// return this.errorGoalResult(error);
// }
// }
// private errorGoalResult(error: FailureResult) : thmProto.CoqTopGoalResult {
// const e = <coqProto.FailValue>{
// message: error.message,
// range: error.range
// };
// return {error: e};
// }
// /**
// *
// * */
// private async interpretToEnd(maxOffset?: number) : Promise<thmProto.CoqTopGoalResult> {
// let currentSentence = this.sentences.getTip();
// try {
// await this.stepForwardUntil(maxOffset);
// return await this.rawGetGoal();
// } catch(error) {
// return this.errorGoalResult(error);
// }
// }
// private async rollbackState(startingSentence: Sentence, endSentence?: Sentence) {
// if(this.sentences.getTip().stateId !== startingSentence.stateId) {
// // Undo the sentence
// this.clientConsole.log("rolling back state");
// await this.coqTop.coqEditAt(startingSentence.stateId);
// this.sentences.rewindTo(startingSentence);
// if(endSentence !== undefined)
// this.clearSentenceHighlightAfter(startingSentence,endSentence);
// this.clientConsole.log("rolled back");
// }
// }
// private async stepForward() : Promise<thmProto.CoqTopGoalResult> {
// const currentSentence = this.sentences.getTip();
// try {
// const interp = await this.plainStepForward(currentSentence);
// if(!interp)
// return {}
// return await this.rawGetGoal(interp.nextSentence ? interp.nextSentence.stateId : undefined);
// } catch(error) {
// this.rollbackState(currentSentence);
// return this.errorGoalResult(error);
// }
// }
// /**
// *
// * */
// private async stepBackward() : Promise<thmProto.CoqTopGoalResult> {
// // grab the tip sentence
// const currentSentence = this.sentences.getTip();
// try {
// const prevSentence = this.sentences.getPredecessor(currentSentence);
// if(prevSentence == null) {
// await this.doResetCoq();
// return {};
// }
// await this.coqTop.coqEditAt(prevSentence.stateId);
// this.sentences.rewindTo(prevSentence);
// this.callbacks.sendHighlightUpdates([
// this.highlightSentence(currentSentence, thmProto.HighlightType.Clear)
// ]);
// return await this.rawGetGoal(prevSentence.stateId);
// } catch(err) {
// const error = <FailureResult>err;
// const beforeErrorSentence = this.sentences.get(error.stateId);
// await this.coqTop.coqEditAt(error.stateId);
// this.clearSentenceHighlightAfter(beforeErrorSentence,currentSentence);
// this.sentences.rewindTo(beforeErrorSentence);
// return await this.getGoal();
// }
// }
/** Shuts down the state machine (if running) and releases the reference. */
public async dispose() {
if(!this.isStmRunning())
return;
await this.stm.shutdown();
this.stm = null;
}
// private async protectOperation(op: (wasReset:boolean)=>Promise<thmProto.CoqTopGoalResult>, lazyInitialize?: boolean) : Promise<thmProto.CoqTopGoalResult> {
// lazyInitialize = (lazyInitialize===undefined) ? true : false;
// let unlock : () => Promise<void>;
// try {
// unlock = await this.processingLock.lock(this.cancelProcessing.event);
// } catch(reason) {
// return <coqProto.FailValue>{message: "operation cancelled"};
// }
// try {
// if(!this.coqTop.isRunning()) {
// if(!lazyInitialize)
// return {};
// await this.cancellableOperation(this.doResetCoq());
// const result = await this.cancellableOperation(op(true));
// } else
// return await this.cancellableOperation(op(false));
// } catch(reason) {
// return <coqProto.FailValue>{message: reason};
// } finally {
// unlock();
// }
// }
// private interrupt() {
// this.coqTop.coqInterrupt();
// }
// /**
// * This loop handles each coq command and text edit sequentially.
// * One of the requirements is that a command's document position is still valid when it returns so that we can report accurate error messages, so text edits that arrive while a command is being processed are delayed until the command finished so that we do not invalidate its document positions.
// *
// * To cancel the current queue of commands, call cancelCoqOperations()
// */
// private async interactionLoop() {
// while(true) {
// try {
// await this.interactionCommands.executeOneTask();
// } catch(error) {
// this.clientConsole.warn(`Interaction loop exception: ${error}`);
// } finally {
// }
// }
// }
// /**
// * Ensures that the text edits are applied *after* the currently scheduled operations; this delay prevents their document positions from being invalidated too soon
// * However, if the edit will result in changing an already-interpreted sentence, then all current Coq processing will be cancelled.
// * Text edits themselves cannot be cancelled, but the Coq operations they may perform to set the current editing positions *can* be cancelled.
// */
// public textEdit(changes: TextDocumentContentChangeEvent[]) {
// // If any of the edits affect an interpreted sentence, then interrupt and cancel all Coq operations
// for(const change of changes) {
// const beginOffset = this.offsetAt(change.range.start);
// const endOffset = beginOffset + change.rangeLength;
// // Have any sentences been edited?
// const rangeSent = this.sentences.getRangeAffected(beginOffset,endOffset);
// if(!this.isPassiveEdit(rangeSent,change, beginOffset, endOffset) && rangeSent.length) {
// //this.clientConsole.info("Cancelling current Coq operations due to editing text of interpreted statements.");
// this.cancelCoqOperations();
// break;
// }
// }
// const cancelSignal = this.cancelProcessing;
// return this.interactionCommands.process<void>(async () => {
// this.interactionLoopStatus = InteractionLoopStatus.TextEdit;
// try {
// // applyTextEdits will check for a cancellation signal during Coq calls, but text-editing itself should never be cancelled
// return await this.applyTextEdits(changes, cancelSignal);
// } finally {
// this.interactionLoopStatus = InteractionLoopStatus.Idle;
// }
// });
// }
// private updateComputingStatus(status: thmProto.ComputingStatus, startTime: [number,number]) {
// const duration = process.hrtime(startTime);
// const interval = duration[0] * 1000.0 + (duration[1] / 1000000.0);
// this.callbacks.sendComputingStatus(status, interval);
// }
// private async doCoqOperation<X>(task: ()=>Promise<X>, lazyInitializeCoq? : boolean) {
// lazyInitializeCoq = (lazyInitializeCoq===undefined) ? true : lazyInitializeCoq;
// if(!this.coqTop.isRunning()) {
// if(lazyInitializeCoq) {
// await this.doResetCoq();
// } else
// return {};
// }
// return await task();
// }
// private enqueueCoqOperation<X>(task: ()=>Promise<X>, lazyInitializeCoq? : boolean) {
// // this.cancelProcessing might change in the future, so we want to make sure that, when
// // the task is eventually run, it will use the CURRENT this.cancelProcessing
// const cancelSignal = this.cancelProcessing;
// return this.interactionCommands.process<X>(async () => {
// if(cancelSignal.isCancelled())
// return Promise.reject<X>(<coqProto.FailValue>{message: 'operation cancelled'})
// this.interactionLoopStatus = InteractionLoopStatus.CoqCommand;
// const startTime = process.hrtime();
// const statusCheck = setInterval(() => this.updateComputingStatus(thmProto.ComputingStatus.Computing, startTime), 500);
// var interrupted = false;
// try {
// return await Promise.race<X>(
// [ this.doCoqOperation(task, lazyInitializeCoq)
// , cancelSignal.event.then(() => Promise.reject<X>(<coqProto.FailValue>{message: 'operation cancelled'}))
// ]);
// } catch(error) {
// this.updateComputingStatus(thmProto.ComputingStatus.Interrupted, startTime);
// interrupted = true;
// throw error;
// } finally {
// this.interactionLoopStatus = InteractionLoopStatus.Idle;
// clearInterval(statusCheck);
// if(!interrupted)
// this.updateComputingStatus(thmProto.ComputingStatus.Finished, startTime);
// }
// });
// }
// /**
// * Cancels all coq commands that are associated with `cancelProcessing`, which should be every coq command in `interactionCommands`.
// * If a text edit invalidates a state, then this method should also be called.
// */
// private cancelCoqOperations() : Promise<void> {
// // Cancel all current and pending operations
// this.cancelProcessing.cancel();
// // Do not cancel subsequent operations
// this.cancelProcessing = new CancellationSignal();
// if(this.interactionLoopStatus === InteractionLoopStatus.CoqCommand)
// return this.coqTop.coqInterrupt();
// }
// private async interactionsCoqQuit() {
// const waitMS = 1000;
// const cancelling = this.cancelCoqOperations();
// try {
// await Promise.race<{}>([cancelling, new Promise((resolve,reject) => setTimeout(() => reject(), waitMS))]);
// } finally {
// await this.coqTop.coqQuit();
// }
// }
// private async interactionsCoqReset() {
// const waitMS = 1000;
// const cancelling = this.cancelCoqOperations();
// try {
// await Promise.race<{}>([cancelling, new Promise((resolve,reject) => setTimeout(() => reject(), waitMS))]);
// } finally {
// await this.doResetCoq();
// }
// }
/** Ensure the STM is available, (re)starting Coq when it is not running. */
private assertStm() {
  if (!this.isStmRunning()) {
    this.resetCoq();
  }
}
// private convertErrorToCommandResult(error: any) : thmProto.FailureResult {
// if(error instanceof Interrupted) {
// return undefined;
// } else if(error instanceof CoqtopError) {
// } else if(error instanceof CallFailure) {
// return Object.assign<thmProto.FailureResult,thmProto.FocusPosition>({type: 'failure', message: error.message, range: error.range, sentence: error.stateId}, {focus: this.stm.getFocusedPosition()})
// else
// throw error;
// }
/**
 * Bridge an STM GoalResult to a protocol CommandResult by attaching the
 * currently focused document position. 'not-running' results pass
 * through unchanged; if the STM died in the meantime, report
 * 'not-started' instead.
 */
private toGoal(goal: GoalResult) : thmProto.CommandResult {
  if(goal.type === 'not-running')
    return goal;
  else if(!this.isStmRunning())
    return {type: 'not-running', reason: 'not-started'};
  // This is silly (Typescript is not yet smart enough)
  // Spread after `focus`: a focus carried by `goal` itself would win.
  return {focus: this.stm.getFocusedPosition(), ...goal};
  // Reference shapes for the mapping performed above:
  // export type GoalResult = proto.NoProofTag | proto.NotRunningTag |
  //   (proto.FailValue & proto.FailureTag) |
  //   (proto.ProofView & proto.ProofViewTag) |
  //   (proto.CommandInterrupted & proto.InterruptedTag)
  // export type FocusPosition = {focus: vscode.Position}
  // export type NotRunningTag = {type: 'not-running'}
  // export type NoProofTag = {type: 'no-proof'}
  // export type FailureTag = {type: 'failure'}
  // export type ProofViewTag = {type: 'proof-view'}
  // export type InterruptedTag = {type: 'interrupted'}
  // export type NotRunningResult = NotRunningTag
  // export type NoProofResult = NoProofTag & FocusPosition
  // export type FailureResult = FailValue & FailureTag & FocusPosition
  // export type ProofViewResult = ProofView & ProofViewTag & FocusPosition
  // export type InterruptedResult = CommandInterrupted & InterruptedTag & FocusPosition
  // export type CommandResult = NotRunningTag | FailureResult | ProofViewResult | InterruptedResult | NoProofResult
}
/**
 * Recompute diagnostics (per-sentence STM diagnostics plus document
 * parse errors) and push them to the feedback channel. No-op while the
 * STM is not running.
 * @param now when true, publish immediately instead of deferring
 */
private updateDiagnostics(now = false) {
  if (!this.isStmRunning()) {
    return;
  }
  this.feedback.updateDiagnostics(() => {
    const results: Diagnostic[] = [];
    for (const d of this.stm.getDiagnostics()) {
      // Prefer the precise sub-range when the diagnostic carries one;
      // otherwise fall back to the whole sentence span.
      const range: Range = d.range ? d.range : d.sentence;
      results.push(Diagnostic.create(range, textToDisplayString(d.message), d.severity, undefined, 'coqtop'));
    }
    results.push(...Array.from(this.document.getErrors()));
    return results;
  }, now);
}
/**
 * Advance the STM by one sentence and report the resulting goal, or the
 * error the STM returned. Highlights and diagnostics are refreshed
 * regardless of success via the `finally` block.
 */
public async stepForward(token: CancellationToken): Promise<thmProto.CommandResult> {
  this.assertStm();
  try {
    // Clear any stale pending-parse highlight before stepping.
    this.parsingRanges = [];
    const failure = await this.stm.stepForward(this.commandSequence(true));
    return failure ? failure : this.toGoal(await this.stm.getGoal());
  } finally {
    this.parsingRanges = [];
    this.updateHighlights(true);
    this.updateDiagnostics(true);
  }
}
/**
 * Undo one sentence: step the STM backward and report the resulting
 * goal, or the error the STM returned. Highlights and diagnostics are
 * refreshed whether or not the step succeeded.
 */
public async stepBackward(token: CancellationToken): Promise<thmProto.CommandResult> {
  this.assertStm();
  try {
    const failure = await this.stm.stepBackward();
    return failure ? failure : this.toGoal(await this.stm.getGoal());
  } finally {
    this.updateHighlights(true);
    this.updateDiagnostics(true);
  }
}
/**
 * Interpret the document up to `location` (a character offset or a
 * Position). The pending-parse range is highlighted immediately, before
 * the (possibly long-running) STM call; on success the resulting goal
 * is returned, otherwise the error reported by the STM. Highlights and
 * diagnostics are always refreshed afterwards.
 */
public async interpretToPoint(location: number|vscode.Position, synchronous = false, token: CancellationToken) : Promise<thmProto.CommandResult> {
  this.assertStm();
  try {
    const pos = (typeof location === 'number') ? this.positionAt(location) : location;
    // Show the about-to-be-processed span right away for user feedback.
    this.parsingRanges = [Range.create(this.stm.getFocusedPosition(),pos)];
    this.updateHighlights(true);
    const error = await this.stm.interpretToPoint(pos,this.commandSequence(false), this.project.settings.coq.interpretToEndOfSentence, synchronous, token);
    if(error)
      return error;
    return this.toGoal(await this.stm.getGoal());
  } finally {
    this.parsingRanges = [];
    this.updateHighlights(true);
    this.updateDiagnostics(true);
  }
}
/** Interpret the whole document, i.e. up to its final character offset. */
public async interpretToEnd(synchronous = false, token: CancellationToken): Promise<thmProto.CommandResult> {
  const endOffset = this.document.getText().length;
  return await this.interpretToPoint(endOffset, synchronous, token);
}
public async getGoal() : Promise<thmProto.CommandResult> {
if(!this.isStmRunning())
return {type: 'not-running', reason: "not-started"};
try {
return this.toGoal(await this.stm.getGoal());
} finally {
this.updateDiagnostics(true);
}
}
public async getCachedGoal(pos: vscode.Position, direction: "preceding"|"subsequent") : Promise<thmProto.CommandResult> {
if(!this.isStmRunning())
return {type: 'not-running', reason: "not-started"};
try {
return this.toGoal(await this.stm.getCachedGoal(pos, direction));
} finally {
this.updateDiagnostics(true);
}
}
public async getStatus(force: boolean) : Promise<thmProto.CommandResult> {
if(!this.isStmRunning())
return {type: 'not-running', reason: "not-started"};
try {
return await this.stm.getStatus(force);
} finally {
this.updateDiagnostics(true);
}
}
/** Ask the STM to complete outstanding computations; no-op when not running. */
public async finishComputations() {
  if (!this.isStmRunning()) {
    return;
  }
  this.stm.finishComputations();
}
/**
 * Run a Coq query command built from `term` and return its output.
 * "locate" falls back to a quoted form (`Locate "term".`) when the
 * unquoted query throws. An unrecognized `query` yields undefined.
 */
public async query(query: "locate"|"check"|"print"|"search"|"about"|"searchAbout", term: string, routeId: coqProto.RouteId) {
  if (!this.isStmRunning()) {
    return "Coq is not running";
  }
  const run = (text: string) => this.stm.doQuery(text, routeId);
  switch (query) {
    case "locate": {
      try {
        return await run(`Locate ${term}.`);
      } catch (err) {
        // Retry with the term quoted (e.g. for notations).
        return await run(`Locate "${term}".`);
      }
    }
    case "check":
      return await run(`Check ${term}.`);
    case "print":
      return await run(`Print ${term}.`);
    case "search":
      return await run(`Search ${term}.`);
    case "about":
      return await run(`About ${term}.`);
    case "searchAbout":
      return await run(`SearchAbout ${term}.`);
  }
}
/** Inform the STM of the display's wrapping width, if it is running. */
public async setWrappingWidth(columns: number) {
  if (this.isStmRunning()) {
    await this.stm.setWrappingWidth(columns);
  }
}
/**
 * Request Ltac profiling results, optionally scoped to the position at
 * the given document offset.
 */
public async requestLtacProfResults(offset?: number) {
  if (!this.isStmRunning()) {
    return;
  }
  // Note: a 0 offset is treated like "no offset" (truthiness check),
  // matching the pre-existing behavior.
  const position = offset ? this.positionAt(offset) : undefined;
  await this.stm.requestLtacProfResults(position);
}
/** Interrupt the STM's current computation, if it is running. */
public async interrupt() {
  if (this.isStmRunning()) {
    this.stm.interrupt();
  }
}
/**
 * Shut down and dispose the STM. Safe to call when the STM is not
 * running (no-op in that case).
 */
public async quitCoq() {
  if (!this.isStmRunning()) {
    return;
  }
  await this.stm.shutdown();
  this.stm.dispose();
  // Mark the STM as gone so isStmRunning() reports false from now on.
  this.stm = null;
}
/** Forward display-option changes to the STM, if it is running. */
public async setDisplayOptions(options: {item: thmProto.DisplayOption, value: thmProto.SetDisplayOption}[]) {
  if (this.isStmRunning()) {
    this.stm.setDisplayOptions(options);
  }
}
/** True when an STM instance exists and is currently running. */
public isStmRunning(): boolean {
  // Coerce with `!!`: plain `this.stm && ...` evaluates to `null`/`undefined`
  // when the STM has been torn down (see quitCoq), violating the declared
  // `boolean` return type under strictNullChecks.
  return !!this.stm && this.stm.isRunning();
}
} | the_stack |
import React, {
useEffect,
useState,
useCallback,
useMemo,
useImperativeHandle,
forwardRef
} from 'react'
import moment, { Moment } from 'moment'
import { Form, Row, Col, Input, Select, DatePicker, Spin, Checkbox } from 'antd'
const FormItem = Form.Item
const { TextArea } = Input
const { Option } = Select
const { RangePicker } = DatePicker
import { FormComponentProps } from 'antd/lib/form'
import {
SchedulePeriodUnit,
ISchedule,
ICronExpressionPartition,
JobType
} from '../types'
import { CheckboxChangeEvent } from 'antd/lib/checkbox'
import { FormItemStyle, LongFormItemStyle } from '../constants'
import Styles from './ScheduleBaseConfig.less'
// Selectable period units, ordered from finest to coarsest granularity.
const periodUnitList: SchedulePeriodUnit[] = [
  'Minute',
  'Hour',
  'Day',
  'Week',
  'Month',
  'Year'
]
// Chinese display labels for each period unit.
const periodUnitListLocale: { [key in SchedulePeriodUnit]: string } = {
  Minute: '分钟',
  Hour: '小时',
  Day: '天',
  Week: '周',
  Month: '月',
  Year: '年'
}
// Every-N-minutes choices: 10..59 (the minimum interval of 10 is also
// enforced when parsing the cron expression in the component below).
const minutePeriodOptions = [...Array(50).keys()].map((s) => (
  <Option key={s + 10} value={s + 10}>
    {s + 10}
  </Option>
))
// Minute-of-hour choices 00..59, zero-padded via the `0${m}`.slice(-2) trick.
const minuteOptions = [...Array(60).keys()].map((m) => (
  <Option key={m} value={m}>
    {`0${m}`.slice(-2)} 分
  </Option>
))
// Hour-of-day choices 00..23.
const hourOptions = [...Array(24).keys()].map((h) => (
  <Option key={h} value={h}>
    {`0${h}`.slice(-2)} 时
  </Option>
))
// Day-of-month choices 01..31.
const dayOptions = [...Array(31).keys()].map((d) => (
  <Option key={d + 1} value={d + 1}>
    {`0${d + 1}`.slice(-2)} 日
  </Option>
))
// Weekday choices Sunday..Saturday, valued 1..7 (cron week-day field).
const weekOptions = [
  '星期天',
  '星期一',
  '星期二',
  '星期三',
  '星期四',
  '星期五',
  '星期六'
].map((w, idx) => (
  <Option key={idx + 1} value={idx + 1}>
    {w}
  </Option>
))
// Month choices 01..12.
const monthOptions = [...Array(12).keys()].map((m) => (
  <Option key={m + 1} value={m + 1}>
    {`0${m + 1}`.slice(-2)}月
  </Option>
))
/**
 * Values managed by the schedule form: the schedule record, its cron
 * expression split into parts, the active date range, and whether the
 * cron expression is entered manually.
 */
export type ScheduleBaseFormProps = ISchedule &
  ICronExpressionPartition & {
    dateRange: [Moment, Moment]
    setCronExpressionManually: boolean
  }
interface IScheduleBaseConfigProps
  extends FormComponentProps<ScheduleBaseFormProps> {
  schedule: ISchedule
  loading: boolean
  /** Async uniqueness check for the schedule name; resolve means the name is free. */
  onCheckUniqueName: (
    data: any,
    resolve: () => any,
    reject: (error: string) => any
  ) => any
  /** Called when the user switches the job type (email / weChatWork). */
  onChangeJobType: (data: any) => any
}
/**
 * Infer the schedule period unit from a 6-field cron expression by
 * counting wildcard (`*`) fields: more wildcards mean a finer period.
 * Falls back to 'Minute' for unrecognized shapes.
 */
const computePeriodUnit = (cronExpression: string) => {
  const fields = cronExpression.split(' ')
  const wildcards = fields.filter((field) => field === '*').length
  let unit: SchedulePeriodUnit = 'Minute'
  if (wildcards === 3) {
    // "*/n" in the minute field distinguishes every-n-minutes from hourly.
    unit = fields[1].includes('/') ? 'Minute' : 'Hour'
  } else if (wildcards === 2) {
    unit = 'Day'
  } else if (wildcards === 1) {
    // A trailing "?" (no week-day constraint) indicates monthly.
    unit = fields[fields.length - 1] === '?' ? 'Month' : 'Week'
  } else if (wildcards === 0) {
    unit = 'Year'
  }
  return unit
}
/**
 * Base configuration form for a schedule: name, job type, description,
 * active date range, and cron-style execution-time settings (either via
 * pickers or a manually entered cron expression).
 *
 * NOTE(review): typed as React.FC but consumed via forwardRef (see the
 * default export), so it receives a second `ref` argument that the
 * React.FC signature does not declare — confirm the intended typing.
 */
export const ScheduleBaseConfig: React.FC<IScheduleBaseConfigProps> = (
  props,
  ref
) => {
  const { form, schedule, loading, onCheckUniqueName, onChangeJobType } = props
  const { cronExpression, config } = schedule
  // Period unit derived from the stored cron expression ('Minute'..'Year').
  const [currentPeriodUnit, setCurrentPeriodUnit] = useState<
    SchedulePeriodUnit
  >(computePeriodUnit(cronExpression))
  // Whether the user edits the cron expression by hand instead of the pickers.
  const [manual, setManual] = useState(config.setCronExpressionManually)
  // Async antd validator: delegates name-uniqueness checking to the container.
  const checkNameUnique = useCallback(
    (_, name = '', callback) => {
      const { id, projectId } = schedule
      const data = { id, name, projectId }
      if (!name) {
        // NOTE(review): no `return` after this callback — onCheckUniqueName
        // still runs for an empty name, so `callback` may be invoked twice;
        // confirm whether an early return was intended.
        callback()
      }
      onCheckUniqueName(
        data,
        () => {
          callback()
        },
        (err) => callback(err)
      )
    },
    [onCheckUniqueName, schedule]
  )
  const changeJobType = useCallback(
    (value: JobType) => {
      onChangeJobType(value)
    },
    [onChangeJobType]
  )
  const changeManual = useCallback((e: CheckboxChangeEvent) => {
    setManual(e.target.checked)
  }, [])
  // Re-derive the period unit whenever the stored cron expression changes.
  useEffect(() => {
    const periodUnit = computePeriodUnit(cronExpression)
    setCurrentPeriodUnit(periodUnit)
  }, [cronExpression])
  // Keep the manual flag in sync with the persisted schedule config.
  useEffect(() => {
    setManual(config.setCronExpressionManually)
  }, [config.setCronExpressionManually])
  // Initial picker values parsed from the cron expression's fields.
  let { minute, hour, day, month, weekDay } = useMemo<
    Partial<ScheduleBaseFormProps>
  >(() => {
    const partitions = cronExpression.split(' ')
    let minute =
      form.getFieldValue('minute') ||
      +(partitions[1].includes('/')
        ? partitions[1].slice(2) // slice(2) to remove */
        : partitions[1])
    // min minute duration is 10
    if (currentPeriodUnit === 'Minute' && minute < 10) {
      minute = 10
      form.setFieldsValue({ minute })
    }
    const hour = +partitions[2] || 0
    const day = +partitions[3] || 1
    const month = +partitions[4] || 1
    const weekDay = +partitions[5] || 1
    return { minute, hour, day, month, weekDay }
  }, [cronExpression, currentPeriodUnit])
  const { getFieldDecorator } = form
  const { startDate, endDate } = schedule
  // Expose the antd form instance to the parent through the forwarded ref.
  useImperativeHandle(ref, () => ({ form }))
  return (
    <Form>
      <Row>
        <Col span={12}>
          <FormItem label="名称" {...FormItemStyle} hasFeedback>
            {getFieldDecorator<ScheduleBaseFormProps>('name', {
              rules: [
                { required: true, message: '名称不能为空' },
                { validator: checkNameUnique }
              ],
              initialValue: schedule.name
            })(<Input autoComplete="new-name" />)}
          </FormItem>
        </Col>
        <Col span={12}>
          <FormItem label="类型" {...FormItemStyle}>
            {getFieldDecorator<ScheduleBaseFormProps>('jobType', {
              initialValue: schedule.jobType
            })(
              <Select onChange={changeJobType}>
                <Option value="email">Email</Option>
                <Option value="weChatWork">企业微信</Option>
              </Select>
            )}
          </FormItem>
        </Col>
      </Row>
      <FormItem label="描述" {...LongFormItemStyle}>
        {getFieldDecorator<ScheduleBaseFormProps>('description', {
          initialValue: schedule.description
        })(<TextArea />)}
      </FormItem>
      <FormItem label="有效时间范围" {...LongFormItemStyle}>
        {getFieldDecorator<ScheduleBaseFormProps>('dateRange', {
          initialValue: [
            startDate && moment(startDate),
            endDate && moment(endDate)
          ]
        })(
          <RangePicker
            style={{ width: '100%' }}
            showTime
            format="YYYY-MM-DD HH:mm:ss"
          />
        )}
      </FormItem>
      <FormItem label="执行时间设置" {...LongFormItemStyle}>
        {loading ? (
          <Spin />
        ) : (
          <Row className={Styles.cronSetting} gutter={8}>
            {manual ? (
              <Col span={12}>
                {getFieldDecorator<ScheduleBaseFormProps>('cronExpression', {
                  rules: [{ required: true }],
                  initialValue: cronExpression
                })(<Input placeholder="请输入cron表达式" />)}
              </Col>
            ) : (
              <>
                <span>每</span>
                {/* Minute */}
                {currentPeriodUnit === 'Minute' && (
                  <>
                    {getFieldDecorator<ScheduleBaseFormProps>('minute', {
                      initialValue: minute
                    })(
                      <Select style={{ width: 80 }}>
                        {minutePeriodOptions}
                      </Select>
                    )}
                  </>
                )}
                {/* period unit selector (always rendered) */}
                {getFieldDecorator<ScheduleBaseFormProps>('periodUnit', {
                  initialValue: currentPeriodUnit
                })(
                  <Select
                    style={{ width: 80 }}
                    onChange={(value: SchedulePeriodUnit) =>
                      setCurrentPeriodUnit(value)
                    }
                  >
                    {periodUnitList.map((unit) => (
                      <Option key={unit} value={unit}>
                        {periodUnitListLocale[unit]}
                      </Option>
                    ))}
                  </Select>
                )}
                {/* Hour */}
                {currentPeriodUnit === 'Hour' && (
                  <>
                    <span>的第</span>
                    {getFieldDecorator<ScheduleBaseFormProps>('minute', {
                      initialValue: minute
                    })(<Select style={{ width: 80 }}>{minuteOptions}</Select>)}
                  </>
                )}
                {/* Day */}
                {currentPeriodUnit === 'Day' && (
                  <>
                    <span>的</span>
                    {getFieldDecorator<ScheduleBaseFormProps>('hour', {
                      initialValue: hour
                    })(<Select style={{ width: 80 }}>{hourOptions}</Select>)}
                    <span>:</span>
                    {getFieldDecorator<ScheduleBaseFormProps>('minute', {
                      initialValue: minute
                    })(<Select style={{ width: 100 }}>{minuteOptions}</Select>)}
                  </>
                )}
                {/* Week */}
                {currentPeriodUnit === 'Week' && (
                  <>
                    {getFieldDecorator<ScheduleBaseFormProps>('weekDay', {
                      initialValue: weekDay
                    })(<Select style={{ width: 95 }}>{weekOptions}</Select>)}
                    <span>的</span>
                    {getFieldDecorator<ScheduleBaseFormProps>('hour', {
                      initialValue: hour
                    })(<Select style={{ width: 80 }}>{hourOptions}</Select>)}
                    <span>:</span>
                    {getFieldDecorator<ScheduleBaseFormProps>('minute', {
                      initialValue: minute
                    })(<Select style={{ width: 80 }}>{minuteOptions}</Select>)}
                  </>
                )}
                {/* Month */}
                {currentPeriodUnit === 'Month' && (
                  <>
                    {getFieldDecorator<ScheduleBaseFormProps>('day', {
                      initialValue: day
                    })(<Select style={{ width: 80 }}>{dayOptions}</Select>)}
                    <span>的</span>
                    {getFieldDecorator<ScheduleBaseFormProps>('hour', {
                      initialValue: hour
                    })(<Select style={{ width: 80 }}>{hourOptions}</Select>)}
                    <span>:</span>
                    {getFieldDecorator('minute', { initialValue: minute })(
                      <Select style={{ width: 80 }}>{minuteOptions}</Select>
                    )}
                  </>
                )}
                {/* Year */}
                {currentPeriodUnit === 'Year' && (
                  <>
                    {getFieldDecorator<ScheduleBaseFormProps>('month', {
                      initialValue: month
                    })(<Select style={{ width: 80 }}>{monthOptions}</Select>)}
                    {getFieldDecorator<ScheduleBaseFormProps>('day', {
                      initialValue: day
                    })(<Select style={{ width: 80 }}>{dayOptions}</Select>)}
                    <span>的</span>
                    {getFieldDecorator<ScheduleBaseFormProps>('hour', {
                      initialValue: hour
                    })(<Select style={{ width: 80 }}>{hourOptions}</Select>)}
                    <span>:</span>
                    {getFieldDecorator<ScheduleBaseFormProps>('minute', {
                      initialValue: minute
                    })(<Select style={{ width: 80 }}>{minuteOptions}</Select>)}
                  </>
                )}
              </>
            )}
            {getFieldDecorator<ScheduleBaseFormProps>(
              'setCronExpressionManually',
              { initialValue: manual, valuePropName: 'checked' }
            )(
              <Checkbox className={Styles.manual} onChange={changeManual}>
                手动输入
              </Checkbox>
            )}
          </Row>
        )}
      </FormItem>
    </Form>
  )
}
// Wrap with antd's Form.create so `props.form` is injected, and forwardRef so
// parents can reach the form instance exposed via useImperativeHandle.
export default Form.create<IScheduleBaseConfigProps>()(
  forwardRef(ScheduleBaseConfig)
)
import React, { isValidElement } from 'react'
import {
Animated,
Dimensions,
LayoutChangeEvent,
Platform,
ScrollView,
StyleSheet,
Text,
TouchableOpacity,
View,
ViewStyle,
} from 'react-native'
import { Theme, WithTheme, WithThemeStyles } from '../style'
import { TabBarPropsType, TabData } from './PropsType'
import TabBarStyles, { TabBarStyle } from './style'
// Device width captured at module load; used as the initial layout guess
// until real measurements arrive via onLayout.
const WINDOW_WIDTH = Dimensions.get('window').width
export interface PropsType
  extends TabBarPropsType,
    WithThemeStyles<TabBarStyle> {
  /** Animated value tracking the pager's scroll position (in pages). */
  scrollValue?: any
  tabStyle?: ViewStyle
  tabsContainerStyle?: ViewStyle
  /** default: false */
  dynamicTabUnderlineWidth?: boolean
  keyboardShouldPersistTaps?: boolean
}
export interface StateType {
  // Animated left edge of the underline.
  _leftTabUnderline: Animated.Value
  // Animated width of the underline.
  _widthTabUnderline: Animated.Value
  // Measured width of the outer container (defaults to the window width).
  _containerWidth: number
  // Measured width of the inner tab strip.
  _tabContainerWidth: number
}
/**
 * Default scrollable tab bar: renders one touchable tab per entry plus
 * an animated underline that follows the pager's scroll value.
 */
export class DefaultTabBar extends React.PureComponent<PropsType, StateType> {
  static defaultProps = {
    animated: true,
    tabs: [],
    goToTab: () => {},
    activeTab: 0,
    page: 5,
    tabBarUnderlineStyle: {},
    tabBarBackgroundColor: '#fff',
    tabBarActiveTextColor: '',
    tabBarInactiveTextColor: '',
    tabBarTextStyle: {},
    dynamicTabUnderlineWidth: false,
  }
  // Per-tab layout rectangles, filled in lazily by measureTab.
  _tabsMeasurements: any[] = []
  _tabContainerMeasurements: any
  _containerMeasurements: any
  _scrollView: ScrollView
  constructor(props: PropsType) {
    super(props)
    this.state = {
      _leftTabUnderline: new Animated.Value(0),
      _widthTabUnderline: new Animated.Value(0),
      _containerWidth: WINDOW_WIDTH,
      _tabContainerWidth: WINDOW_WIDTH,
    }
  }
  componentDidMount() {
    // NOTE(review): this listener is never removed (no componentWillUnmount),
    // so it can fire after unmount — confirm whether cleanup is needed.
    this.props.scrollValue.addListener(this.updateView)
  }
  // Sync the tab strip's scroll offset and the underline with the pager.
  updateView = (offset: any) => {
    const position = Math.floor(offset.value)
    const pageOffset = offset.value % 1
    const tabCount = this.props.tabs.length
    const lastTabPosition = tabCount - 1
    if (tabCount === 0 || offset.value < 0 || offset.value > lastTabPosition) {
      return
    }
    if (
      this.necessarilyMeasurementsCompleted(
        position,
        position === lastTabPosition,
      )
    ) {
      this.updateTabPanel(position, pageOffset)
      this.updateTabUnderline(position, pageOffset, tabCount)
    }
  }
  // True when every layout measurement needed for `position` is available.
  necessarilyMeasurementsCompleted(position: number, isLastTab: boolean) {
    return (
      this._tabsMeasurements[position] &&
      (isLastTab || this._tabsMeasurements[position + 1]) &&
      this._tabContainerMeasurements &&
      this._containerMeasurements
    )
  }
  // Scroll the tab strip so the (partially) active tab stays centered.
  updateTabPanel(position: number, pageOffset: number) {
    const containerWidth = this._containerMeasurements.width
    const tabWidth = this._tabsMeasurements[position].width
    const nextTabMeasurements = this._tabsMeasurements[position + 1]
    const nextTabWidth = (nextTabMeasurements && nextTabMeasurements.width) || 0
    const tabOffset = this._tabsMeasurements[position].left
    const absolutePageOffset = pageOffset * tabWidth
    let newScrollX = tabOffset + absolutePageOffset
    // Center the interpolated tab within the visible container.
    newScrollX -=
      (containerWidth -
        (1 - pageOffset) * tabWidth -
        pageOffset * nextTabWidth) /
      2
    newScrollX = newScrollX >= 0 ? newScrollX : 0
    if (Platform.OS === 'android') {
      this._scrollView.scrollTo({ x: newScrollX, y: 0, animated: false })
    } else {
      // iOS: additionally clamp to the right edge of the tab container.
      const rightBoundScroll =
        this._tabContainerMeasurements.width - this._containerMeasurements.width
      newScrollX = newScrollX > rightBoundScroll ? rightBoundScroll : newScrollX
      this._scrollView.scrollTo({ x: newScrollX, y: 0, animated: false })
    }
  }
  // Interpolate the underline's left edge (and, in dynamic mode, width)
  // between the current tab and the next one.
  updateTabUnderline(position: number, pageOffset: number, tabCount: number) {
    const { dynamicTabUnderlineWidth } = this.props
    if (position >= 0 && position <= tabCount - 1) {
      if (dynamicTabUnderlineWidth) {
        const nowLeft = this._tabsMeasurements[position].left
        const nowRight = this._tabsMeasurements[position].right
        // NOTE(review): when `position` is the last tab this reads
        // _tabsMeasurements[position + 1], which is undefined — verify this
        // path cannot be reached with position === tabCount - 1.
        const nextTabLeft = this._tabsMeasurements[position + 1].left
        const nextTabRight = this._tabsMeasurements[position + 1].right
        const newLineLeft =
          pageOffset * nextTabLeft + (1 - pageOffset) * nowLeft
        const newLineRight =
          pageOffset * nextTabRight + (1 - pageOffset) * nowRight
        this.state._leftTabUnderline.setValue(newLineLeft)
        this.state._widthTabUnderline.setValue(newLineRight - newLineLeft)
      } else {
        // Fixed width: each tab occupies an equal share of the tab strip.
        const nowLeft = (position * this.state._tabContainerWidth) / tabCount
        const nextTabLeft =
          ((position + 1) * this.state._tabContainerWidth) / tabCount
        const newLineLeft =
          pageOffset * nextTabLeft + (1 - pageOffset) * nowLeft
        this.state._leftTabUnderline.setValue(newLineLeft)
      }
    }
  }
  // Notify the click handler (if any) and switch to the pressed tab.
  onPress = (index: number) => {
    const { goToTab, onTabClick, tabs } = this.props
    // tslint:disable-next-line:no-unused-expression
    onTabClick && onTabClick(tabs[index], index)
    // tslint:disable-next-line:no-unused-expression
    goToTab && goToTab(index)
  }
  // Render one tab: a custom `renderTab` prop takes precedence, then a
  // React-element title, then a plain text label.
  renderTab = (
    tab: TabData,
    index: number,
    width: number,
    onLayoutHandler: any,
    styles: ReturnType<typeof TabBarStyles>,
    theme: Theme,
  ) => {
    const {
      tabBarActiveTextColor: activeTextColor,
      tabBarInactiveTextColor: inactiveTextColor,
      tabBarTextStyle: textStyle,
      activeTab,
      renderTab,
    } = this.props
    const isTabActive = activeTab === index
    // Explicit prop colors win; otherwise fall back to the theme.
    const textColor = isTabActive
      ? activeTextColor || theme.activeTextColor
      : inactiveTextColor || theme.inactiveTextColor
    return (
      <TouchableOpacity
        activeOpacity={1}
        key={`${tab.title}_${index}`}
        accessible
        accessibilityRole="button"
        onPress={() => this.onPress(index)}
        onLayout={onLayoutHandler}>
        <View
          style={{
            ...StyleSheet.flatten(styles.tab),
            ...this.props.tabStyle,
            width,
          }}>
          {renderTab ? (
            renderTab(tab)
          ) : isValidElement(tab.title) ? (
            tab.title
          ) : (
            <Text
              style={[
                {
                  color: textColor,
                  ...StyleSheet.flatten(styles.textStyle),
                },
                textStyle,
              ]}>
              {tab.title}
            </Text>
          )}
        </View>
      </TouchableOpacity>
    )
  }
  // Record a tab's layout and re-run the view update for the current offset.
  measureTab = (page: number, event: any) => {
    const { x, width, height } = event.nativeEvent.layout
    this._tabsMeasurements[page] = { left: x, right: x + width, width, height }
    this.updateView({ value: this.props.scrollValue._value })
  }
  render() {
    const {
      tabs,
      page = 0,
      tabBarUnderlineStyle,
      tabBarBackgroundColor,
      tabsContainerStyle,
      renderUnderline,
      keyboardShouldPersistTaps,
    } = this.props
    return (
      <WithTheme styles={this.props.styles} themeStyles={TabBarStyles}>
        {(styles, theme) => {
          const tabUnderlineStyle = {
            position: 'absolute',
            bottom: 0,
            ...StyleSheet.flatten(styles.underline),
            ...StyleSheet.flatten(tabBarUnderlineStyle),
          }
          const dynamicTabUnderline = {
            left: this.state._leftTabUnderline,
            width: this.state._widthTabUnderline,
          }
          // Show at most `page` tabs at once; fewer tabs share the full width.
          const tabWidth =
            this.state._containerWidth / Math.min(page, tabs.length)
          const underlineProps = {
            style: {
              ...dynamicTabUnderline,
              ...tabUnderlineStyle,
            },
          }
          return (
            <View
              style={[
                styles.container,
                {
                  backgroundColor: tabBarBackgroundColor,
                },
              ]}
              onLayout={this.onContainerLayout}>
              <ScrollView
                ref={(scrollView: any) => {
                  this._scrollView = scrollView
                }}
                horizontal
                showsHorizontalScrollIndicator={false}
                showsVerticalScrollIndicator={false}
                directionalLockEnabled
                bounces={false}
                scrollsToTop={false}
                scrollEnabled={tabs.length > page}
                keyboardShouldPersistTaps={keyboardShouldPersistTaps}
                renderToHardwareTextureAndroid>
                <View
                  style={[
                    styles.tabs,
                    {
                      ...tabsContainerStyle,
                      backgroundColor: tabBarBackgroundColor,
                    },
                  ]}
                  onLayout={this.onTabContainerLayout}>
                  {tabs.map((name, index) => {
                    let tab = { title: name } as TabData
                    if (tabs.length - 1 >= index) {
                      tab = tabs[index]
                    }
                    return this.renderTab(
                      tab,
                      index,
                      tabWidth,
                      this.measureTab.bind(this, index),
                      styles,
                      theme,
                    )
                  })}
                  {renderUnderline ? (
                    renderUnderline(underlineProps.style)
                  ) : (
                    // NOTE(review): this spreads the *style object's keys* as
                    // direct props rather than passing style={...}; confirm
                    // whether `style={underlineProps.style}` was intended.
                    <Animated.View {...underlineProps.style} />
                  )}
                </View>
              </ScrollView>
            </View>
          )
        }}
      </WithTheme>
    )
  }
  // Cache the tab strip's layout and refresh the underline width.
  onTabContainerLayout = (e: LayoutChangeEvent) => {
    this._tabContainerMeasurements = e.nativeEvent.layout
    const width = this._tabContainerMeasurements.width
    // fix: https://github.com/ant-design/ant-design-mobile-rn/issues/162
    // if (width < WINDOW_WIDTH) {
    //   width = WINDOW_WIDTH;
    // }
    this.setState({ _tabContainerWidth: width })
    if (!this.props.dynamicTabUnderlineWidth) {
      this.state._widthTabUnderline.setValue(width / this.props.tabs.length)
    }
    this.updateView({ value: this.props.scrollValue._value })
  }
  // Cache the outer container's layout and re-sync the view.
  onContainerLayout = (e: LayoutChangeEvent) => {
    this._containerMeasurements = e.nativeEvent.layout
    this.setState({ _containerWidth: this._containerMeasurements.width })
    this.updateView({ value: this.props.scrollValue._value })
  }
}
import util from 'util'
import BugsnagPluginAwsLambda from '../src/'
import Client, { EventDeliveryPayload, SessionDeliveryPayload } from '@bugsnag/core/client'
/**
 * Build a Bugsnag Client wired to an in-memory delivery: events and
 * sessions are pushed onto the given arrays instead of being sent over
 * the network. The logger's `error` method throws so a failed or slow
 * flush fails the test instead of being silently logged.
 */
const createClient = (events: EventDeliveryPayload[], sessions: SessionDeliveryPayload[], config = {}) => {
  const client = new Client({ apiKey: 'AN_API_KEY', plugins: [BugsnagPluginAwsLambda], ...config })
  // @ts-ignore the following property is not defined on the public Event interface
  client.Event.__type = 'nodejs'
  // a flush failure won't throw as we don't want to crash apps if delivery takes
  // too long. To avoid the unit tests passing when this happens, we make the logger
  // throw on any 'error' log call.
  // Spread the args: `util.format(args)` would format the whole array as a
  // single value instead of using the first argument as the format string.
  client._logger.error = (...args) => { throw new Error(util.format(...args)) }
  client._delivery = {
    sendEvent (payload, cb = () => {}) {
      events.push(payload)
      cb()
    },
    sendSession (payload, cb = () => {}) {
      sessions.push(payload)
      cb()
    }
  }
  return client
}
// Simulated Lambda time budget (ms) reported by the mocked context timer.
const DEFAULT_REMAINING_MS = 250
// Mocked Lambda context timer: the first call reports the full budget, the
// second half of it, and any further call is unexpected and throws.
let getRemainingTimeInMillis: jest.MockedFunction<() => number>
beforeEach(() => {
  getRemainingTimeInMillis = jest.fn()
    .mockReturnValueOnce(DEFAULT_REMAINING_MS)
    .mockReturnValueOnce(DEFAULT_REMAINING_MS / 2)
    .mockImplementationOnce(() => { throw new Error('unexpected call to "getRemainingTimeInMillis"') })
})
describe('plugin: aws lambda', () => {
it('has a name', () => {
expect(BugsnagPluginAwsLambda.name).toBe('awsLambda')
const client = new Client({ apiKey: 'AN_API_KEY', plugins: [BugsnagPluginAwsLambda] })
const plugin = client.getPlugin('awsLambda')
expect(plugin).toBeTruthy()
})
it('exports a "createHandler" function', () => {
const client = new Client({ apiKey: 'AN_API_KEY', plugins: [BugsnagPluginAwsLambda] })
const plugin = client.getPlugin('awsLambda')
expect(plugin).toMatchObject({ createHandler: expect.any(Function) })
})
it('adds the context as metadata', async () => {
const events: EventDeliveryPayload[] = []
const sessions: SessionDeliveryPayload[] = []
const client = createClient(events, sessions)
const handler = (event: any, context: any) => 'abc'
const event = { very: 'eventy' }
const context = { extremely: 'contextual' }
const plugin = client.getPlugin('awsLambda')
if (!plugin) {
throw new Error('Plugin was not loaded!')
}
const bugsnagHandler = plugin.createHandler()
const wrappedHandler = bugsnagHandler(handler)
expect(await wrappedHandler(event, context)).toBe('abc')
expect(client.getMetadata('AWS Lambda context')).toEqual(context)
})
it('logs an error if flush times out', async () => {
const client = new Client({ apiKey: 'AN_API_KEY', plugins: [BugsnagPluginAwsLambda] })
client._logger.error = jest.fn()
client._delivery = {
sendEvent (payload, cb = () => {}) {
setTimeout(cb, 250)
},
sendSession (payload, cb = () => {}) {
setTimeout(cb, 250)
}
}
const handler = () => {
client.notify('hello')
return 'abc'
}
const event = { very: 'eventy' }
const context = { extremely: 'contextual' }
const timeoutError = new Error('flush timed out after 20ms')
const plugin = client.getPlugin('awsLambda')
if (!plugin) {
throw new Error('Plugin was not loaded!')
}
const bugsnagHandler = plugin.createHandler({ flushTimeoutMs: 20 })
const wrappedHandler = bugsnagHandler(handler)
expect(await wrappedHandler(event, context)).toBe('abc')
expect(client._logger.error).toHaveBeenCalledWith(`Delivery may be unsuccessful: ${timeoutError.message}`)
})
it('returns a wrapped handler that resolves to the original return value (async)', async () => {
const events: EventDeliveryPayload[] = []
const sessions: SessionDeliveryPayload[] = []
const client = createClient(events, sessions)
const handler = () => 'abc'
const event = { very: 'eventy' }
const context = { extremely: 'contextual' }
const plugin = client.getPlugin('awsLambda')
if (!plugin) {
throw new Error('Plugin was not loaded!')
}
const bugsnagHandler = plugin.createHandler()
const wrappedHandler = bugsnagHandler(handler)
expect(await handler()).toBe('abc')
expect(await wrappedHandler(event, context)).toBe('abc')
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(1)
})
it('notifies when an error is thrown (async)', async () => {
const events: EventDeliveryPayload[] = []
const sessions: SessionDeliveryPayload[] = []
const client = createClient(events, sessions)
const error = new Error('oh no')
const handler = (event: any, context: any) => { throw error }
const event = { very: 'eventy' }
const context = { extremely: 'contextual' }
const plugin = client.getPlugin('awsLambda')
if (!plugin) {
throw new Error('Plugin was not loaded!')
}
const bugsnagHandler = plugin.createHandler()
const wrappedHandler = bugsnagHandler(handler)
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(0)
await expect(() => wrappedHandler(event, context)).rejects.toThrow(error)
expect(events).toHaveLength(1)
expect(events[0].events[0].errors[0].errorMessage).toBe(error.message)
expect(sessions).toHaveLength(1)
})
it('does not notify when "autoDetectErrors" is false (async)', async () => {
const events: EventDeliveryPayload[] = []
const sessions: SessionDeliveryPayload[] = []
const client = createClient(events, sessions, { autoDetectErrors: false })
const error = new Error('oh no')
const handler = (event: any, context: any) => { throw error }
const event = { very: 'eventy' }
const context = { extremely: 'contextual' }
const plugin = client.getPlugin('awsLambda')
if (!plugin) {
throw new Error('Plugin was not loaded!')
}
const bugsnagHandler = plugin.createHandler()
const wrappedHandler = bugsnagHandler(handler)
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(0)
await expect(() => wrappedHandler(event, context)).rejects.toThrow(error)
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(1)
})
it('does not notify when "unhandledExceptions" are disabled (async)', async () => {
const events: EventDeliveryPayload[] = []
const sessions: SessionDeliveryPayload[] = []
const client = createClient(events, sessions, { enabledErrorTypes: { unhandledExceptions: false } })
const error = new Error('oh no')
const handler = (event: any, context: any) => { throw error }
const event = { very: 'eventy' }
const context = { extremely: 'contextual' }
const plugin = client.getPlugin('awsLambda')
if (!plugin) {
throw new Error('Plugin was not loaded!')
}
const bugsnagHandler = plugin.createHandler()
const wrappedHandler = bugsnagHandler(handler)
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(0)
await expect(() => wrappedHandler(event, context)).rejects.toThrow(error)
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(1)
})
it('returns a wrapped handler that resolves to the value passed to the callback (callback)', async () => {
const events: EventDeliveryPayload[] = []
const sessions: SessionDeliveryPayload[] = []
const client = createClient(events, sessions)
const handler = (event: any, context: any, callback: any) => { callback(null, 'xyz') }
const event = { very: 'eventy' }
const context = { extremely: 'contextual' }
const plugin = client.getPlugin('awsLambda')
if (!plugin) {
throw new Error('Plugin was not loaded!')
}
const bugsnagHandler = plugin.createHandler()
const wrappedHandler = bugsnagHandler(handler)
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(0)
expect(await wrappedHandler(event, context)).toBe('xyz')
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(1)
})
it('notifies when an error is passed (callback)', async () => {
const events: EventDeliveryPayload[] = []
const sessions: SessionDeliveryPayload[] = []
const client = createClient(events, sessions)
const error = new Error('uh oh')
const handler = (event: any, context: any, callback: any) => { callback(error, 'xyz') }
const event = { very: 'eventy' }
const context = { extremely: 'contextual' }
const plugin = client.getPlugin('awsLambda')
if (!plugin) {
throw new Error('Plugin was not loaded!')
}
const bugsnagHandler = plugin.createHandler()
const wrappedHandler = bugsnagHandler(handler)
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(0)
await expect(() => wrappedHandler(event, context)).rejects.toThrow(error)
expect(events).toHaveLength(1)
expect(events[0].events[0].errors[0].errorMessage).toBe(error.message)
expect(sessions).toHaveLength(1)
})
it('does not notify when "autoDetectErrors" is false (callback)', async () => {
const events: EventDeliveryPayload[] = []
const sessions: SessionDeliveryPayload[] = []
const client = createClient(events, sessions, { autoDetectErrors: false })
const error = new Error('uh oh')
const handler = (event: any, context: any, callback: any) => { callback(error, 'xyz') }
const event = { very: 'eventy' }
const context = { extremely: 'contextual' }
const plugin = client.getPlugin('awsLambda')
if (!plugin) {
throw new Error('Plugin was not loaded!')
}
const bugsnagHandler = plugin.createHandler()
const wrappedHandler = bugsnagHandler(handler)
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(0)
await expect(() => wrappedHandler(event, context)).rejects.toThrow(error)
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(1)
})
it('does not notify when "unhandledExceptions" are disabled (callback)', async () => {
const events: EventDeliveryPayload[] = []
const sessions: SessionDeliveryPayload[] = []
const client = createClient(events, sessions, { enabledErrorTypes: { unhandledExceptions: false } })
const error = new Error('uh oh')
const handler = (event: any, context: any, callback: any) => { callback(error, 'xyz') }
const event = { very: 'eventy' }
const context = { extremely: 'contextual' }
const plugin = client.getPlugin('awsLambda')
if (!plugin) {
throw new Error('Plugin was not loaded!')
}
const bugsnagHandler = plugin.createHandler()
const wrappedHandler = bugsnagHandler(handler)
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(0)
await expect(() => wrappedHandler(event, context)).rejects.toThrow(error)
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(1)
})
it('works when an async handler has the callback parameter', async () => {
const events: EventDeliveryPayload[] = []
const sessions: SessionDeliveryPayload[] = []
const client = createClient(events, sessions)
const handler = async (event: any, context: any, callback: any) => 'abcxyz'
const event = { very: 'eventy' }
const context = { extremely: 'contextual' }
const plugin = client.getPlugin('awsLambda')
if (!plugin) {
throw new Error('Plugin was not loaded!')
}
const bugsnagHandler = plugin.createHandler()
const wrappedHandler = bugsnagHandler(handler)
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(0)
expect(await wrappedHandler(event, context)).toBe('abcxyz')
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(1)
})
it('works when an async handler has the callback parameter and calls it', async () => {
const events: EventDeliveryPayload[] = []
const sessions: SessionDeliveryPayload[] = []
const client = createClient(events, sessions)
const handler = async (event: any, context: any, callback: any) => { callback(null, 'abcxyz') }
const event = { very: 'eventy' }
const context = { extremely: 'contextual' }
const plugin = client.getPlugin('awsLambda')
if (!plugin) {
throw new Error('Plugin was not loaded!')
}
const bugsnagHandler = plugin.createHandler()
const wrappedHandler = bugsnagHandler(handler)
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(0)
expect(await wrappedHandler(event, context)).toBe('abcxyz')
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(1)
})
it('works when an async handler has the callback parameter and throws', async () => {
const events: EventDeliveryPayload[] = []
const sessions: SessionDeliveryPayload[] = []
const client = createClient(events, sessions)
const error = new Error('abcxyz')
const handler = async (event: any, context: any, callback: any) => { throw error }
const event = { very: 'eventy' }
const context = { extremely: 'contextual' }
const plugin = client.getPlugin('awsLambda')
if (!plugin) {
throw new Error('Plugin was not loaded!')
}
const bugsnagHandler = plugin.createHandler()
const wrappedHandler = bugsnagHandler(handler)
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(0)
await expect(() => wrappedHandler(event, context)).rejects.toThrow(error)
expect(events).toHaveLength(1)
expect(events[0].events[0].errors[0].errorMessage).toBe(error.message)
expect(sessions).toHaveLength(1)
})
it('works when an async handler has the callback parameter and calls it with an error', async () => {
const events: EventDeliveryPayload[] = []
const sessions: SessionDeliveryPayload[] = []
const client = createClient(events, sessions)
const error = new Error('abcxyz')
const handler = async (event: any, context: any, callback: any) => { callback(error) }
const event = { very: 'eventy' }
const context = { extremely: 'contextual' }
const plugin = client.getPlugin('awsLambda')
if (!plugin) {
throw new Error('Plugin was not loaded!')
}
const bugsnagHandler = plugin.createHandler()
const wrappedHandler = bugsnagHandler(handler)
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(0)
await expect(() => wrappedHandler(event, context)).rejects.toThrow(error)
expect(events).toHaveLength(1)
expect(events[0].events[0].errors[0].errorMessage).toBe(error.message)
expect(sessions).toHaveLength(1)
})
it('will track sessions when "autoTrackSessions" is enabled', async () => {
const events: EventDeliveryPayload[] = []
const sessions: SessionDeliveryPayload[] = []
const client = createClient(events, sessions, { autoTrackSessions: true })
const handler = () => 'abc'
const event = { very: 'eventy' }
const context = { extremely: 'contextual' }
const plugin = client.getPlugin('awsLambda')
if (!plugin) {
throw new Error('Plugin was not loaded!')
}
const bugsnagHandler = plugin.createHandler()
const wrappedHandler = bugsnagHandler(handler)
expect(await wrappedHandler(event, context)).toBe('abc')
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(1)
})
it('will not track sessions when "autoTrackSessions" is disabled', async () => {
const events: EventDeliveryPayload[] = []
const sessions: SessionDeliveryPayload[] = []
const client = createClient(events, sessions, { autoTrackSessions: false })
const handler = () => 'abc'
const event = { very: 'eventy' }
const context = { extremely: 'contextual' }
const plugin = client.getPlugin('awsLambda')
if (!plugin) {
throw new Error('Plugin was not loaded!')
}
const bugsnagHandler = plugin.createHandler()
const wrappedHandler = bugsnagHandler(handler)
expect(await wrappedHandler(event, context)).toBe('abc')
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(0)
})
  // When the handler takes longer than the warning threshold, the plugin should
  // deliver a 'LambdaTimeoutApproaching' event whose message reports the
  // remaining time (DEFAULT_REMAINING_MS / 2, per the assertion below).
  it('notifies when it is close to timing out (async)', async () => {
    const events: EventDeliveryPayload[] = []
    const sessions: SessionDeliveryPayload[] = []
    const client = createClient(events, sessions)
    // Resolve only after the warning should already have fired
    const handler = async (event: any, context: any) => new Promise(resolve => {
      setTimeout(() => resolve('xyz'), DEFAULT_REMAINING_MS + 100)
    })
    const event = { very: 'eventy' }
    // getRemainingTimeInMillis comes from the enclosing scope (declared above this chunk)
    const context = { extremely: 'contextual', getRemainingTimeInMillis }
    const plugin = client.getPlugin('awsLambda')
    if (!plugin) {
      throw new Error('Plugin was not loaded!')
    }
    const bugsnagHandler = plugin.createHandler()
    const wrappedHandler = bugsnagHandler(handler)
    expect(events).toHaveLength(0)
    expect(sessions).toHaveLength(0)
    // The handler still resolves normally despite the warning being sent
    expect(await wrappedHandler(event, context)).toBe('xyz')
    expect(events).toHaveLength(1)
    expect(events[0].events).toHaveLength(1)
    expect(events[0].events[0].errors).toHaveLength(1)
    // No functionName on the context, so the generic context string is used
    expect(events[0].events[0].context).toBe('Lambda timeout approaching')
    const expectedError = {
      errorClass: 'LambdaTimeoutApproaching',
      errorMessage: `Lambda will timeout in ${DEFAULT_REMAINING_MS / 2}ms`,
      stacktrace: [],
      type: 'nodejs'
    }
    expect(events[0].events[0].errors[0]).toEqual(expectedError)
    expect(sessions).toHaveLength(1)
  })
  // Same scenario, but the handler reports its result via the Lambda callback
  it('notifies when it is close to timing out (callback)', async () => {
    const events: EventDeliveryPayload[] = []
    const sessions: SessionDeliveryPayload[] = []
    const client = createClient(events, sessions)
    // Call the callback only after the warning should already have fired
    const handler = (event: any, context: any, callback: any) => new Promise(resolve => {
      setTimeout(() => callback(null, 'xyz'), DEFAULT_REMAINING_MS + 100)
    })
    const event = { very: 'eventy' }
    const context = { extremely: 'contextual', getRemainingTimeInMillis }
    const plugin = client.getPlugin('awsLambda')
    if (!plugin) {
      throw new Error('Plugin was not loaded!')
    }
    const bugsnagHandler = plugin.createHandler()
    const wrappedHandler = bugsnagHandler(handler)
    expect(events).toHaveLength(0)
    expect(sessions).toHaveLength(0)
    expect(await wrappedHandler(event, context)).toBe('xyz')
    expect(events).toHaveLength(1)
    expect(events[0].events).toHaveLength(1)
    expect(events[0].events[0].errors).toHaveLength(1)
    expect(events[0].events[0].context).toBe('Lambda timeout approaching')
    const expectedError = {
      errorClass: 'LambdaTimeoutApproaching',
      errorMessage: `Lambda will timeout in ${DEFAULT_REMAINING_MS / 2}ms`,
      stacktrace: [],
      type: 'nodejs'
    }
    expect(events[0].events[0].errors[0]).toEqual(expectedError)
    expect(sessions).toHaveLength(1)
  })
it('uses the function name as the event context when present', async () => {
const events: EventDeliveryPayload[] = []
const sessions: SessionDeliveryPayload[] = []
const client = createClient(events, sessions)
const handler = async (event: any, context: any) => new Promise(resolve => {
setTimeout(() => resolve('xyz'), DEFAULT_REMAINING_MS + 100)
})
const event = { very: 'eventy' }
const context = { functionName: 'MyCoolAndGoodLambdaFunction', getRemainingTimeInMillis }
const plugin = client.getPlugin('awsLambda')
if (!plugin) {
throw new Error('Plugin was not loaded!')
}
const bugsnagHandler = plugin.createHandler()
const wrappedHandler = bugsnagHandler(handler)
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(0)
expect(await wrappedHandler(event, context)).toBe('xyz')
expect(events).toHaveLength(1)
expect(events[0].events[0].errors[0].errorClass).toBe('LambdaTimeoutApproaching')
expect(events[0].events[0].errors[0].errorMessage).toBe(`Lambda will timeout in ${DEFAULT_REMAINING_MS / 2}ms`)
expect(events[0].events[0].errors[0].stacktrace).toHaveLength(0)
expect(events[0].events[0].context).toBe('MyCoolAndGoodLambdaFunction')
expect(sessions).toHaveLength(1)
})
  it('allows the "lambdaTimeoutNotifyMs" to be changed', async () => {
    // With 6 seconds remaining and a resolve timeout of 500ms, the timeout
    // warning will never be triggered unless the custom "lambdaTimeoutNotifyMs"
    // takes effect
    const superLongWaitMs = 6000
    const resolveTimeoutMs = 500
    const lambdaTimeoutNotifyMs = superLongWaitMs - (resolveTimeoutMs / 2)
    // Reassign the shared mock from the enclosing scope; the call order matters:
    // first call sees the full remaining time, second sees the time at the
    // notify threshold, and any third call is a test failure.
    getRemainingTimeInMillis = jest.fn()
      .mockReturnValueOnce(superLongWaitMs)
      .mockReturnValueOnce(superLongWaitMs - lambdaTimeoutNotifyMs)
      .mockImplementationOnce(() => { throw new Error('unexpected call to "getRemainingTimeInMillis"') })
    const events: EventDeliveryPayload[] = []
    const sessions: SessionDeliveryPayload[] = []
    const client = createClient(events, sessions)
    const handler = async (event: any, context: any) => new Promise(resolve => {
      setTimeout(() => resolve('xyz'), resolveTimeoutMs)
    })
    const event = { very: 'eventy' }
    const context = { extremely: 'contextual', getRemainingTimeInMillis }
    const plugin = client.getPlugin('awsLambda')
    if (!plugin) {
      throw new Error('Plugin was not loaded!')
    }
    // The custom threshold is passed through createHandler's options
    const bugsnagHandler = plugin.createHandler({ lambdaTimeoutNotifyMs })
    const wrappedHandler = bugsnagHandler(handler)
    expect(events).toHaveLength(0)
    expect(sessions).toHaveLength(0)
    expect(await wrappedHandler(event, context)).toBe('xyz')
    expect(events).toHaveLength(1)
    expect(events[0].events).toHaveLength(1)
    expect(events[0].events[0].errors).toHaveLength(1)
    expect(events[0].events[0].context).toBe('Lambda timeout approaching')
    const expectedError = {
      errorClass: 'LambdaTimeoutApproaching',
      errorMessage: `Lambda will timeout in ${resolveTimeoutMs / 2}ms`,
      stacktrace: [],
      type: 'nodejs'
    }
    expect(events[0].events[0].errors[0]).toEqual(expectedError)
    expect(sessions).toHaveLength(1)
  })
  // A threshold of 0 disables the timeout warning entirely (no event delivered)
  it('does not notify if "lambdaTimeoutNotifyMs" is 0', async () => {
    const events: EventDeliveryPayload[] = []
    const sessions: SessionDeliveryPayload[] = []
    const client = createClient(events, sessions)
    const handler = async (event: any, context: any) => new Promise(resolve => {
      setTimeout(() => resolve('xyz'), 100)
    })
    const event = { very: 'eventy' }
    const context = { extremely: 'contextual', getRemainingTimeInMillis }
    const plugin = client.getPlugin('awsLambda')
    if (!plugin) {
      throw new Error('Plugin was not loaded!')
    }
    const bugsnagHandler = plugin.createHandler({ lambdaTimeoutNotifyMs: 0 })
    const wrappedHandler = bugsnagHandler(handler)
    expect(events).toHaveLength(0)
    expect(sessions).toHaveLength(0)
    expect(await wrappedHandler(event, context)).toBe('xyz')
    expect(events).toHaveLength(0)
    expect(sessions).toHaveLength(1)
  })
it('supports a string as the error argument in a lambda callback', async () => {
const events: EventDeliveryPayload[] = []
const sessions: SessionDeliveryPayload[] = []
const client = createClient(events, sessions)
const message = 'uh oh'
const handler = (event: any, context: any, callback: any) => { callback(message, 'xyz') }
const event = { very: 'eventy' }
const context = { extremely: 'contextual' }
const plugin = client.getPlugin('awsLambda')
if (!plugin) {
throw new Error('Plugin was not loaded!')
}
const bugsnagHandler = plugin.createHandler()
const wrappedHandler = bugsnagHandler(handler)
expect(events).toHaveLength(0)
expect(sessions).toHaveLength(0)
await expect(() => wrappedHandler(event, context)).rejects.toBe(message)
expect(events).toHaveLength(1)
expect(events[0].events[0].errors[0].errorMessage).toBe(message)
expect(sessions).toHaveLength(1)
})
}) | the_stack |
import {PbAesCtrHmacAeadKey, PbAesCtrHmacAeadKeyFormat, PbAesCtrKey, PbAesCtrKeyFormat, PbAesCtrParams, PbHashType, PbHmacKey, PbHmacKeyFormat, PbHmacParams, PbKeyData} from '../internal/proto';
import * as Random from '../subtle/random';
import {assertExists} from '../testing/internal/test_utils';
import {AesCtrHmacAeadKeyManager} from './aes_ctr_hmac_aead_key_manager';
import {Aead} from './internal/aead';
// Type URL this key manager reports via getKeyType() and the only key
// version it accepts (see the "bad version" test below).
const KEY_TYPE = 'type.googleapis.com/google.crypto.tink.AesCtrHmacAeadKey';
const VERSION = 0;
/////////////////////////////////////////////////////////////////////////////
// Helper functions for tests
/** creates new AesCtrHmacAeadKeyFormat with allowed parameters */
/** Builds an AesCtrHmacAeadKeyFormat proto populated with valid parameters. */
function createTestKeyFormat(): PbAesCtrHmacAeadKeyFormat {
  const keySize = 16;
  const ivSize = 12;
  const tagSize = 16;

  // AES-CTR half of the format
  const aesCtrParams = new PbAesCtrParams();
  aesCtrParams.setIvSize(ivSize);
  const aesCtrKeyFormat = new PbAesCtrKeyFormat();
  aesCtrKeyFormat.setKeySize(keySize);
  aesCtrKeyFormat.setParams(aesCtrParams);

  // HMAC half of the format
  const hmacParams = new PbHmacParams();
  hmacParams.setHash(PbHashType.SHA1);
  hmacParams.setTagSize(tagSize);
  const hmacKeyFormat = new PbHmacKeyFormat();
  hmacKeyFormat.setKeySize(keySize);
  hmacKeyFormat.setParams(hmacParams);

  const keyFormat = new PbAesCtrHmacAeadKeyFormat();
  keyFormat.setAesCtrKeyFormat(aesCtrKeyFormat);
  keyFormat.setHmacKeyFormat(hmacKeyFormat);
  return keyFormat;
}
/** creates new AesCtrHmacAeadKey with allowed parameters */
/** Builds an AesCtrHmacAeadKey proto populated with valid parameters. */
function createTestKey(): PbAesCtrHmacAeadKey {
  const keySize = 16;
  const ivSize = 12;
  const tagSize = 16;

  // AES-CTR key half
  const aesCtrParams = new PbAesCtrParams();
  aesCtrParams.setIvSize(ivSize);
  const aesCtrKey = new PbAesCtrKey();
  aesCtrKey.setVersion(0);
  aesCtrKey.setParams(aesCtrParams);
  aesCtrKey.setKeyValue(Random.randBytes(keySize));

  // HMAC key half
  const hmacParams = new PbHmacParams();
  hmacParams.setHash(PbHashType.SHA1);
  hmacParams.setTagSize(tagSize);
  const hmacKey = new PbHmacKey();
  hmacKey.setVersion(0);
  hmacKey.setParams(hmacParams);
  hmacKey.setKeyValue(Random.randBytes(keySize));

  const key = new PbAesCtrHmacAeadKey();
  key.setVersion(0);
  key.setAesCtrKey(aesCtrKey);
  key.setHmacKey(hmacKey);
  return key;
}
/** creates new PbKeyData with allowed parameters */
/** Wraps a freshly generated test key in a PbKeyData with allowed parameters. */
function createTestKeyData(): PbKeyData {
  return new PbKeyData()
      .setTypeUrl(KEY_TYPE)
      .setValue(createTestKey().serializeBinary())
      .setKeyMaterialType(PbKeyData.KeyMaterialType.SYMMETRIC);
}
describe('aes ctr hmac aead key manager test', function() {
/////////////////////////////////////////////////////////////////////////////
// tests for newKey method
// newKey method -- key formats
it('new key bad key format', async function() {
const keyFormat = new PbAesCtrKeyFormat();
const manager = new AesCtrHmacAeadKeyManager();
try {
manager.getKeyFactory().newKey(keyFormat);
} catch (e) {
expect(e.toString())
.toBe('SecurityException: Expected AesCtrHmacAeadKeyFormat-proto');
return;
}
fail('An exception should be thrown.');
});
it('new key bad serialized key', async function() {
// this is not a serialized key format
const serializedKeyFormat = new Uint8Array(4);
const manager = new AesCtrHmacAeadKeyManager();
try {
manager.getKeyFactory().newKey(serializedKeyFormat);
} catch (e) {
expect(e.toString())
.toBe(
'SecurityException: Could not parse the given Uint8Array as a serialized' +
' proto of ' + KEY_TYPE);
return;
}
fail('An exception should be thrown.');
});
  // newKey method -- bad parameters of the AES CTR key format
it('new key not supported aes ctr key size', async function() {
const keySize: number = 11;
const manager = new AesCtrHmacAeadKeyManager();
const keyFormat = createTestKeyFormat();
keyFormat.getAesCtrKeyFormat()?.setKeySize(keySize);
try {
manager.getKeyFactory().newKey(keyFormat);
} catch (e) {
expect(e.toString())
.toBe(
'InvalidArgumentsException: unsupported AES key size: ' +
keySize);
return;
}
fail('An exception should be thrown.');
});
it('new key iv size out of range', async function() {
const ivSizeOutOfRange: number[] = [10, 18];
const manager = new AesCtrHmacAeadKeyManager();
const keyFormat = createTestKeyFormat();
const ivSizeOutOfRangeLength = ivSizeOutOfRange.length;
for (let i = 0; i < ivSizeOutOfRangeLength; i++) {
keyFormat.getAesCtrKeyFormat()?.getParams()?.setIvSize(
ivSizeOutOfRange[i]);
try {
manager.getKeyFactory().newKey(keyFormat);
} catch (e) {
expect(e.toString())
.toBe(
'SecurityException: Invalid AES CTR HMAC key format: IV size is ' +
'out of range: ' + ivSizeOutOfRange[i]);
continue;
}
fail('An exception should be thrown.');
}
});
  // newKey method -- bad parameters of the HMAC key format
it('new key small hmac key size', async function() {
const keySize: number = 11;
const manager = new AesCtrHmacAeadKeyManager();
const keyFormat = createTestKeyFormat();
keyFormat.getHmacKeyFormat()?.setKeySize(keySize);
try {
manager.getKeyFactory().newKey(keyFormat);
} catch (e) {
expect(e.toString())
.toBe(
'SecurityException: Invalid AES CTR HMAC key format: HMAC key is' +
' too small: ' + keySize);
return;
}
fail('An exception should be thrown.');
});
it('new key hash type unsupported', async function() {
const manager = new AesCtrHmacAeadKeyManager();
const keyFormat = createTestKeyFormat();
keyFormat.getHmacKeyFormat()?.getParams()?.setHash(PbHashType.UNKNOWN_HASH);
try {
manager.getKeyFactory().newKey(keyFormat);
} catch (e) {
expect(e.toString()).toBe('SecurityException: Unknown hash type.');
return;
}
fail('An exception should be thrown.');
});
it('new key small tag size', async function() {
const SMALL_TAG_SIZE = 8;
const manager = new AesCtrHmacAeadKeyManager();
const keyFormat = createTestKeyFormat();
keyFormat.getHmacKeyFormat()?.getParams()?.setTagSize(SMALL_TAG_SIZE);
try {
manager.getKeyFactory().newKey(keyFormat);
} catch (e) {
expect(e.toString())
.toBe(
'SecurityException: Invalid HMAC params: tag size ' +
SMALL_TAG_SIZE + ' is too small.');
return;
}
fail('An exception should be thrown.');
});
it('new key big tag size for hash type', async function() {
const tagSizes = [
{'hashType': PbHashType.SHA1, 'tagSize': 22},
{'hashType': PbHashType.SHA256, 'tagSize': 34},
{'hashType': PbHashType.SHA512, 'tagSize': 66},
];
const manager = new AesCtrHmacAeadKeyManager();
const keyFormat = createTestKeyFormat();
const tagSizesLength = tagSizes.length;
for (let i = 0; i < tagSizesLength; i++) {
keyFormat.getHmacKeyFormat()?.getParams()?.setHash(
tagSizes[i]['hashType']);
keyFormat.getHmacKeyFormat()?.getParams()?.setTagSize(
tagSizes[i]['tagSize']);
try {
manager.getKeyFactory().newKey(keyFormat);
} catch (e) {
expect(e.toString())
.toBe(
'SecurityException: Invalid HMAC params: tag size ' +
tagSizes[i]['tagSize'] + ' is out of range.');
continue;
}
fail('An exception should be thrown.');
}
});
it('new key via format proto', async function() {
const manager = new AesCtrHmacAeadKeyManager();
const keyFormat = createTestKeyFormat();
const key = manager.getKeyFactory().newKey(keyFormat);
// testing AES CTR key
expect(key.getAesCtrKey()?.getKeyValue().length)
.toBe(keyFormat.getAesCtrKeyFormat()?.getKeySize());
expect(key.getAesCtrKey()?.getVersion()).toBe(0);
expect(key.getAesCtrKey()?.getParams()?.getIvSize())
.toBe(keyFormat.getAesCtrKeyFormat()?.getParams()?.getIvSize());
// testing HMAC key
expect(key.getHmacKey()?.getKeyValue()?.length)
.toBe(keyFormat.getHmacKeyFormat()?.getKeySize());
expect(key.getHmacKey()?.getVersion()).toBe(0);
expect(key.getHmacKey()?.getParams()?.getHash())
.toBe(keyFormat.getHmacKeyFormat()?.getParams()?.getHash());
expect(key.getHmacKey()?.getParams()?.getTagSize())
.toBe(keyFormat.getHmacKeyFormat()?.getParams()?.getTagSize());
});
it('new key via serialized format proto', async function() {
const manager = new AesCtrHmacAeadKeyManager();
const keyFormat = createTestKeyFormat();
const serializedKeyFormat = keyFormat.serializeBinary();
const key = manager.getKeyFactory().newKey(serializedKeyFormat);
// testing AES CTR key
expect(key.getAesCtrKey()?.getKeyValue().length)
.toBe(keyFormat.getAesCtrKeyFormat()?.getKeySize());
expect(key.getAesCtrKey()?.getVersion()).toBe(0);
expect(key.getAesCtrKey()?.getParams()?.getIvSize())
.toBe(keyFormat.getAesCtrKeyFormat()?.getParams()?.getIvSize());
// testing HMAC key
expect(key.getHmacKey()?.getKeyValue()?.length)
.toBe(keyFormat.getHmacKeyFormat()?.getKeySize());
expect(key.getHmacKey()?.getVersion()).toBe(0);
expect(key.getHmacKey()?.getParams()?.getHash())
.toBe(keyFormat.getHmacKeyFormat()?.getParams()?.getHash());
expect(key.getHmacKey()?.getParams()?.getTagSize())
.toBe(keyFormat.getHmacKeyFormat()?.getParams()?.getTagSize());
});
/////////////////////////////////////////////////////////////////////////////
// tests for NewKeyData method
it('new key data bad serialized key', async function() {
const serializedKeyFormats = [new Uint8Array(1), new Uint8Array(0)];
const aeadKeyManager = new AesCtrHmacAeadKeyManager();
const serializedKeyFormatsLength = serializedKeyFormats.length;
for (let i = 0; i < serializedKeyFormatsLength; i++) {
try {
aeadKeyManager.getKeyFactory().newKeyData(serializedKeyFormats[i]);
} catch (e) {
expect(e.toString())
.toBe(
'SecurityException: Could not parse the given Uint8Array as a ' +
'serialized proto of ' + KEY_TYPE);
continue;
}
fail(
'An exception should be thrown for the string: ' +
serializedKeyFormats[i]);
}
});
it('new key data from valid key', async function() {
const keyFormat = createTestKeyFormat();
const serializedKeyFormat = keyFormat.serializeBinary();
const manager = new AesCtrHmacAeadKeyManager();
const keyData = manager.getKeyFactory().newKeyData(serializedKeyFormat);
expect(keyData.getTypeUrl()).toBe(KEY_TYPE);
expect(keyData.getKeyMaterialType())
.toBe(PbKeyData.KeyMaterialType.SYMMETRIC);
const key = PbAesCtrHmacAeadKey.deserializeBinary(keyData.getValue());
expect(key.getAesCtrKey()?.getKeyValue().length)
.toBe(keyFormat.getAesCtrKeyFormat()?.getKeySize());
expect(key.getHmacKey()?.getKeyValue()?.length)
.toBe(keyFormat.getHmacKeyFormat()?.getKeySize());
});
/////////////////////////////////////////////////////////////////////////////
// tests for getPrimitive method
it('get primitive unsupported key data type', async function() {
const aeadKeyManager = new AesCtrHmacAeadKeyManager();
const keyData = createTestKeyData().setTypeUrl('bad type url');
try {
await aeadKeyManager.getPrimitive(Aead, keyData);
} catch (e) {
expect(e.toString())
.toBe(
'SecurityException: Key type ' + keyData.getTypeUrl() +
' is not supported. This key manager supports ' + KEY_TYPE + '.');
return;
}
fail('An exception should be thrown');
});
it('get primitive unsupported key type', async function() {
const aeadKeyManager = new AesCtrHmacAeadKeyManager();
const key = new PbAesCtrKey();
try {
await aeadKeyManager.getPrimitive(Aead, key);
} catch (e) {
expect(e.toString())
.toBe(
'SecurityException: Given key type is not supported. ' +
'This key manager supports ' + KEY_TYPE + '.');
return;
}
fail('An exception should be thrown');
});
it('get primitive bad version', async function() {
const version = 1;
const aeadKeyManager = new AesCtrHmacAeadKeyManager();
const key: PbAesCtrHmacAeadKey = createTestKey();
key.getAesCtrKey()?.setVersion(version);
try {
await aeadKeyManager.getPrimitive(Aead, key);
} catch (e) {
expect(e.toString())
.toBe(
'SecurityException: Version is out of bound, must be between 0 ' +
'and ' + VERSION + '.');
return;
}
fail('An exception should be thrown');
});
it('get primitive short aes ctr key', async function() {
const keySize = 5;
const aeadKeyManager = new AesCtrHmacAeadKeyManager();
const key: PbAesCtrHmacAeadKey = createTestKey();
key.getAesCtrKey()?.setKeyValue(new Uint8Array(keySize));
try {
await aeadKeyManager.getPrimitive(Aead, key);
} catch (e) {
expect(e.toString())
.toBe(
'InvalidArgumentsException: unsupported AES key size: ' +
keySize);
return;
}
fail('An exception should be thrown');
});
it('get primitive aes ctr key small iv size', async function() {
const ivSizeOutOfRange: number[] = [9, 19];
const manager = new AesCtrHmacAeadKeyManager();
const key: PbAesCtrHmacAeadKey = createTestKey();
const ivSizeOutOfRangeLength = ivSizeOutOfRange.length;
for (let i = 0; i < ivSizeOutOfRangeLength; i++) {
key.getAesCtrKey()?.getParams()?.setIvSize(ivSizeOutOfRange[i]);
try {
await manager.getPrimitive(Aead, key);
} catch (e) {
expect(e.toString())
.toBe(
'SecurityException: Invalid AES CTR HMAC key format: IV size is ' +
'out of range: ' + ivSizeOutOfRange[i]);
continue;
}
fail('An exception should be thrown.');
}
});
it('get primitive short hmac key', async function() {
const keySize = 5;
const aeadKeyManager = new AesCtrHmacAeadKeyManager();
const key: PbAesCtrHmacAeadKey = createTestKey();
key.getHmacKey()?.setKeyValue(new Uint8Array(keySize));
try {
await aeadKeyManager.getPrimitive(Aead, key);
} catch (e) {
expect(e.toString())
.toBe(
'SecurityException: Invalid AES CTR HMAC key format: HMAC key is' +
' too small: ' + keySize);
return;
}
fail('An exception should be thrown');
});
it('get primitive hmac key unsupported hash type', async function() {
const aeadKeyManager = new AesCtrHmacAeadKeyManager();
const key: PbAesCtrHmacAeadKey = createTestKey();
key.getHmacKey()?.getParams()?.setHash(PbHashType.UNKNOWN_HASH);
try {
await aeadKeyManager.getPrimitive(Aead, key);
} catch (e) {
expect(e.toString()).toBe('SecurityException: Unknown hash type.');
return;
}
fail('An exception should be thrown');
});
it('get primitive hmac key small tag size', async function() {
const SMALL_TAG_SIZE = 9;
const aeadKeyManager = new AesCtrHmacAeadKeyManager();
const key: PbAesCtrHmacAeadKey = createTestKey();
key.getHmacKey()?.getParams()?.setTagSize(SMALL_TAG_SIZE);
try {
await aeadKeyManager.getPrimitive(Aead, key);
} catch (e) {
expect(e.toString())
.toBe(
'SecurityException: Invalid HMAC params: tag size ' +
SMALL_TAG_SIZE + ' is too small.');
return;
}
fail('An exception should be thrown');
});
it('get primitive hmac big tag size', async function() {
const tagSizes = [
{'hashType': PbHashType.SHA1, 'tagSize': 22},
{'hashType': PbHashType.SHA256, 'tagSize': 34},
{'hashType': PbHashType.SHA512, 'tagSize': 66},
];
const manager = new AesCtrHmacAeadKeyManager();
const key: PbAesCtrHmacAeadKey = createTestKey();
const tagSizesLength = tagSizes.length;
for (let i = 0; i < tagSizesLength; i++) {
const params = assertExists(key.getHmacKey()?.getParams());
params.setHash(tagSizes[i]['hashType']);
params.setTagSize(tagSizes[i]['tagSize']);
try {
await manager.getPrimitive(Aead, key);
} catch (e) {
expect(e.toString())
.toBe(
'SecurityException: Invalid HMAC params: tag size ' +
tagSizes[i]['tagSize'] + ' is out of range.');
continue;
}
fail('An exception should be thrown.');
}
});
// tests for getting primitive from valid key/keyData
it('get primitive from key', async function() {
const aeadKeyManager = new AesCtrHmacAeadKeyManager();
const key = createTestKey();
const plaintext = Random.randBytes(8);
const aad = Random.randBytes(8);
const primitive: Aead = await aeadKeyManager.getPrimitive(Aead, key);
const ciphertext = await primitive.encrypt(plaintext, aad);
const decryptedCiphertext = await primitive.decrypt(ciphertext, aad);
expect(decryptedCiphertext).toEqual(plaintext);
});
it('get primitive from key data', async function() {
const aeadKeyManager = new AesCtrHmacAeadKeyManager();
const keyData = createTestKeyData();
const plaintext = Random.randBytes(8);
const aad = Random.randBytes(8);
const primitive: Aead = await aeadKeyManager.getPrimitive(Aead, keyData);
const ciphertext = await primitive.encrypt(plaintext, aad);
const decryptedCiphertext = await primitive.decrypt(ciphertext, aad);
expect(decryptedCiphertext).toEqual(plaintext);
});
/////////////////////////////////////////////////////////////////////////////
// tests for getVersion, getKeyType and doesSupport methods
it('get version should be zero', async function() {
const manager = new AesCtrHmacAeadKeyManager();
expect(manager.getVersion()).toBe(0);
});
it('get key type should be aes ctr hmac aead key', async function() {
const manager = new AesCtrHmacAeadKeyManager();
expect(manager.getKeyType()).toBe(KEY_TYPE);
});
it('does support should support aes ctr hmac aead key', async function() {
const manager = new AesCtrHmacAeadKeyManager();
expect(manager.doesSupport(KEY_TYPE)).toBe(true);
});
it('get primitive type should be aead', async function() {
const manager = new AesCtrHmacAeadKeyManager();
expect(manager.getPrimitiveType()).toBe(Aead);
});
}); | the_stack |
import { when } from 'jest-when';
import { getAddressAndPublicKeyFromPassphrase } from '@liskhq/lisk-cryptography';
import { codec } from '@liskhq/lisk-codec';
import { Transaction } from '../../src/transaction';
import { nodeInfo, schema, tx, accountSchema } from '../utils/transaction';
describe('transaction', () => {
let channelMock: any;
let transaction: Transaction;
const passphrases = [
'trim elegant oven term access apple obtain error grain excite lawn neck',
'faculty inspire crouch quit sorry vague hard ski scrap jaguar garment limb',
];
const passphrase1 = 'trim elegant oven term access apple obtain error grain excite lawn neck';
const { publicKey: publicKey1 } = getAddressAndPublicKeyFromPassphrase(passphrase1);
const publicKey2 = Buffer.from(
'fa406b6952d377f0278920e3eb8da919e4cf5c68b02eeba5d8b3334fdc0369b6',
'hex',
);
const txHex =
'0802100018362080ade2042a20dd4ff255fe04dd0159a468e9e9c8872c4f4466220f7e326377a0ceb9df2fa21a321d0880ade2041214654087c2df870402ab0b1996616fd3355d61f62c1a003a4079cb29dca7bb9fce73a1e8ca28264f779074d259c341b536bae9a54c0a2e4713580fcb192f9f15f43730650d69bb1f3dcfb4cb6da7d69ca990a763ed78569700';
const accountHex =
'0a14ab0041a7d3f7b2c290b5b834d46bdc7b7eb8581512050880c2d72f1a020800220208002a3b0a1a0a0a67656e657369735f3834180020850528003080a094a58d1d121d0a14ab0041a7d3f7b2c290b5b834d46bdc7b7eb858151080a094a58d1d';
const encodedTx = Buffer.from(txHex, 'hex');
const validTransaction = {
moduleID: 2,
assetID: 0,
nonce: BigInt('1'),
fee: BigInt('10000000'),
senderPublicKey: publicKey1,
asset: {
recipientAddress: Buffer.from('3a971fd02b4a07fc20aad1936d3cb1d263b96e0f', 'hex'),
amount: BigInt('4008489300000000'),
data: '',
},
};
const txId = Buffer.from(tx.id, 'hex');
beforeEach(() => {
channelMock = {
connect: jest.fn(),
disconnect: jest.fn(),
invoke: jest.fn(),
subscribe: jest.fn(),
};
when(channelMock.invoke)
.calledWith('app:getAccount', expect.anything())
.mockResolvedValue(accountHex as never)
.calledWith('app:getTransactionByID', expect.anything())
.mockResolvedValue(txHex as never)
.calledWith('app:getTransactionsFromPool')
.mockResolvedValue([txHex] as never)
.calledWith('app:postTransaction', expect.anything())
.mockResolvedValue(txHex as never);
transaction = new Transaction(channelMock, schema, nodeInfo);
});
describe('Transaction', () => {
describe('constructor', () => {
it('should initialize with channel', () => {
expect(transaction['_channel']).toBe(channelMock);
});
});
describe('get', () => {
describe('transaction by id as buffer', () => {
it('should invoke app:getTransactionByID', async () => {
await transaction.get(txId);
expect(channelMock.invoke).toHaveBeenCalledTimes(1);
expect(channelMock.invoke).toHaveBeenCalledWith('app:getTransactionByID', {
id: txId.toString('hex'),
});
});
});
describe('transaction by id as hex', () => {
it('should invoke app:getTransactionByID', async () => {
await transaction.get(txId.toString('hex'));
expect(channelMock.invoke).toHaveBeenCalledTimes(1);
expect(channelMock.invoke).toHaveBeenCalledWith('app:getTransactionByID', {
id: txId.toString('hex'),
});
});
});
});
describe('getFromPool', () => {
it('should invoke app:getTransactionsFromPool', async () => {
await transaction.getFromPool();
expect(channelMock.invoke).toHaveBeenCalledTimes(1);
expect(channelMock.invoke).toHaveBeenCalledWith('app:getTransactionsFromPool');
});
});
describe('create', () => {
describe('when called with a valid transaction', () => {
it('should return created tx', async () => {
const returnedTx = await transaction.create(validTransaction, passphrase1);
expect(returnedTx.signatures).toHaveLength(1);
expect(returnedTx.signatures).toMatchSnapshot();
});
});
describe('when called without module id and module name in input', () => {
it('should throw error', async () => {
await expect(
transaction.create({ ...validTransaction, moduleID: undefined }, passphrase1),
).rejects.toThrow('Missing moduleID and moduleName');
});
});
describe('when called without asset id and asset name in input', () => {
it('should throw error', async () => {
await expect(
transaction.create({ ...validTransaction, assetID: undefined }, passphrase1),
).rejects.toThrow('Missing assetID and assetName');
});
});
describe('when called with module name which does not exist', () => {
it('should throw error', async () => {
await expect(
transaction.create(
{ ...validTransaction, moduleID: undefined, moduleName: 'newModule' },
passphrase1,
),
).rejects.toThrow('Module corresponding to name newModule not registered.');
});
});
describe('when called with asset name which does not exist', () => {
it('should throw error', async () => {
await expect(
transaction.create(
{ ...validTransaction, assetID: undefined, assetName: 'newAsset' },
passphrase1,
),
).rejects.toThrow('Asset corresponding to name newAsset not registered.');
});
});
describe('when called without nonce in input and account does not support nonce either', () => {
beforeEach(() => {
(codec as any)['_compileSchemas'] = [];
});
afterEach(() => {
(codec as any)['_compileSchemas'] = [];
});
it('should throw error', async () => {
const updatedSchema = {
...schema,
account: {
...schema.account,
properties: {
address: schema.account.properties.address,
keys: schema.account.properties.keys,
},
required: ['address', 'keys'],
},
};
transaction = new Transaction(channelMock, updatedSchema, nodeInfo);
await expect(
transaction.create({ ...validTransaction, nonce: undefined }, passphrase1),
).rejects.toThrow('Unsupported account type');
});
});
describe('when called with negative nonce in input', () => {
it('should throw error', async () => {
await expect(
transaction.create({ ...validTransaction, nonce: BigInt(-2452) }, passphrase1),
).rejects.toThrow('Nonce must be greater or equal to zero');
});
});
describe('when called with nonce equal to zero in input', () => {
it('should return created tx', async () => {
const returnedTx = await transaction.create(
{ ...validTransaction, nonce: BigInt(0) },
passphrase1,
);
expect(returnedTx.signatures).toHaveLength(1);
expect(returnedTx.signatures).toMatchSnapshot();
});
});
describe('when called without sender public key in input', () => {
it('should return created tx', async () => {
const returnedTx = await transaction.create(
{ ...validTransaction, senderPublicKey: undefined },
passphrase1,
);
expect(returnedTx.signatures).toHaveLength(1);
expect(returnedTx.signatures).toMatchSnapshot();
});
});
describe('when called with multi-signature account in input', () => {
it('should return created tx', async () => {
const multisigAccount = {
address: Buffer.from('ab0041a7d3f7b2c290b5b834d46bdc7b7eb85815', 'hex'),
token: { balance: BigInt('100000000') },
sequence: { nonce: BigInt('0') },
keys: {
numberOfSignatures: 1,
mandatoryKeys: [publicKey1],
optionalKeys: [publicKey2],
},
};
const multisigAccountHex = codec.encode(accountSchema, multisigAccount);
when(channelMock.invoke)
.calledWith('app:getAccount', expect.anything())
.mockResolvedValue(multisigAccountHex.toString('hex') as never);
const returnedTx = await transaction.create(validTransaction, passphrase1);
expect(returnedTx.signatures).toHaveLength(2);
expect(returnedTx.signatures).toMatchSnapshot();
});
});
describe('when called with optional keys in input', () => {
it('should return created tx', async () => {
const options = {
includeSenderSignature: true,
multisignatureKeys: {
mandatoryKeys: [],
optionalKeys: [publicKey2],
},
};
const returnedTx = await transaction.create(validTransaction, passphrase1, options);
expect(returnedTx.signatures).toHaveLength(2);
expect(returnedTx.signatures).toMatchSnapshot();
});
});
});
describe('sign', () => {
describe('when called with a valid transation', () => {
it('should return some signed transaction', () => {
const returnedTx = transaction.sign(validTransaction, passphrases);
expect(returnedTx).toBeDefined();
});
});
describe('when called with multi-signature account in input', () => {
it('should return created tx', async () => {
const multisigAccount = {
address: Buffer.from('ab0041a7d3f7b2c290b5b834d46bdc7b7eb85815', 'hex'),
token: { balance: BigInt('100000000') },
sequence: { nonce: BigInt('0') },
keys: {
numberOfSignatures: 1,
mandatoryKeys: [publicKey1],
optionalKeys: [publicKey2],
},
};
const multisigAccountHex = codec.encode(accountSchema, multisigAccount);
when(channelMock.invoke)
.calledWith('app:getAccount', expect.anything())
.mockResolvedValue(multisigAccountHex.toString('hex') as never);
const returnedTx = await transaction.sign(validTransaction, passphrases);
expect(returnedTx.signatures).toHaveLength(2);
expect(returnedTx.signatures).toMatchSnapshot();
});
});
describe('when called with optional keys in input', () => {
it('should return created tx', async () => {
const options = {
includeSenderSignature: true,
multisignatureKeys: {
mandatoryKeys: [],
optionalKeys: [publicKey2],
},
};
const returnedTx = await transaction.sign(validTransaction, passphrases, options);
expect(returnedTx.signatures).toHaveLength(2);
expect(returnedTx.signatures).toMatchSnapshot();
});
});
});
describe('send', () => {
it('should invoke app:postTransaction', async () => {
const trxId = await transaction.send(tx);
expect(channelMock.invoke).toHaveBeenCalledTimes(1);
expect(channelMock.invoke).toHaveBeenCalledWith('app:postTransaction', {
transaction: txHex,
});
expect(trxId).toEqual(txHex);
});
});
describe('decode', () => {
describe('transaction from input as buffer', () => {
it('should return decoded transaction', () => {
const decodedTx = transaction.decode(encodedTx);
expect(decodedTx).toMatchSnapshot();
});
});
describe('transaction from input as hex', () => {
it('should return decoded transaction', () => {
const decodedTx = transaction.decode(encodedTx.toString('hex'));
expect(decodedTx).toMatchSnapshot();
});
});
});
describe('encode', () => {
it('should return encoded transaction', () => {
const returnedTx = transaction.encode(tx);
expect(returnedTx).toEqual(encodedTx);
});
});
describe('computeMinFee', () => {
it('should return some value', () => {
const fee = transaction.computeMinFee(tx);
expect(fee).toBeDefined();
});
});
describe('toJSON', () => {
it('should return decoded transaction in JSON', () => {
const txAsJSON = transaction.toJSON(tx);
expect(() => JSON.parse(JSON.stringify(txAsJSON))).not.toThrow();
});
});
describe('fromJSON', () => {
it('should return decoded transaction in JSON', () => {
const txCopy = { ...tx };
(txCopy as any).id = txId;
const txAsJSON = transaction.toJSON(txCopy);
const txAsObject = transaction.fromJSON(txAsJSON);
expect(txAsObject).toEqual(txCopy);
});
});
});
}); | the_stack |
* @param {Uint32Array} lgr_comm
* @param {WasmPastaFpPlonkVerifierIndex} index
* @param {WasmPastaFpProverProof} proof
* @returns {WasmPastaFpPlonkOracles}
*/
export function caml_pasta_fp_plonk_oracles_create(lgr_comm: Uint32Array, index: WasmPastaFpPlonkVerifierIndex, proof: WasmPastaFpProverProof): WasmPastaFpPlonkOracles;
/**
 * @returns {WasmPastaFpPlonkOracles}
 */
export function caml_pasta_fp_plonk_oracles_dummy(): WasmPastaFpPlonkOracles;
/**
 * @param {WasmPastaFpPlonkOracles} x
 * @returns {WasmPastaFpPlonkOracles}
 */
export function caml_pasta_fp_plonk_oracles_deep_copy(x: WasmPastaFpPlonkOracles): WasmPastaFpPlonkOracles;
// --- Pasta Fp Plonk prover proofs ---
// NOTE(review): these look like wasm-bindgen-generated ambient declarations;
// the semantics live in the Rust/wasm side — confirm behavior there, not here.
/**
 * @param {WasmPastaFpPlonkIndex} index
 * @param {Uint8Array} primary_input
 * @param {Uint8Array} auxiliary_input
 * @param {Uint8Array} prev_challenges
 * @param {Uint32Array} prev_sgs
 * @returns {WasmPastaFpProverProof}
 */
export function caml_pasta_fp_plonk_proof_create(index: WasmPastaFpPlonkIndex, primary_input: Uint8Array, auxiliary_input: Uint8Array, prev_challenges: Uint8Array, prev_sgs: Uint32Array): WasmPastaFpProverProof;
/**
 * @param {Uint32Array} lgr_comm
 * @param {WasmPastaFpPlonkVerifierIndex} index
 * @param {WasmPastaFpProverProof} proof
 * @returns {boolean}
 */
export function caml_pasta_fp_plonk_proof_verify(lgr_comm: Uint32Array, index: WasmPastaFpPlonkVerifierIndex, proof: WasmPastaFpProverProof): boolean;
/**
 * @param {WasmVecVecVestaPolyComm} lgr_comms
 * @param {Uint32Array} indexes
 * @param {Uint32Array} proofs
 * @returns {boolean}
 */
export function caml_pasta_fp_plonk_proof_batch_verify(lgr_comms: WasmVecVecVestaPolyComm, indexes: Uint32Array, proofs: Uint32Array): boolean;
/**
 * @returns {WasmPastaFpProverProof}
 */
export function caml_pasta_fp_plonk_proof_dummy(): WasmPastaFpProverProof;
/**
 * @param {WasmPastaFpProverProof} x
 * @returns {WasmPastaFpProverProof}
 */
export function caml_pasta_fp_plonk_proof_deep_copy(x: WasmPastaFpProverProof): WasmPastaFpProverProof;
// --- Pasta Fp field arithmetic ---
// NOTE(review): field elements are passed and returned as Uint8Array; presumably
// a fixed-width byte encoding of the scalar — confirm against the wasm bindings.
/**
 * @returns {number}
 */
export function caml_pasta_fp_size_in_bits(): number;
/**
 * @returns {Uint8Array}
 */
export function caml_pasta_fp_size(): Uint8Array;
/**
 * @param {Uint8Array} x
 * @param {Uint8Array} y
 * @returns {Uint8Array}
 */
export function caml_pasta_fp_add(x: Uint8Array, y: Uint8Array): Uint8Array;
/**
 * @param {Uint8Array} x
 * @param {Uint8Array} y
 * @returns {Uint8Array}
 */
export function caml_pasta_fp_sub(x: Uint8Array, y: Uint8Array): Uint8Array;
/**
 * @param {Uint8Array} x
 * @returns {Uint8Array}
 */
export function caml_pasta_fp_negate(x: Uint8Array): Uint8Array;
/**
 * @param {Uint8Array} x
 * @param {Uint8Array} y
 * @returns {Uint8Array}
 */
export function caml_pasta_fp_mul(x: Uint8Array, y: Uint8Array): Uint8Array;
/**
 * @param {Uint8Array} x
 * @param {Uint8Array} y
 * @returns {Uint8Array}
 */
export function caml_pasta_fp_div(x: Uint8Array, y: Uint8Array): Uint8Array;
// inv and sqrt return undefined when no result exists (zero / non-residue).
/**
 * @param {Uint8Array} x
 * @returns {Uint8Array | undefined}
 */
export function caml_pasta_fp_inv(x: Uint8Array): Uint8Array | undefined;
/**
 * @param {Uint8Array} x
 * @returns {Uint8Array}
 */
export function caml_pasta_fp_square(x: Uint8Array): Uint8Array;
/**
 * @param {Uint8Array} x
 * @returns {boolean}
 */
export function caml_pasta_fp_is_square(x: Uint8Array): boolean;
/**
 * @param {Uint8Array} x
 * @returns {Uint8Array | undefined}
 */
export function caml_pasta_fp_sqrt(x: Uint8Array): Uint8Array | undefined;
/**
 * @param {number} i
 * @returns {Uint8Array}
 */
export function caml_pasta_fp_of_int(i: number): Uint8Array;
/**
 * @param {Uint8Array} x
 * @returns {string}
 */
export function caml_pasta_fp_to_string(x: Uint8Array): string;
/**
 * @param {string} s
 * @returns {Uint8Array}
 */
export function caml_pasta_fp_of_string(s: string): Uint8Array;
/**
 * @param {Uint8Array} x
 */
export function caml_pasta_fp_print(x: Uint8Array): void;
/**
 * @param {Uint8Array} x
 * @param {Uint8Array} y
 * @returns {number}
 */
export function caml_pasta_fp_compare(x: Uint8Array, y: Uint8Array): number;
/**
 * @param {Uint8Array} x
 * @param {Uint8Array} y
 * @returns {boolean}
 */
export function caml_pasta_fp_equal(x: Uint8Array, y: Uint8Array): boolean;
/**
 * @returns {Uint8Array}
 */
export function caml_pasta_fp_random(): Uint8Array;
/**
 * @param {number} i
 * @returns {Uint8Array}
 */
export function caml_pasta_fp_rng(i: number): Uint8Array;
/**
 * @param {Uint8Array} x
 * @returns {Uint8Array}
 */
export function caml_pasta_fp_to_bigint(x: Uint8Array): Uint8Array;
/**
 * @param {Uint8Array} x
 * @returns {Uint8Array}
 */
export function caml_pasta_fp_of_bigint(x: Uint8Array): Uint8Array;
/**
 * @returns {Uint8Array}
 */
export function caml_pasta_fp_two_adic_root_of_unity(): Uint8Array;
/**
 * @param {number} log2_size
 * @returns {Uint8Array}
 */
export function caml_pasta_fp_domain_generator(log2_size: number): Uint8Array;
/**
 * @param {Uint8Array} x
 * @returns {Uint8Array}
 */
export function caml_pasta_fp_to_bytes(x: Uint8Array): Uint8Array;
/**
 * @param {Uint8Array} x
 * @returns {Uint8Array}
 */
export function caml_pasta_fp_of_bytes(x: Uint8Array): Uint8Array;
/**
 * @param {Uint8Array} x
 * @returns {Uint8Array}
 */
export function caml_pasta_fp_deep_copy(x: Uint8Array): Uint8Array;
// --- Pasta Fq field arithmetic (same surface as the Fp group above, over Fq) ---
/**
 * @returns {number}
 */
export function caml_pasta_fq_size_in_bits(): number;
/**
 * @returns {Uint8Array}
 */
export function caml_pasta_fq_size(): Uint8Array;
/**
 * @param {Uint8Array} x
 * @param {Uint8Array} y
 * @returns {Uint8Array}
 */
export function caml_pasta_fq_add(x: Uint8Array, y: Uint8Array): Uint8Array;
/**
 * @param {Uint8Array} x
 * @param {Uint8Array} y
 * @returns {Uint8Array}
 */
export function caml_pasta_fq_sub(x: Uint8Array, y: Uint8Array): Uint8Array;
/**
 * @param {Uint8Array} x
 * @returns {Uint8Array}
 */
export function caml_pasta_fq_negate(x: Uint8Array): Uint8Array;
/**
 * @param {Uint8Array} x
 * @param {Uint8Array} y
 * @returns {Uint8Array}
 */
export function caml_pasta_fq_mul(x: Uint8Array, y: Uint8Array): Uint8Array;
/**
 * @param {Uint8Array} x
 * @param {Uint8Array} y
 * @returns {Uint8Array}
 */
export function caml_pasta_fq_div(x: Uint8Array, y: Uint8Array): Uint8Array;
// inv and sqrt return undefined when no result exists (zero / non-residue).
/**
 * @param {Uint8Array} x
 * @returns {Uint8Array | undefined}
 */
export function caml_pasta_fq_inv(x: Uint8Array): Uint8Array | undefined;
/**
 * @param {Uint8Array} x
 * @returns {Uint8Array}
 */
export function caml_pasta_fq_square(x: Uint8Array): Uint8Array;
/**
 * @param {Uint8Array} x
 * @returns {boolean}
 */
export function caml_pasta_fq_is_square(x: Uint8Array): boolean;
/**
 * @param {Uint8Array} x
 * @returns {Uint8Array | undefined}
 */
export function caml_pasta_fq_sqrt(x: Uint8Array): Uint8Array | undefined;
/**
 * @param {number} i
 * @returns {Uint8Array}
 */
export function caml_pasta_fq_of_int(i: number): Uint8Array;
/**
 * @param {Uint8Array} x
 * @returns {string}
 */
export function caml_pasta_fq_to_string(x: Uint8Array): string;
/**
 * @param {string} s
 * @returns {Uint8Array}
 */
export function caml_pasta_fq_of_string(s: string): Uint8Array;
/**
 * @param {Uint8Array} x
 */
export function caml_pasta_fq_print(x: Uint8Array): void;
/**
 * @param {Uint8Array} x
 * @param {Uint8Array} y
 * @returns {number}
 */
export function caml_pasta_fq_compare(x: Uint8Array, y: Uint8Array): number;
/**
 * @param {Uint8Array} x
 * @param {Uint8Array} y
 * @returns {boolean}
 */
export function caml_pasta_fq_equal(x: Uint8Array, y: Uint8Array): boolean;
/**
 * @returns {Uint8Array}
 */
export function caml_pasta_fq_random(): Uint8Array;
/**
 * @param {number} i
 * @returns {Uint8Array}
 */
export function caml_pasta_fq_rng(i: number): Uint8Array;
/**
 * @param {Uint8Array} x
 * @returns {Uint8Array}
 */
export function caml_pasta_fq_to_bigint(x: Uint8Array): Uint8Array;
/**
 * @param {Uint8Array} x
 * @returns {Uint8Array}
 */
export function caml_pasta_fq_of_bigint(x: Uint8Array): Uint8Array;
/**
 * @returns {Uint8Array}
 */
export function caml_pasta_fq_two_adic_root_of_unity(): Uint8Array;
/**
 * @param {number} log2_size
 * @returns {Uint8Array}
 */
export function caml_pasta_fq_domain_generator(log2_size: number): Uint8Array;
/**
 * @param {Uint8Array} x
 * @returns {Uint8Array}
 */
export function caml_pasta_fq_to_bytes(x: Uint8Array): Uint8Array;
/**
 * @param {Uint8Array} x
 * @returns {Uint8Array}
 */
export function caml_pasta_fq_of_bytes(x: Uint8Array): Uint8Array;
/**
 * @param {Uint8Array} x
 * @returns {Uint8Array}
 */
export function caml_pasta_fq_deep_copy(x: Uint8Array): Uint8Array;
// --- Pasta Fq Plonk oracles ---
/**
 * @param {Uint32Array} lgr_comm
 * @param {WasmPastaFqPlonkVerifierIndex} index
 * @param {WasmPastaFqProverProof} proof
 * @returns {WasmPastaFqPlonkOracles}
 */
export function caml_pasta_fq_plonk_oracles_create(lgr_comm: Uint32Array, index: WasmPastaFqPlonkVerifierIndex, proof: WasmPastaFqProverProof): WasmPastaFqPlonkOracles;
/**
 * @returns {WasmPastaFqPlonkOracles}
 */
export function caml_pasta_fq_plonk_oracles_dummy(): WasmPastaFqPlonkOracles;
/**
 * @param {WasmPastaFqPlonkOracles} x
 * @returns {WasmPastaFqPlonkOracles}
 */
export function caml_pasta_fq_plonk_oracles_deep_copy(x: WasmPastaFqPlonkOracles): WasmPastaFqPlonkOracles;
// --- Plonk verifier indexes (Fp, then Fq) ---
// read/write take an optional offset/append flag first; undefined selects the
// default behavior on the wasm side.
/**
 * @param {number | undefined} offset
 * @param {WasmPastaFpUrs} urs
 * @param {string} path
 * @returns {WasmPastaFpPlonkVerifierIndex}
 */
export function caml_pasta_fp_plonk_verifier_index_read(offset: number | undefined, urs: WasmPastaFpUrs, path: string): WasmPastaFpPlonkVerifierIndex;
/**
 * @param {boolean | undefined} append
 * @param {WasmPastaFpPlonkVerifierIndex} index
 * @param {string} path
 */
export function caml_pasta_fp_plonk_verifier_index_write(append: boolean | undefined, index: WasmPastaFpPlonkVerifierIndex, path: string): void;
/**
 * @param {WasmPastaFpPlonkIndex} index
 * @returns {WasmPastaFpPlonkVerifierIndex}
 */
export function caml_pasta_fp_plonk_verifier_index_create(index: WasmPastaFpPlonkIndex): WasmPastaFpPlonkVerifierIndex;
/**
 * @param {number} log2_size
 * @returns {WasmPastaFpPlonkVerificationShifts}
 */
export function caml_pasta_fp_plonk_verifier_index_shifts(log2_size: number): WasmPastaFpPlonkVerificationShifts;
/**
 * @returns {WasmPastaFpPlonkVerifierIndex}
 */
export function caml_pasta_fp_plonk_verifier_index_dummy(): WasmPastaFpPlonkVerifierIndex;
/**
 * @param {WasmPastaFpPlonkVerifierIndex} x
 * @returns {WasmPastaFpPlonkVerifierIndex}
 */
export function caml_pasta_fp_plonk_verifier_index_deep_copy(x: WasmPastaFpPlonkVerifierIndex): WasmPastaFpPlonkVerifierIndex;
/**
 * @param {number | undefined} offset
 * @param {WasmPastaFqUrs} urs
 * @param {string} path
 * @returns {WasmPastaFqPlonkVerifierIndex}
 */
export function caml_pasta_fq_plonk_verifier_index_read(offset: number | undefined, urs: WasmPastaFqUrs, path: string): WasmPastaFqPlonkVerifierIndex;
/**
 * @param {boolean | undefined} append
 * @param {WasmPastaFqPlonkVerifierIndex} index
 * @param {string} path
 */
export function caml_pasta_fq_plonk_verifier_index_write(append: boolean | undefined, index: WasmPastaFqPlonkVerifierIndex, path: string): void;
/**
 * @param {WasmPastaFqPlonkIndex} index
 * @returns {WasmPastaFqPlonkVerifierIndex}
 */
export function caml_pasta_fq_plonk_verifier_index_create(index: WasmPastaFqPlonkIndex): WasmPastaFqPlonkVerifierIndex;
/**
 * @param {number} log2_size
 * @returns {WasmPastaFqPlonkVerificationShifts}
 */
export function caml_pasta_fq_plonk_verifier_index_shifts(log2_size: number): WasmPastaFqPlonkVerificationShifts;
/**
 * @returns {WasmPastaFqPlonkVerifierIndex}
 */
export function caml_pasta_fq_plonk_verifier_index_dummy(): WasmPastaFqPlonkVerifierIndex;
/**
 * @param {WasmPastaFqPlonkVerifierIndex} x
 * @returns {WasmPastaFqPlonkVerifierIndex}
 */
export function caml_pasta_fq_plonk_verifier_index_deep_copy(x: WasmPastaFqPlonkVerifierIndex): WasmPastaFqPlonkVerifierIndex;
// --- Pallas curve group operations (projective points; scalars as Uint8Array) ---
/**
 * @returns {WasmPallasGProjective}
 */
export function caml_pasta_pallas_one(): WasmPallasGProjective;
/**
 * @param {WasmPallasGProjective} x
 * @param {WasmPallasGProjective} y
 * @returns {WasmPallasGProjective}
 */
export function caml_pasta_pallas_add(x: WasmPallasGProjective, y: WasmPallasGProjective): WasmPallasGProjective;
/**
 * @param {WasmPallasGProjective} x
 * @param {WasmPallasGProjective} y
 * @returns {WasmPallasGProjective}
 */
export function caml_pasta_pallas_sub(x: WasmPallasGProjective, y: WasmPallasGProjective): WasmPallasGProjective;
/**
 * @param {WasmPallasGProjective} x
 * @returns {WasmPallasGProjective}
 */
export function caml_pasta_pallas_negate(x: WasmPallasGProjective): WasmPallasGProjective;
/**
 * @param {WasmPallasGProjective} x
 * @returns {WasmPallasGProjective}
 */
export function caml_pasta_pallas_double(x: WasmPallasGProjective): WasmPallasGProjective;
/**
 * @param {WasmPallasGProjective} x
 * @param {Uint8Array} y
 * @returns {WasmPallasGProjective}
 */
export function caml_pasta_pallas_scale(x: WasmPallasGProjective, y: Uint8Array): WasmPallasGProjective;
/**
 * @returns {WasmPallasGProjective}
 */
export function caml_pasta_pallas_random(): WasmPallasGProjective;
/**
 * @param {number} i
 * @returns {WasmPallasGProjective}
 */
export function caml_pasta_pallas_rng(i: number): WasmPallasGProjective;
/**
 * @returns {Uint8Array}
 */
export function caml_pasta_pallas_endo_base(): Uint8Array;
/**
 * @returns {Uint8Array}
 */
export function caml_pasta_pallas_endo_scalar(): Uint8Array;
/**
 * @param {WasmPallasGProjective} x
 * @returns {WasmPallasGAffine}
 */
export function caml_pasta_pallas_to_affine(x: WasmPallasGProjective): WasmPallasGAffine;
/**
 * @param {WasmPallasGAffine} x
 * @returns {WasmPallasGProjective}
 */
export function caml_pasta_pallas_of_affine(x: WasmPallasGAffine): WasmPallasGProjective;
/**
 * @param {Uint8Array} x
 * @param {Uint8Array} y
 * @returns {WasmPallasGProjective}
 */
export function caml_pasta_pallas_of_affine_coordinates(x: Uint8Array, y: Uint8Array): WasmPallasGProjective;
/**
 * @param {WasmPallasGAffine} x
 * @returns {WasmPallasGAffine}
 */
export function caml_pasta_pallas_affine_deep_copy(x: WasmPallasGAffine): WasmPallasGAffine;
/**
 * @returns {WasmPallasGAffine}
 */
export function caml_pasta_pallas_affine_one(): WasmPallasGAffine;
// --- Vesta curve group operations (mirrors the Pallas API) ---
/**
 * @returns {WasmVestaGProjective}
 */
export function caml_pasta_vesta_one(): WasmVestaGProjective;
/**
 * @param {WasmVestaGProjective} x
 * @param {WasmVestaGProjective} y
 * @returns {WasmVestaGProjective}
 */
export function caml_pasta_vesta_add(x: WasmVestaGProjective, y: WasmVestaGProjective): WasmVestaGProjective;
/**
 * @param {WasmVestaGProjective} x
 * @param {WasmVestaGProjective} y
 * @returns {WasmVestaGProjective}
 */
export function caml_pasta_vesta_sub(x: WasmVestaGProjective, y: WasmVestaGProjective): WasmVestaGProjective;
/**
 * @param {WasmVestaGProjective} x
 * @returns {WasmVestaGProjective}
 */
export function caml_pasta_vesta_negate(x: WasmVestaGProjective): WasmVestaGProjective;
/**
 * @param {WasmVestaGProjective} x
 * @returns {WasmVestaGProjective}
 */
export function caml_pasta_vesta_double(x: WasmVestaGProjective): WasmVestaGProjective;
/**
 * @param {WasmVestaGProjective} x
 * @param {Uint8Array} y
 * @returns {WasmVestaGProjective}
 */
export function caml_pasta_vesta_scale(x: WasmVestaGProjective, y: Uint8Array): WasmVestaGProjective;
/**
 * @returns {WasmVestaGProjective}
 */
export function caml_pasta_vesta_random(): WasmVestaGProjective;
/**
 * @param {number} i
 * @returns {WasmVestaGProjective}
 */
export function caml_pasta_vesta_rng(i: number): WasmVestaGProjective;
/**
 * @returns {Uint8Array}
 */
export function caml_pasta_vesta_endo_base(): Uint8Array;
/**
 * @returns {Uint8Array}
 */
export function caml_pasta_vesta_endo_scalar(): Uint8Array;
/**
 * @param {WasmVestaGProjective} x
 * @returns {WasmVestaGAffine}
 */
export function caml_pasta_vesta_to_affine(x: WasmVestaGProjective): WasmVestaGAffine;
/**
 * @param {WasmVestaGAffine} x
 * @returns {WasmVestaGProjective}
 */
export function caml_pasta_vesta_of_affine(x: WasmVestaGAffine): WasmVestaGProjective;
/**
 * @param {Uint8Array} x
 * @param {Uint8Array} y
 * @returns {WasmVestaGProjective}
 */
export function caml_pasta_vesta_of_affine_coordinates(x: Uint8Array, y: Uint8Array): WasmVestaGProjective;
/**
 * @param {WasmVestaGAffine} x
 * @returns {WasmVestaGAffine}
 */
export function caml_pasta_vesta_affine_deep_copy(x: WasmVestaGAffine): WasmVestaGAffine;
/**
 * @returns {WasmVestaGAffine}
 */
export function caml_pasta_vesta_affine_one(): WasmVestaGAffine;
// --- Fp / Fq URS handling ---
// NOTE(review): 'urs' presumably refers to the commitment scheme's reference
// string — confirm against the wasm-side implementation.
/**
 * @param {number} depth
 * @returns {WasmPastaFpUrs}
 */
export function caml_pasta_fp_urs_create(depth: number): WasmPastaFpUrs;
/**
 * @param {boolean | undefined} append
 * @param {WasmPastaFpUrs} urs
 * @param {string} path
 */
export function caml_pasta_fp_urs_write(append: boolean | undefined, urs: WasmPastaFpUrs, path: string): void;
/**
 * @param {number | undefined} offset
 * @param {string} path
 * @returns {WasmPastaFpUrs | undefined}
 */
export function caml_pasta_fp_urs_read(offset: number | undefined, path: string): WasmPastaFpUrs | undefined;
/**
 * @param {WasmPastaFpUrs} urs
 * @param {number} domain_size
 * @param {number} i
 * @returns {WasmPastaVestaPolyComm}
 */
export function caml_pasta_fp_urs_lagrange_commitment(urs: WasmPastaFpUrs, domain_size: number, i: number): WasmPastaVestaPolyComm;
/**
 * @param {WasmPastaFpUrs} urs
 * @param {number} domain_size
 * @param {Uint8Array} evals
 * @returns {WasmPastaVestaPolyComm}
 */
export function caml_pasta_fp_urs_commit_evaluations(urs: WasmPastaFpUrs, domain_size: number, evals: Uint8Array): WasmPastaVestaPolyComm;
/**
 * @param {WasmPastaFpUrs} urs
 * @param {Uint8Array} chals
 * @returns {WasmPastaVestaPolyComm}
 */
export function caml_pasta_fp_urs_b_poly_commitment(urs: WasmPastaFpUrs, chals: Uint8Array): WasmPastaVestaPolyComm;
/**
 * @param {WasmPastaFpUrs} urs
 * @param {Uint32Array} comms
 * @param {Uint8Array} chals
 * @returns {boolean}
 */
export function caml_pasta_fp_urs_batch_accumulator_check(urs: WasmPastaFpUrs, comms: Uint32Array, chals: Uint8Array): boolean;
/**
 * @param {WasmPastaFpUrs} urs
 * @returns {WasmVestaGAffine}
 */
export function caml_pasta_fp_urs_h(urs: WasmPastaFpUrs): WasmVestaGAffine;
/**
 * @param {number} depth
 * @returns {WasmPastaFqUrs}
 */
export function caml_pasta_fq_urs_create(depth: number): WasmPastaFqUrs;
/**
 * @param {boolean | undefined} append
 * @param {WasmPastaFqUrs} urs
 * @param {string} path
 */
export function caml_pasta_fq_urs_write(append: boolean | undefined, urs: WasmPastaFqUrs, path: string): void;
/**
 * @param {number | undefined} offset
 * @param {string} path
 * @returns {WasmPastaFqUrs | undefined}
 */
export function caml_pasta_fq_urs_read(offset: number | undefined, path: string): WasmPastaFqUrs | undefined;
/**
 * @param {WasmPastaFqUrs} urs
 * @param {number} domain_size
 * @param {number} i
 * @returns {WasmPastaPallasPolyComm}
 */
export function caml_pasta_fq_urs_lagrange_commitment(urs: WasmPastaFqUrs, domain_size: number, i: number): WasmPastaPallasPolyComm;
/**
 * @param {WasmPastaFqUrs} urs
 * @param {number} domain_size
 * @param {Uint8Array} evals
 * @returns {WasmPastaPallasPolyComm}
 */
export function caml_pasta_fq_urs_commit_evaluations(urs: WasmPastaFqUrs, domain_size: number, evals: Uint8Array): WasmPastaPallasPolyComm;
/**
 * @param {WasmPastaFqUrs} urs
 * @param {Uint8Array} chals
 * @returns {WasmPastaPallasPolyComm}
 */
export function caml_pasta_fq_urs_b_poly_commitment(urs: WasmPastaFqUrs, chals: Uint8Array): WasmPastaPallasPolyComm;
/**
 * @param {WasmPastaFqUrs} urs
 * @param {Uint32Array} comms
 * @param {Uint8Array} chals
 * @returns {boolean}
 */
export function caml_pasta_fq_urs_batch_accumulator_check(urs: WasmPastaFqUrs, comms: Uint32Array, chals: Uint8Array): boolean;
/**
 * @param {WasmPastaFqUrs} urs
 * @returns {WasmPallasGAffine}
 */
export function caml_pasta_fq_urs_h(urs: WasmPastaFqUrs): WasmPallasGAffine;
// --- Plonk gate vectors and prover indexes (Fq, then Fp) ---
/**
 * @returns {WasmPastaFqPlonkGateVector}
 */
export function caml_pasta_fq_plonk_gate_vector_create(): WasmPastaFqPlonkGateVector;
/**
 * @param {WasmPastaFqPlonkGateVector} v
 * @param {WasmPastaFqPlonkGate} gate
 */
export function caml_pasta_fq_plonk_gate_vector_add(v: WasmPastaFqPlonkGateVector, gate: WasmPastaFqPlonkGate): void;
/**
 * @param {WasmPastaFqPlonkGateVector} v
 * @param {number} i
 * @returns {WasmPastaFqPlonkGate}
 */
export function caml_pasta_fq_plonk_gate_vector_get(v: WasmPastaFqPlonkGateVector, i: number): WasmPastaFqPlonkGate;
/**
 * @param {WasmPastaFqPlonkGateVector} v
 * @param {WasmPlonkWire} t
 * @param {WasmPlonkWire} h
 */
export function caml_pasta_fq_plonk_gate_vector_wrap(v: WasmPastaFqPlonkGateVector, t: WasmPlonkWire, h: WasmPlonkWire): void;
/**
 * @param {WasmPastaFqPlonkGateVector} gates
 * @param {number} public_
 * @param {WasmPastaFqUrs} urs
 * @returns {WasmPastaFqPlonkIndex}
 */
export function caml_pasta_fq_plonk_index_create(gates: WasmPastaFqPlonkGateVector, public_: number, urs: WasmPastaFqUrs): WasmPastaFqPlonkIndex;
/**
 * @param {WasmPastaFqPlonkIndex} index
 * @returns {number}
 */
export function caml_pasta_fq_plonk_index_max_degree(index: WasmPastaFqPlonkIndex): number;
/**
 * @param {WasmPastaFqPlonkIndex} index
 * @returns {number}
 */
export function caml_pasta_fq_plonk_index_public_inputs(index: WasmPastaFqPlonkIndex): number;
/**
 * @param {WasmPastaFqPlonkIndex} index
 * @returns {number}
 */
export function caml_pasta_fq_plonk_index_domain_d1_size(index: WasmPastaFqPlonkIndex): number;
/**
 * @param {WasmPastaFqPlonkIndex} index
 * @returns {number}
 */
export function caml_pasta_fq_plonk_index_domain_d4_size(index: WasmPastaFqPlonkIndex): number;
/**
 * @param {WasmPastaFqPlonkIndex} index
 * @returns {number}
 */
export function caml_pasta_fq_plonk_index_domain_d8_size(index: WasmPastaFqPlonkIndex): number;
/**
 * @param {number | undefined} offset
 * @param {WasmPastaFqUrs} urs
 * @param {string} path
 * @returns {WasmPastaFqPlonkIndex}
 */
export function caml_pasta_fq_plonk_index_read(offset: number | undefined, urs: WasmPastaFqUrs, path: string): WasmPastaFqPlonkIndex;
/**
 * @param {boolean | undefined} append
 * @param {WasmPastaFqPlonkIndex} index
 * @param {string} path
 */
export function caml_pasta_fq_plonk_index_write(append: boolean | undefined, index: WasmPastaFqPlonkIndex, path: string): void;
/**
 * @returns {WasmPastaFpPlonkGateVector}
 */
export function caml_pasta_fp_plonk_gate_vector_create(): WasmPastaFpPlonkGateVector;
/**
 * @param {WasmPastaFpPlonkGateVector} v
 * @param {WasmPastaFpPlonkGate} gate
 */
export function caml_pasta_fp_plonk_gate_vector_add(v: WasmPastaFpPlonkGateVector, gate: WasmPastaFpPlonkGate): void;
/**
 * @param {WasmPastaFpPlonkGateVector} v
 * @param {number} i
 * @returns {WasmPastaFpPlonkGate}
 */
export function caml_pasta_fp_plonk_gate_vector_get(v: WasmPastaFpPlonkGateVector, i: number): WasmPastaFpPlonkGate;
/**
 * @param {WasmPastaFpPlonkGateVector} v
 * @param {WasmPlonkWire} t
 * @param {WasmPlonkWire} h
 */
export function caml_pasta_fp_plonk_gate_vector_wrap(v: WasmPastaFpPlonkGateVector, t: WasmPlonkWire, h: WasmPlonkWire): void;
/**
 * @param {WasmPastaFpPlonkGateVector} gates
 * @param {number} public_
 * @param {WasmPastaFpUrs} urs
 * @returns {WasmPastaFpPlonkIndex}
 */
export function caml_pasta_fp_plonk_index_create(gates: WasmPastaFpPlonkGateVector, public_: number, urs: WasmPastaFpUrs): WasmPastaFpPlonkIndex;
/**
 * @param {WasmPastaFpPlonkIndex} index
 * @returns {number}
 */
export function caml_pasta_fp_plonk_index_max_degree(index: WasmPastaFpPlonkIndex): number;
/**
 * @param {WasmPastaFpPlonkIndex} index
 * @returns {number}
 */
export function caml_pasta_fp_plonk_index_public_inputs(index: WasmPastaFpPlonkIndex): number;
/**
 * @param {WasmPastaFpPlonkIndex} index
 * @returns {number}
 */
export function caml_pasta_fp_plonk_index_domain_d1_size(index: WasmPastaFpPlonkIndex): number;
/**
 * @param {WasmPastaFpPlonkIndex} index
 * @returns {number}
 */
export function caml_pasta_fp_plonk_index_domain_d4_size(index: WasmPastaFpPlonkIndex): number;
/**
 * @param {WasmPastaFpPlonkIndex} index
 * @returns {number}
 */
export function caml_pasta_fp_plonk_index_domain_d8_size(index: WasmPastaFpPlonkIndex): number;
/**
 * @param {number | undefined} offset
 * @param {WasmPastaFpUrs} urs
 * @param {string} path
 * @returns {WasmPastaFpPlonkIndex}
 */
export function caml_pasta_fp_plonk_index_read(offset: number | undefined, urs: WasmPastaFpUrs, path: string): WasmPastaFpPlonkIndex;
/**
 * @param {boolean | undefined} append
 * @param {WasmPastaFpPlonkIndex} index
 * @param {string} path
 */
export function caml_pasta_fp_plonk_index_write(append: boolean | undefined, index: WasmPastaFpPlonkIndex, path: string): void;
/**
* @param {string} s
* @param {number} _len
* @param {number} base
* @returns {Uint8Array}
*/
export function caml_bigint_256_of_numeral(s: string, _len: number, base: number): Uint8Array;
/**
* @param {string} s
* @returns {Uint8Array}
*/
export function caml_bigint_256_of_decimal_string(s: string): Uint8Array;
/**
* @returns {number}
*/
export function caml_bigint_256_num_limbs(): number;
/**
* @returns {number}
*/
export function caml_bigint_256_bytes_per_limb(): number;
/**
* @param {Uint8Array} x
* @param {Uint8Array} y
* @returns {Uint8Array}
*/
export function caml_bigint_256_div(x: Uint8Array, y: Uint8Array): Uint8Array;
/**
* @param {Uint8Array} x
* @param {Uint8Array} y
* @returns {number}
*/
export function caml_bigint_256_compare(x: Uint8Array, y: Uint8Array): number;
/**
* @param {Uint8Array} x
*/
export function caml_bigint_256_print(x: Uint8Array): void;
/**
* @param {Uint8Array} x
* @returns {string}
*/
export function caml_bigint_256_to_string(x: Uint8Array): string;
/**
* @param {Uint8Array} x
* @param {number} i
* @returns {boolean}
*/
export function caml_bigint_256_test_bit(x: Uint8Array, i: number): boolean;
/**
* @param {Uint8Array} x
* @returns {Uint8Array}
*/
export function caml_bigint_256_to_bytes(x: Uint8Array): Uint8Array;
/**
* @param {Uint8Array} x
* @returns {Uint8Array}
*/
export function caml_bigint_256_of_bytes(x: Uint8Array): Uint8Array;
/**
* @param {Uint8Array} x
* @returns {Uint8Array}
*/
export function caml_bigint_256_deep_copy(x: Uint8Array): Uint8Array;
/**
* @param {string} name
*/
export function greet(name: string): void;
/**
* @param {string} s
*/
export function console_log(s: string): void;
/**
* @returns {number}
*/
export function create_zero_u32_ptr(): number;
/**
* @param {number} ptr
*/
export function free_u32_ptr(ptr: number): void;
/**
* @param {number} ptr
* @param {number} arg
*/
export function set_u32_ptr(ptr: number, arg: number): void;
/**
* @param {number} ptr
* @returns {number}
*/
export function wait_until_non_zero(ptr: number): number;
/**
* @param {WasmPastaFqPlonkIndex} index
* @param {Uint8Array} primary_input
* @param {Uint8Array} auxiliary_input
* @param {Uint8Array} prev_challenges
* @param {Uint32Array} prev_sgs
* @returns {WasmPastaFqProverProof}
*/
export function caml_pasta_fq_plonk_proof_create(index: WasmPastaFqPlonkIndex, primary_input: Uint8Array, auxiliary_input: Uint8Array, prev_challenges: Uint8Array, prev_sgs: Uint32Array): WasmPastaFqProverProof;
/**
* @param {Uint32Array} lgr_comm
* @param {WasmPastaFqPlonkVerifierIndex} index
* @param {WasmPastaFqProverProof} proof
* @returns {boolean}
*/
export function caml_pasta_fq_plonk_proof_verify(lgr_comm: Uint32Array, index: WasmPastaFqPlonkVerifierIndex, proof: WasmPastaFqProverProof): boolean;
/**
* @param {WasmVecVecPallasPolyComm} lgr_comms
* @param {Uint32Array} indexes
* @param {Uint32Array} proofs
* @returns {boolean}
*/
export function caml_pasta_fq_plonk_proof_batch_verify(lgr_comms: WasmVecVecPallasPolyComm, indexes: Uint32Array, proofs: Uint32Array): boolean;
/**
* @returns {WasmPastaFqProverProof}
*/
export function caml_pasta_fq_plonk_proof_dummy(): WasmPastaFqProverProof;
/**
* @param {WasmPastaFqProverProof} x
* @returns {WasmPastaFqProverProof}
*/
export function caml_pasta_fq_plonk_proof_deep_copy(x: WasmPastaFqProverProof): WasmPastaFqProverProof;
/**
* @param {number} num_threads
* @returns {Promise<any>}
*/
export function initThreadPool(num_threads: number): Promise<any>;
/**
* @param {number} receiver
*/
export function wbg_rayon_start_worker(receiver: number): void;
/**
*/
export enum WasmPlonkGateType {
Zero,
Generic,
Poseidon,
Add1,
Add2,
Vbmul1,
Vbmul2,
Vbmul3,
Endomul1,
Endomul2,
Endomul3,
Endomul4,
}
/**
*/
export enum WasmPlonkCol {
L,
R,
O,
}
/**
*/
export class WasmPallasGAffine {
free(): void;
/**
*/
infinity: boolean;
/**
*/
x: Uint8Array;
/**
*/
y: Uint8Array;
}
/**
*/
export class WasmPallasGProjective {
free(): void;
}
/**
*/
export class WasmPastaFpOpeningProof {
free(): void;
/**
* @param {Uint32Array} lr_0
* @param {Uint32Array} lr_1
* @param {WasmVestaGAffine} delta
* @param {Uint8Array} z1
* @param {Uint8Array} z2
* @param {WasmVestaGAffine} sg
*/
constructor(lr_0: Uint32Array, lr_1: Uint32Array, delta: WasmVestaGAffine, z1: Uint8Array, z2: Uint8Array, sg: WasmVestaGAffine);
/**
* @returns {WasmVestaGAffine}
*/
delta: WasmVestaGAffine;
/**
* @returns {Uint32Array}
*/
lr_0: Uint32Array;
/**
* @returns {Uint32Array}
*/
lr_1: Uint32Array;
/**
* @returns {WasmVestaGAffine}
*/
sg: WasmVestaGAffine;
/**
*/
z1: Uint8Array;
/**
*/
z2: Uint8Array;
}
/**
*/
export class WasmPastaFpPlonkDomain {
free(): void;
/**
* @param {number} log_size_of_group
* @param {Uint8Array} group_gen
*/
constructor(log_size_of_group: number, group_gen: Uint8Array);
/**
*/
group_gen: Uint8Array;
/**
*/
log_size_of_group: number;
}
/**
*/
export class WasmPastaFpPlonkGate {
free(): void;
/**
* @param {number} typ
* @param {WasmPlonkWires} wires
* @param {Uint8Array} c
*/
constructor(typ: number, wires: WasmPlonkWires, c: Uint8Array);
/**
* @returns {Uint8Array}
*/
c: Uint8Array;
/**
*/
typ: number;
/**
*/
wires: WasmPlonkWires;
}
/**
*/
export class WasmPastaFpPlonkGateVector {
free(): void;
}
/**
*/
export class WasmPastaFpPlonkIndex {
free(): void;
}
/**
*/
export class WasmPastaFpPlonkOracles {
free(): void;
/**
* @param {Uint8Array} o
* @param {Uint8Array} p_eval0
* @param {Uint8Array} p_eval1
* @param {Uint8Array} opening_prechallenges
* @param {Uint8Array} digest_before_evaluations
*/
constructor(o: Uint8Array, p_eval0: Uint8Array, p_eval1: Uint8Array, opening_prechallenges: Uint8Array, digest_before_evaluations: Uint8Array);
/**
*/
digest_before_evaluations: Uint8Array;
/**
*/
o: Uint8Array;
/**
* @returns {Uint8Array}
*/
opening_prechallenges: Uint8Array;
/**
*/
p_eval0: Uint8Array;
/**
*/
p_eval1: Uint8Array;
}
/**
*/
export class WasmPastaFpPlonkVerificationEvals {
free(): void;
/**
* @param {WasmPastaVestaPolyComm} sigma_comm0
* @param {WasmPastaVestaPolyComm} sigma_comm1
* @param {WasmPastaVestaPolyComm} sigma_comm2
* @param {WasmPastaVestaPolyComm} ql_comm
* @param {WasmPastaVestaPolyComm} qr_comm
* @param {WasmPastaVestaPolyComm} qo_comm
* @param {WasmPastaVestaPolyComm} qm_comm
* @param {WasmPastaVestaPolyComm} qc_comm
* @param {WasmPastaVestaPolyComm} rcm_comm0
* @param {WasmPastaVestaPolyComm} rcm_comm1
* @param {WasmPastaVestaPolyComm} rcm_comm2
* @param {WasmPastaVestaPolyComm} psm_comm
* @param {WasmPastaVestaPolyComm} add_comm
* @param {WasmPastaVestaPolyComm} mul1_comm
* @param {WasmPastaVestaPolyComm} mul2_comm
* @param {WasmPastaVestaPolyComm} emul1_comm
* @param {WasmPastaVestaPolyComm} emul2_comm
* @param {WasmPastaVestaPolyComm} emul3_comm
*/
constructor(sigma_comm0: WasmPastaVestaPolyComm, sigma_comm1: WasmPastaVestaPolyComm, sigma_comm2: WasmPastaVestaPolyComm, ql_comm: WasmPastaVestaPolyComm, qr_comm: WasmPastaVestaPolyComm, qo_comm: WasmPastaVestaPolyComm, qm_comm: WasmPastaVestaPolyComm, qc_comm: WasmPastaVestaPolyComm, rcm_comm0: WasmPastaVestaPolyComm, rcm_comm1: WasmPastaVestaPolyComm, rcm_comm2: WasmPastaVestaPolyComm, psm_comm: WasmPastaVestaPolyComm, add_comm: WasmPastaVestaPolyComm, mul1_comm: WasmPastaVestaPolyComm, mul2_comm: WasmPastaVestaPolyComm, emul1_comm: WasmPastaVestaPolyComm, emul2_comm: WasmPastaVestaPolyComm, emul3_comm: WasmPastaVestaPolyComm);
/**
* @returns {WasmPastaVestaPolyComm}
*/
add_comm: WasmPastaVestaPolyComm;
/**
* @returns {WasmPastaVestaPolyComm}
*/
emul1_comm: WasmPastaVestaPolyComm;
/**
* @returns {WasmPastaVestaPolyComm}
*/
emul2_comm: WasmPastaVestaPolyComm;
/**
* @returns {WasmPastaVestaPolyComm}
*/
emul3_comm: WasmPastaVestaPolyComm;
/**
* @returns {WasmPastaVestaPolyComm}
*/
mul1_comm: WasmPastaVestaPolyComm;
/**
* @returns {WasmPastaVestaPolyComm}
*/
mul2_comm: WasmPastaVestaPolyComm;
/**
* @returns {WasmPastaVestaPolyComm}
*/
psm_comm: WasmPastaVestaPolyComm;
/**
* @returns {WasmPastaVestaPolyComm}
*/
qc_comm: WasmPastaVestaPolyComm;
/**
* @returns {WasmPastaVestaPolyComm}
*/
ql_comm: WasmPastaVestaPolyComm;
/**
* @returns {WasmPastaVestaPolyComm}
*/
qm_comm: WasmPastaVestaPolyComm;
/**
* @returns {WasmPastaVestaPolyComm}
*/
qo_comm: WasmPastaVestaPolyComm;
/**
* @returns {WasmPastaVestaPolyComm}
*/
qr_comm: WasmPastaVestaPolyComm;
/**
* @returns {WasmPastaVestaPolyComm}
*/
rcm_comm0: WasmPastaVestaPolyComm;
/**
* @returns {WasmPastaVestaPolyComm}
*/
rcm_comm1: WasmPastaVestaPolyComm;
/**
* @returns {WasmPastaVestaPolyComm}
*/
rcm_comm2: WasmPastaVestaPolyComm;
/**
* @returns {WasmPastaVestaPolyComm}
*/
sigma_comm0: WasmPastaVestaPolyComm;
/**
* @returns {WasmPastaVestaPolyComm}
*/
sigma_comm1: WasmPastaVestaPolyComm;
/**
* @returns {WasmPastaVestaPolyComm}
*/
sigma_comm2: WasmPastaVestaPolyComm;
}
/**
*/
export class WasmPastaFpPlonkVerificationShifts {
free(): void;
/**
* @param {Uint8Array} r
* @param {Uint8Array} o
*/
constructor(r: Uint8Array, o: Uint8Array);
/**
*/
o: Uint8Array;
/**
*/
r: Uint8Array;
}
/**
*/
export class WasmPastaFpPlonkVerifierIndex {
free(): void;
/**
* @param {WasmPastaFpPlonkDomain} domain
* @param {number} max_poly_size
* @param {number} max_quot_size
* @param {WasmPastaFpUrs} urs
* @param {WasmPastaFpPlonkVerificationEvals} evals
* @param {WasmPastaFpPlonkVerificationShifts} shifts
*/
constructor(domain: WasmPastaFpPlonkDomain, max_poly_size: number, max_quot_size: number, urs: WasmPastaFpUrs, evals: WasmPastaFpPlonkVerificationEvals, shifts: WasmPastaFpPlonkVerificationShifts);
/**
*/
domain: WasmPastaFpPlonkDomain;
/**
* @returns {WasmPastaFpPlonkVerificationEvals}
*/
evals: WasmPastaFpPlonkVerificationEvals;
/**
*/
max_poly_size: number;
/**
*/
max_quot_size: number;
/**
*/
shifts: WasmPastaFpPlonkVerificationShifts;
/**
* @returns {WasmPastaFpUrs}
*/
urs: WasmPastaFpUrs;
}
/**
*/
export class WasmPastaFpProofEvaluations {
free(): void;
/**
* @param {Uint8Array} l
* @param {Uint8Array} r
* @param {Uint8Array} o
* @param {Uint8Array} z
* @param {Uint8Array} t
* @param {Uint8Array} f
* @param {Uint8Array} sigma1
* @param {Uint8Array} sigma2
*/
constructor(l: Uint8Array, r: Uint8Array, o: Uint8Array, z: Uint8Array, t: Uint8Array, f: Uint8Array, sigma1: Uint8Array, sigma2: Uint8Array);
/**
* @returns {Uint8Array}
*/
f: Uint8Array;
/**
* @returns {Uint8Array}
*/
l: Uint8Array;
/**
* @returns {Uint8Array}
*/
o: Uint8Array;
/**
* @returns {Uint8Array}
*/
r: Uint8Array;
/**
* @returns {Uint8Array}
*/
sigma1: Uint8Array;
/**
* @returns {Uint8Array}
*/
sigma2: Uint8Array;
/**
* @returns {Uint8Array}
*/
t: Uint8Array;
/**
* @returns {Uint8Array}
*/
z: Uint8Array;
}
/**
*/
export class WasmPastaFpProverCommitments {
free(): void;
/**
* @param {WasmPastaVestaPolyComm} l_comm
* @param {WasmPastaVestaPolyComm} r_comm
* @param {WasmPastaVestaPolyComm} o_comm
* @param {WasmPastaVestaPolyComm} z_comm
* @param {WasmPastaVestaPolyComm} t_comm
*/
constructor(l_comm: WasmPastaVestaPolyComm, r_comm: WasmPastaVestaPolyComm, o_comm: WasmPastaVestaPolyComm, z_comm: WasmPastaVestaPolyComm, t_comm: WasmPastaVestaPolyComm);
/**
* @returns {WasmPastaVestaPolyComm}
*/
l_comm: WasmPastaVestaPolyComm;
/**
* @returns {WasmPastaVestaPolyComm}
*/
o_comm: WasmPastaVestaPolyComm;
/**
* @returns {WasmPastaVestaPolyComm}
*/
r_comm: WasmPastaVestaPolyComm;
/**
* @returns {WasmPastaVestaPolyComm}
*/
t_comm: WasmPastaVestaPolyComm;
/**
* @returns {WasmPastaVestaPolyComm}
*/
z_comm: WasmPastaVestaPolyComm;
}
/**
*/
export class WasmPastaFpProverProof {
free(): void;
/**
* @param {WasmPastaFpProverCommitments} commitments
* @param {WasmPastaFpOpeningProof} proof
* @param {WasmPastaFpProofEvaluations} evals0
* @param {WasmPastaFpProofEvaluations} evals1
* @param {Uint8Array} public_
* @param {WasmVecVecPastaFp} prev_challenges_scalars
* @param {Uint32Array} prev_challenges_comms
*/
constructor(commitments: WasmPastaFpProverCommitments, proof: WasmPastaFpOpeningProof, evals0: WasmPastaFpProofEvaluations, evals1: WasmPastaFpProofEvaluations, public_: Uint8Array, prev_challenges_scalars: WasmVecVecPastaFp, prev_challenges_comms: Uint32Array);
/**
* @returns {WasmPastaFpProverCommitments}
*/
commitments: WasmPastaFpProverCommitments;
/**
* @returns {WasmPastaFpProofEvaluations}
*/
evals0: WasmPastaFpProofEvaluations;
/**
* @returns {WasmPastaFpProofEvaluations}
*/
evals1: WasmPastaFpProofEvaluations;
/**
* @returns {Uint32Array}
*/
prev_challenges_comms: Uint32Array;
/**
* @returns {WasmVecVecPastaFp}
*/
prev_challenges_scalars: WasmVecVecPastaFp;
/**
* @returns {WasmPastaFpOpeningProof}
*/
proof: WasmPastaFpOpeningProof;
/**
* @returns {Uint8Array}
*/
public_: Uint8Array;
}
/**
*/
export class WasmPastaFpUrs {
free(): void;
}
/**
*/
export class WasmPastaFqOpeningProof {
free(): void;
/**
* @param {Uint32Array} lr_0
* @param {Uint32Array} lr_1
* @param {WasmPallasGAffine} delta
* @param {Uint8Array} z1
* @param {Uint8Array} z2
* @param {WasmPallasGAffine} sg
*/
constructor(lr_0: Uint32Array, lr_1: Uint32Array, delta: WasmPallasGAffine, z1: Uint8Array, z2: Uint8Array, sg: WasmPallasGAffine);
/**
* @returns {WasmPallasGAffine}
*/
delta: WasmPallasGAffine;
/**
* @returns {Uint32Array}
*/
lr_0: Uint32Array;
/**
* @returns {Uint32Array}
*/
lr_1: Uint32Array;
/**
* @returns {WasmPallasGAffine}
*/
sg: WasmPallasGAffine;
/**
*/
z1: Uint8Array;
/**
*/
z2: Uint8Array;
}
/**
*/
export class WasmPastaFqPlonkDomain {
free(): void;
/**
* @param {number} log_size_of_group
* @param {Uint8Array} group_gen
*/
constructor(log_size_of_group: number, group_gen: Uint8Array);
/**
*/
group_gen: Uint8Array;
/**
*/
log_size_of_group: number;
}
/**
*/
export class WasmPastaFqPlonkGate {
free(): void;
/**
* @param {number} typ
* @param {WasmPlonkWires} wires
* @param {Uint8Array} c
*/
constructor(typ: number, wires: WasmPlonkWires, c: Uint8Array);
/**
* @returns {Uint8Array}
*/
c: Uint8Array;
/**
*/
typ: number;
/**
*/
wires: WasmPlonkWires;
}
/**
*/
export class WasmPastaFqPlonkGateVector {
free(): void;
}
/**
*/
export class WasmPastaFqPlonkIndex {
free(): void;
}
/**
*/
export class WasmPastaFqPlonkOracles {
free(): void;
/**
* @param {Uint8Array} o
* @param {Uint8Array} p_eval0
* @param {Uint8Array} p_eval1
* @param {Uint8Array} opening_prechallenges
* @param {Uint8Array} digest_before_evaluations
*/
constructor(o: Uint8Array, p_eval0: Uint8Array, p_eval1: Uint8Array, opening_prechallenges: Uint8Array, digest_before_evaluations: Uint8Array);
/**
*/
digest_before_evaluations: Uint8Array;
/**
*/
o: Uint8Array;
/**
* @returns {Uint8Array}
*/
opening_prechallenges: Uint8Array;
/**
*/
p_eval0: Uint8Array;
/**
*/
p_eval1: Uint8Array;
}
/**
*/
export class WasmPastaFqPlonkVerificationEvals {
free(): void;
/**
* @param {WasmPastaPallasPolyComm} sigma_comm0
* @param {WasmPastaPallasPolyComm} sigma_comm1
* @param {WasmPastaPallasPolyComm} sigma_comm2
* @param {WasmPastaPallasPolyComm} ql_comm
* @param {WasmPastaPallasPolyComm} qr_comm
* @param {WasmPastaPallasPolyComm} qo_comm
* @param {WasmPastaPallasPolyComm} qm_comm
* @param {WasmPastaPallasPolyComm} qc_comm
* @param {WasmPastaPallasPolyComm} rcm_comm0
* @param {WasmPastaPallasPolyComm} rcm_comm1
* @param {WasmPastaPallasPolyComm} rcm_comm2
* @param {WasmPastaPallasPolyComm} psm_comm
* @param {WasmPastaPallasPolyComm} add_comm
* @param {WasmPastaPallasPolyComm} mul1_comm
* @param {WasmPastaPallasPolyComm} mul2_comm
* @param {WasmPastaPallasPolyComm} emul1_comm
* @param {WasmPastaPallasPolyComm} emul2_comm
* @param {WasmPastaPallasPolyComm} emul3_comm
*/
constructor(sigma_comm0: WasmPastaPallasPolyComm, sigma_comm1: WasmPastaPallasPolyComm, sigma_comm2: WasmPastaPallasPolyComm, ql_comm: WasmPastaPallasPolyComm, qr_comm: WasmPastaPallasPolyComm, qo_comm: WasmPastaPallasPolyComm, qm_comm: WasmPastaPallasPolyComm, qc_comm: WasmPastaPallasPolyComm, rcm_comm0: WasmPastaPallasPolyComm, rcm_comm1: WasmPastaPallasPolyComm, rcm_comm2: WasmPastaPallasPolyComm, psm_comm: WasmPastaPallasPolyComm, add_comm: WasmPastaPallasPolyComm, mul1_comm: WasmPastaPallasPolyComm, mul2_comm: WasmPastaPallasPolyComm, emul1_comm: WasmPastaPallasPolyComm, emul2_comm: WasmPastaPallasPolyComm, emul3_comm: WasmPastaPallasPolyComm);
/**
* @returns {WasmPastaPallasPolyComm}
*/
add_comm: WasmPastaPallasPolyComm;
/**
* @returns {WasmPastaPallasPolyComm}
*/
emul1_comm: WasmPastaPallasPolyComm;
/**
* @returns {WasmPastaPallasPolyComm}
*/
emul2_comm: WasmPastaPallasPolyComm;
/**
* @returns {WasmPastaPallasPolyComm}
*/
emul3_comm: WasmPastaPallasPolyComm;
/**
* @returns {WasmPastaPallasPolyComm}
*/
mul1_comm: WasmPastaPallasPolyComm;
/**
* @returns {WasmPastaPallasPolyComm}
*/
mul2_comm: WasmPastaPallasPolyComm;
/**
* @returns {WasmPastaPallasPolyComm}
*/
psm_comm: WasmPastaPallasPolyComm;
/**
* @returns {WasmPastaPallasPolyComm}
*/
qc_comm: WasmPastaPallasPolyComm;
/**
* @returns {WasmPastaPallasPolyComm}
*/
ql_comm: WasmPastaPallasPolyComm;
/**
* @returns {WasmPastaPallasPolyComm}
*/
qm_comm: WasmPastaPallasPolyComm;
/**
* @returns {WasmPastaPallasPolyComm}
*/
qo_comm: WasmPastaPallasPolyComm;
/**
* @returns {WasmPastaPallasPolyComm}
*/
qr_comm: WasmPastaPallasPolyComm;
/**
* @returns {WasmPastaPallasPolyComm}
*/
rcm_comm0: WasmPastaPallasPolyComm;
/**
* @returns {WasmPastaPallasPolyComm}
*/
rcm_comm1: WasmPastaPallasPolyComm;
/**
* @returns {WasmPastaPallasPolyComm}
*/
rcm_comm2: WasmPastaPallasPolyComm;
/**
* @returns {WasmPastaPallasPolyComm}
*/
sigma_comm0: WasmPastaPallasPolyComm;
/**
* @returns {WasmPastaPallasPolyComm}
*/
sigma_comm1: WasmPastaPallasPolyComm;
/**
* @returns {WasmPastaPallasPolyComm}
*/
sigma_comm2: WasmPastaPallasPolyComm;
}
/**
*/
export class WasmPastaFqPlonkVerificationShifts {
free(): void;
/**
* @param {Uint8Array} r
* @param {Uint8Array} o
*/
constructor(r: Uint8Array, o: Uint8Array);
/**
*/
o: Uint8Array;
/**
*/
r: Uint8Array;
}
/**
*/
export class WasmPastaFqPlonkVerifierIndex {
free(): void;
/**
* @param {WasmPastaFqPlonkDomain} domain
* @param {number} max_poly_size
* @param {number} max_quot_size
* @param {WasmPastaFqUrs} urs
* @param {WasmPastaFqPlonkVerificationEvals} evals
* @param {WasmPastaFqPlonkVerificationShifts} shifts
*/
constructor(domain: WasmPastaFqPlonkDomain, max_poly_size: number, max_quot_size: number, urs: WasmPastaFqUrs, evals: WasmPastaFqPlonkVerificationEvals, shifts: WasmPastaFqPlonkVerificationShifts);
/**
*/
domain: WasmPastaFqPlonkDomain;
/**
* @returns {WasmPastaFqPlonkVerificationEvals}
*/
evals: WasmPastaFqPlonkVerificationEvals;
/**
*/
max_poly_size: number;
/**
*/
max_quot_size: number;
/**
*/
shifts: WasmPastaFqPlonkVerificationShifts;
/**
* @returns {WasmPastaFqUrs}
*/
urs: WasmPastaFqUrs;
}
/**
*/
export class WasmPastaFqProofEvaluations {
free(): void;
/**
* @param {Uint8Array} l
* @param {Uint8Array} r
* @param {Uint8Array} o
* @param {Uint8Array} z
* @param {Uint8Array} t
* @param {Uint8Array} f
* @param {Uint8Array} sigma1
* @param {Uint8Array} sigma2
*/
constructor(l: Uint8Array, r: Uint8Array, o: Uint8Array, z: Uint8Array, t: Uint8Array, f: Uint8Array, sigma1: Uint8Array, sigma2: Uint8Array);
/**
* @returns {Uint8Array}
*/
f: Uint8Array;
/**
* @returns {Uint8Array}
*/
l: Uint8Array;
/**
* @returns {Uint8Array}
*/
o: Uint8Array;
/**
* @returns {Uint8Array}
*/
r: Uint8Array;
/**
* @returns {Uint8Array}
*/
sigma1: Uint8Array;
/**
* @returns {Uint8Array}
*/
sigma2: Uint8Array;
/**
* @returns {Uint8Array}
*/
t: Uint8Array;
/**
* @returns {Uint8Array}
*/
z: Uint8Array;
}
/**
*/
export class WasmPastaFqProverCommitments {
free(): void;
/**
* @param {WasmPastaPallasPolyComm} l_comm
* @param {WasmPastaPallasPolyComm} r_comm
* @param {WasmPastaPallasPolyComm} o_comm
* @param {WasmPastaPallasPolyComm} z_comm
* @param {WasmPastaPallasPolyComm} t_comm
*/
constructor(l_comm: WasmPastaPallasPolyComm, r_comm: WasmPastaPallasPolyComm, o_comm: WasmPastaPallasPolyComm, z_comm: WasmPastaPallasPolyComm, t_comm: WasmPastaPallasPolyComm);
/**
* @returns {WasmPastaPallasPolyComm}
*/
l_comm: WasmPastaPallasPolyComm;
/**
* @returns {WasmPastaPallasPolyComm}
*/
o_comm: WasmPastaPallasPolyComm;
/**
* @returns {WasmPastaPallasPolyComm}
*/
r_comm: WasmPastaPallasPolyComm;
/**
* @returns {WasmPastaPallasPolyComm}
*/
t_comm: WasmPastaPallasPolyComm;
/**
* @returns {WasmPastaPallasPolyComm}
*/
z_comm: WasmPastaPallasPolyComm;
}
/**
*/
export class WasmPastaFqProverProof {
free(): void;
/**
* @param {WasmPastaFqProverCommitments} commitments
* @param {WasmPastaFqOpeningProof} proof
* @param {WasmPastaFqProofEvaluations} evals0
* @param {WasmPastaFqProofEvaluations} evals1
* @param {Uint8Array} public_
* @param {WasmVecVecPastaFq} prev_challenges_scalars
* @param {Uint32Array} prev_challenges_comms
*/
constructor(commitments: WasmPastaFqProverCommitments, proof: WasmPastaFqOpeningProof, evals0: WasmPastaFqProofEvaluations, evals1: WasmPastaFqProofEvaluations, public_: Uint8Array, prev_challenges_scalars: WasmVecVecPastaFq, prev_challenges_comms: Uint32Array);
/**
* @returns {WasmPastaFqProverCommitments}
*/
commitments: WasmPastaFqProverCommitments;
/**
* @returns {WasmPastaFqProofEvaluations}
*/
evals0: WasmPastaFqProofEvaluations;
/**
* @returns {WasmPastaFqProofEvaluations}
*/
evals1: WasmPastaFqProofEvaluations;
/**
* @returns {Uint32Array}
*/
prev_challenges_comms: Uint32Array;
/**
* @returns {WasmVecVecPastaFq}
*/
prev_challenges_scalars: WasmVecVecPastaFq;
/**
* @returns {WasmPastaFqOpeningProof}
*/
proof: WasmPastaFqOpeningProof;
/**
* @returns {Uint8Array}
*/
public_: Uint8Array;
}
/**
*/
export class WasmPastaFqUrs {
free(): void;
}
/**
*/
export class WasmPastaPallasPolyComm {
free(): void;
/**
* @param {Uint32Array} unshifted
* @param {WasmPallasGAffine | undefined} shifted
*/
constructor(unshifted: Uint32Array, shifted?: WasmPallasGAffine);
/**
*/
shifted?: WasmPallasGAffine;
/**
* @returns {Uint32Array}
*/
unshifted: Uint32Array;
}
/**
*/
export class WasmPastaVestaPolyComm {
free(): void;
/**
* @param {Uint32Array} unshifted
* @param {WasmVestaGAffine | undefined} shifted
*/
constructor(unshifted: Uint32Array, shifted?: WasmVestaGAffine);
/**
*/
shifted?: WasmVestaGAffine;
/**
* @returns {Uint32Array}
*/
unshifted: Uint32Array;
}
/**
*/
export class WasmPlonkWire {
free(): void;
/**
* @param {number} row
* @param {number} col
*/
constructor(row: number, col: number);
/**
*/
col: number;
/**
*/
row: number;
}
/**
*/
export class WasmPlonkWires {
free(): void;
/**
* @param {number} row
* @param {WasmPlonkWire} l
* @param {WasmPlonkWire} r
* @param {WasmPlonkWire} o
*/
constructor(row: number, l: WasmPlonkWire, r: WasmPlonkWire, o: WasmPlonkWire);
/**
*/
l: WasmPlonkWire;
/**
*/
o: WasmPlonkWire;
/**
*/
r: WasmPlonkWire;
/**
*/
row: number;
}
/**
*/
export class WasmVecVecPallasPolyComm {
free(): void;
/**
* @param {number} n
*/
constructor(n: number);
/**
* @param {Uint32Array} x
*/
push(x: Uint32Array): void;
}
/**
*/
export class WasmVecVecPastaFp {
free(): void;
/**
* @param {number} n
*/
constructor(n: number);
/**
* @param {Uint8Array} x
*/
push(x: Uint8Array): void;
/**
* @param {number} i
* @returns {Uint8Array}
*/
get(i: number): Uint8Array;
/**
* @param {number} i
* @param {Uint8Array} x
*/
set(i: number, x: Uint8Array): void;
}
/**
*/
export class WasmVecVecPastaFq {
free(): void;
/**
* @param {number} n
*/
constructor(n: number);
/**
* @param {Uint8Array} x
*/
push(x: Uint8Array): void;
/**
* @param {number} i
* @returns {Uint8Array}
*/
get(i: number): Uint8Array;
/**
* @param {number} i
* @param {Uint8Array} x
*/
set(i: number, x: Uint8Array): void;
}
/**
*/
export class WasmVecVecVestaPolyComm {
free(): void;
/**
* @param {number} n
*/
constructor(n: number);
/**
* @param {Uint32Array} x
*/
push(x: Uint32Array): void;
}
/**
*/
export class WasmVestaGAffine {
free(): void;
/**
*/
infinity: boolean;
/**
*/
x: Uint8Array;
/**
*/
y: Uint8Array;
}
/**
*/
export class WasmVestaGProjective {
free(): void;
}
/**
*/
export class wbg_rayon_PoolBuilder {
free(): void;
/**
* @returns {string}
*/
mainJS(): string;
/**
* @returns {number}
*/
numThreads(): number;
/**
* @returns {number}
*/
receiver(): number;
/**
*/
build(): void;
}
export type InitInput = RequestInfo | URL | Response | BufferSource | WebAssembly.Module;
export interface InitOutput {
readonly __wasm_init_memory: () => void;
readonly __wbg_wasmpastafpplonkoracles_free: (a: number) => void;
readonly __wbg_get_wasmpastafpplonkoracles_o: (a: number, b: number) => void;
readonly __wbg_set_wasmpastafpplonkoracles_o: (a: number, b: number, c: number) => void;
readonly __wbg_get_wasmpastafpplonkoracles_p_eval0: (a: number, b: number) => void;
readonly __wbg_set_wasmpastafpplonkoracles_p_eval0: (a: number, b: number, c: number) => void;
readonly __wbg_get_wasmpastafpplonkoracles_p_eval1: (a: number, b: number) => void;
readonly __wbg_set_wasmpastafpplonkoracles_p_eval1: (a: number, b: number, c: number) => void;
readonly __wbg_get_wasmpastafpplonkoracles_digest_before_evaluations: (a: number, b: number) => void;
readonly __wbg_set_wasmpastafpplonkoracles_digest_before_evaluations: (a: number, b: number, c: number) => void;
readonly wasmpastafpplonkoracles_new: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number, i: number, j: number) => number;
readonly wasmpastafpplonkoracles_opening_prechallenges: (a: number, b: number) => void;
readonly wasmpastafpplonkoracles_set_opening_prechallenges: (a: number, b: number, c: number) => void;
readonly caml_pasta_fp_plonk_oracles_create: (a: number, b: number, c: number, d: number) => number;
readonly caml_pasta_fp_plonk_oracles_dummy: () => number;
readonly caml_pasta_fp_plonk_oracles_deep_copy: (a: number) => number;
readonly __wbg_wasmpastafpproofevaluations_free: (a: number) => void;
readonly wasmpastafpproofevaluations_new: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number, i: number, j: number, k: number, l: number, m: number, n: number, o: number, p: number) => number;
readonly wasmpastafpproofevaluations_l: (a: number, b: number) => void;
readonly wasmpastafpproofevaluations_r: (a: number, b: number) => void;
readonly wasmpastafpproofevaluations_o: (a: number, b: number) => void;
readonly wasmpastafpproofevaluations_z: (a: number, b: number) => void;
readonly wasmpastafpproofevaluations_t: (a: number, b: number) => void;
readonly wasmpastafpproofevaluations_f: (a: number, b: number) => void;
readonly wasmpastafpproofevaluations_sigma1: (a: number, b: number) => void;
readonly wasmpastafpproofevaluations_sigma2: (a: number, b: number) => void;
readonly wasmpastafpproofevaluations_set_l: (a: number, b: number, c: number) => void;
readonly wasmpastafpproofevaluations_set_r: (a: number, b: number, c: number) => void;
readonly wasmpastafpproofevaluations_set_o: (a: number, b: number, c: number) => void;
readonly wasmpastafpproofevaluations_set_z: (a: number, b: number, c: number) => void;
readonly wasmpastafpproofevaluations_set_t: (a: number, b: number, c: number) => void;
readonly wasmpastafpproofevaluations_set_f: (a: number, b: number, c: number) => void;
readonly wasmpastafpproofevaluations_set_sigma1: (a: number, b: number, c: number) => void;
readonly wasmpastafpproofevaluations_set_sigma2: (a: number, b: number, c: number) => void;
readonly __wbg_wasmpastafpopeningproof_free: (a: number) => void;
readonly __wbg_get_wasmpastafpopeningproof_z1: (a: number, b: number) => void;
readonly __wbg_set_wasmpastafpopeningproof_z1: (a: number, b: number, c: number) => void;
readonly __wbg_get_wasmpastafpopeningproof_z2: (a: number, b: number) => void;
readonly __wbg_set_wasmpastafpopeningproof_z2: (a: number, b: number, c: number) => void;
readonly wasmpastafpopeningproof_new: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number, i: number, j: number) => number;
readonly wasmpastafpopeningproof_lr_0: (a: number, b: number) => void;
readonly wasmpastafpopeningproof_lr_1: (a: number, b: number) => void;
readonly wasmpastafpopeningproof_delta: (a: number) => number;
readonly wasmpastafpopeningproof_sg: (a: number) => number;
readonly wasmpastafpopeningproof_set_lr_0: (a: number, b: number, c: number) => void;
readonly wasmpastafpopeningproof_set_lr_1: (a: number, b: number, c: number) => void;
readonly wasmpastafpopeningproof_set_delta: (a: number, b: number) => void;
readonly wasmpastafpopeningproof_set_sg: (a: number, b: number) => void;
readonly __wbg_wasmpastafpprovercommitments_free: (a: number) => void;
readonly wasmpastafpprovercommitments_new: (a: number, b: number, c: number, d: number, e: number) => number;
readonly wasmpastafpprovercommitments_l_comm: (a: number) => number;
readonly wasmpastafpprovercommitments_r_comm: (a: number) => number;
readonly wasmpastafpprovercommitments_o_comm: (a: number) => number;
readonly wasmpastafpprovercommitments_z_comm: (a: number) => number;
readonly wasmpastafpprovercommitments_t_comm: (a: number) => number;
readonly wasmpastafpprovercommitments_set_l_comm: (a: number, b: number) => void;
readonly wasmpastafpprovercommitments_set_r_comm: (a: number, b: number) => void;
readonly wasmpastafpprovercommitments_set_o_comm: (a: number, b: number) => void;
readonly wasmpastafpprovercommitments_set_z_comm: (a: number, b: number) => void;
readonly wasmpastafpprovercommitments_set_t_comm: (a: number, b: number) => void;
readonly __wbg_wasmvecvecpastafp_free: (a: number) => void;
readonly wasmvecvecpastafp_create: (a: number) => number;
readonly wasmvecvecpastafp_push: (a: number, b: number, c: number) => void;
readonly wasmvecvecpastafp_get: (a: number, b: number, c: number) => void;
readonly wasmvecvecpastafp_set: (a: number, b: number, c: number, d: number) => void;
readonly __wbg_wasmpastafpproverproof_free: (a: number) => void;
readonly wasmpastafpproverproof_new: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number, i: number) => number;
readonly wasmpastafpproverproof_commitments: (a: number) => number;
readonly wasmpastafpproverproof_proof: (a: number) => number;
readonly wasmpastafpproverproof_evals0: (a: number) => number;
readonly wasmpastafpproverproof_evals1: (a: number) => number;
readonly wasmpastafpproverproof_public_: (a: number, b: number) => void;
readonly wasmpastafpproverproof_prev_challenges_scalars: (a: number) => number;
readonly wasmpastafpproverproof_prev_challenges_comms: (a: number, b: number) => void;
readonly wasmpastafpproverproof_set_commitments: (a: number, b: number) => void;
readonly wasmpastafpproverproof_set_proof: (a: number, b: number) => void;
readonly wasmpastafpproverproof_set_evals0: (a: number, b: number) => void;
readonly wasmpastafpproverproof_set_evals1: (a: number, b: number) => void;
readonly wasmpastafpproverproof_set_public_: (a: number, b: number, c: number) => void;
readonly wasmpastafpproverproof_set_prev_challenges_scalars: (a: number, b: number) => void;
readonly wasmpastafpproverproof_set_prev_challenges_comms: (a: number, b: number, c: number) => void;
readonly caml_pasta_fp_plonk_proof_create: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number, i: number) => number;
readonly caml_pasta_fp_plonk_proof_verify: (a: number, b: number, c: number, d: number) => number;
readonly __wbg_wasmvecvecvestapolycomm_free: (a: number) => void;
readonly wasmvecvecvestapolycomm_push: (a: number, b: number, c: number) => void;
readonly caml_pasta_fp_plonk_proof_batch_verify: (a: number, b: number, c: number, d: number, e: number) => number;
readonly caml_pasta_fp_plonk_proof_dummy: () => number;
readonly caml_pasta_fp_plonk_proof_deep_copy: (a: number) => number;
readonly wasmvecvecvestapolycomm_create: (a: number) => number;
readonly caml_pasta_fp_size_in_bits: () => number;
readonly caml_pasta_fp_size: (a: number) => void;
readonly caml_pasta_fp_add: (a: number, b: number, c: number, d: number, e: number) => void;
readonly caml_pasta_fp_sub: (a: number, b: number, c: number, d: number, e: number) => void;
readonly caml_pasta_fp_negate: (a: number, b: number, c: number) => void;
readonly caml_pasta_fp_mul: (a: number, b: number, c: number, d: number, e: number) => void;
readonly caml_pasta_fp_div: (a: number, b: number, c: number, d: number, e: number) => void;
readonly caml_pasta_fp_inv: (a: number, b: number, c: number) => void;
readonly caml_pasta_fp_square: (a: number, b: number, c: number) => void;
readonly caml_pasta_fp_is_square: (a: number, b: number) => number;
readonly caml_pasta_fp_sqrt: (a: number, b: number, c: number) => void;
readonly caml_pasta_fp_of_int: (a: number, b: number) => void;
readonly caml_pasta_fp_to_string: (a: number, b: number, c: number) => void;
readonly caml_pasta_fp_of_string: (a: number, b: number, c: number) => void;
readonly caml_pasta_fp_print: (a: number, b: number) => void;
readonly caml_pasta_fp_compare: (a: number, b: number, c: number, d: number) => number;
readonly caml_pasta_fp_equal: (a: number, b: number, c: number, d: number) => number;
readonly caml_pasta_fp_random: (a: number) => void;
readonly caml_pasta_fp_rng: (a: number, b: number) => void;
readonly caml_pasta_fp_to_bigint: (a: number, b: number, c: number) => void;
readonly caml_pasta_fp_of_bigint: (a: number, b: number, c: number) => void;
readonly caml_pasta_fp_two_adic_root_of_unity: (a: number) => void;
readonly caml_pasta_fp_domain_generator: (a: number, b: number) => void;
readonly caml_pasta_fp_to_bytes: (a: number, b: number, c: number) => void;
readonly caml_pasta_fp_of_bytes: (a: number, b: number, c: number) => void;
readonly caml_pasta_fp_deep_copy: (a: number, b: number, c: number) => void;
readonly caml_pasta_fq_size: (a: number) => void;
readonly caml_pasta_fq_add: (a: number, b: number, c: number, d: number, e: number) => void;
readonly caml_pasta_fq_sub: (a: number, b: number, c: number, d: number, e: number) => void;
readonly caml_pasta_fq_negate: (a: number, b: number, c: number) => void;
readonly caml_pasta_fq_mul: (a: number, b: number, c: number, d: number, e: number) => void;
readonly caml_pasta_fq_div: (a: number, b: number, c: number, d: number, e: number) => void;
readonly caml_pasta_fq_inv: (a: number, b: number, c: number) => void;
readonly caml_pasta_fq_square: (a: number, b: number, c: number) => void;
readonly caml_pasta_fq_is_square: (a: number, b: number) => number;
readonly caml_pasta_fq_sqrt: (a: number, b: number, c: number) => void;
readonly caml_pasta_fq_of_int: (a: number, b: number) => void;
readonly caml_pasta_fq_to_string: (a: number, b: number, c: number) => void;
readonly caml_pasta_fq_of_string: (a: number, b: number, c: number) => void;
readonly caml_pasta_fq_print: (a: number, b: number) => void;
readonly caml_pasta_fq_compare: (a: number, b: number, c: number, d: number) => number;
readonly caml_pasta_fq_equal: (a: number, b: number, c: number, d: number) => number;
readonly caml_pasta_fq_random: (a: number) => void;
readonly caml_pasta_fq_rng: (a: number, b: number) => void;
readonly caml_pasta_fq_to_bigint: (a: number, b: number, c: number) => void;
readonly caml_pasta_fq_of_bigint: (a: number, b: number, c: number) => void;
readonly caml_pasta_fq_two_adic_root_of_unity: (a: number) => void;
readonly caml_pasta_fq_domain_generator: (a: number, b: number) => void;
readonly caml_pasta_fq_to_bytes: (a: number, b: number, c: number) => void;
readonly caml_pasta_fq_of_bytes: (a: number, b: number, c: number) => void;
readonly caml_pasta_fq_deep_copy: (a: number, b: number, c: number) => void;
readonly __wbg_wasmpastafqplonkoracles_free: (a: number) => void;
readonly __wbg_get_wasmpastafqplonkoracles_o: (a: number, b: number) => void;
readonly __wbg_set_wasmpastafqplonkoracles_o: (a: number, b: number, c: number) => void;
readonly __wbg_get_wasmpastafqplonkoracles_p_eval0: (a: number, b: number) => void;
readonly __wbg_set_wasmpastafqplonkoracles_p_eval0: (a: number, b: number, c: number) => void;
readonly __wbg_get_wasmpastafqplonkoracles_p_eval1: (a: number, b: number) => void;
readonly __wbg_set_wasmpastafqplonkoracles_p_eval1: (a: number, b: number, c: number) => void;
readonly __wbg_get_wasmpastafqplonkoracles_digest_before_evaluations: (a: number, b: number) => void;
readonly __wbg_set_wasmpastafqplonkoracles_digest_before_evaluations: (a: number, b: number, c: number) => void;
readonly wasmpastafqplonkoracles_new: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number, i: number, j: number) => number;
readonly wasmpastafqplonkoracles_opening_prechallenges: (a: number, b: number) => void;
readonly wasmpastafqplonkoracles_set_opening_prechallenges: (a: number, b: number, c: number) => void;
readonly caml_pasta_fq_plonk_oracles_create: (a: number, b: number, c: number, d: number) => number;
readonly caml_pasta_fq_plonk_oracles_dummy: () => number;
readonly caml_pasta_fq_plonk_oracles_deep_copy: (a: number) => number;
readonly caml_pasta_fq_size_in_bits: () => number;
readonly __wbg_wasmpastafpplonkdomain_free: (a: number) => void;
readonly __wbg_get_wasmpastafpplonkdomain_log_size_of_group: (a: number) => number;
readonly __wbg_set_wasmpastafpplonkdomain_log_size_of_group: (a: number, b: number) => void;
readonly __wbg_get_wasmpastafpplonkdomain_group_gen: (a: number, b: number) => void;
readonly __wbg_set_wasmpastafpplonkdomain_group_gen: (a: number, b: number, c: number) => void;
readonly wasmpastafpplonkdomain_new: (a: number, b: number, c: number) => number;
readonly __wbg_wasmpastafpplonkverificationevals_free: (a: number) => void;
readonly wasmpastafpplonkverificationevals_sigma_comm0: (a: number) => number;
readonly wasmpastafpplonkverificationevals_sigma_comm1: (a: number) => number;
readonly wasmpastafpplonkverificationevals_sigma_comm2: (a: number) => number;
readonly wasmpastafpplonkverificationevals_ql_comm: (a: number) => number;
readonly wasmpastafpplonkverificationevals_qr_comm: (a: number) => number;
readonly wasmpastafpplonkverificationevals_qo_comm: (a: number) => number;
readonly wasmpastafpplonkverificationevals_qm_comm: (a: number) => number;
readonly wasmpastafpplonkverificationevals_qc_comm: (a: number) => number;
readonly wasmpastafpplonkverificationevals_rcm_comm0: (a: number) => number;
readonly wasmpastafpplonkverificationevals_rcm_comm1: (a: number) => number;
readonly wasmpastafpplonkverificationevals_rcm_comm2: (a: number) => number;
readonly wasmpastafpplonkverificationevals_psm_comm: (a: number) => number;
readonly wasmpastafpplonkverificationevals_add_comm: (a: number) => number;
readonly wasmpastafpplonkverificationevals_mul1_comm: (a: number) => number;
readonly wasmpastafpplonkverificationevals_mul2_comm: (a: number) => number;
readonly wasmpastafpplonkverificationevals_emul1_comm: (a: number) => number;
readonly wasmpastafpplonkverificationevals_emul2_comm: (a: number) => number;
readonly wasmpastafpplonkverificationevals_emul3_comm: (a: number) => number;
readonly wasmpastafpplonkverificationevals_set_sigma_comm0: (a: number, b: number) => void;
readonly wasmpastafpplonkverificationevals_set_sigma_comm1: (a: number, b: number) => void;
readonly wasmpastafpplonkverificationevals_set_sigma_comm2: (a: number, b: number) => void;
readonly wasmpastafpplonkverificationevals_set_ql_comm: (a: number, b: number) => void;
readonly wasmpastafpplonkverificationevals_set_qr_comm: (a: number, b: number) => void;
readonly wasmpastafpplonkverificationevals_set_qo_comm: (a: number, b: number) => void;
readonly wasmpastafpplonkverificationevals_set_qm_comm: (a: number, b: number) => void;
readonly wasmpastafpplonkverificationevals_set_qc_comm: (a: number, b: number) => void;
readonly wasmpastafpplonkverificationevals_set_rcm_comm0: (a: number, b: number) => void;
readonly wasmpastafpplonkverificationevals_set_rcm_comm1: (a: number, b: number) => void;
readonly wasmpastafpplonkverificationevals_set_rcm_comm2: (a: number, b: number) => void;
readonly wasmpastafpplonkverificationevals_set_psm_comm: (a: number, b: number) => void;
readonly wasmpastafpplonkverificationevals_set_add_comm: (a: number, b: number) => void;
readonly wasmpastafpplonkverificationevals_set_mul1_comm: (a: number, b: number) => void;
readonly wasmpastafpplonkverificationevals_set_mul2_comm: (a: number, b: number) => void;
readonly wasmpastafpplonkverificationevals_set_emul1_comm: (a: number, b: number) => void;
readonly wasmpastafpplonkverificationevals_set_emul2_comm: (a: number, b: number) => void;
readonly wasmpastafpplonkverificationevals_set_emul3_comm: (a: number, b: number) => void;
readonly wasmpastafpplonkverificationevals_new: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number, i: number, j: number, k: number, l: number, m: number, n: number, o: number, p: number, q: number, r: number) => number;
readonly __wbg_wasmpastafpplonkverificationshifts_free: (a: number) => void;
readonly __wbg_get_wasmpastafpplonkverificationshifts_o: (a: number, b: number) => void;
readonly __wbg_set_wasmpastafpplonkverificationshifts_o: (a: number, b: number, c: number) => void;
readonly wasmpastafpplonkverificationshifts_new: (a: number, b: number, c: number, d: number) => number;
readonly __wbg_wasmpastafpplonkverifierindex_free: (a: number) => void;
readonly __wbg_get_wasmpastafpplonkverifierindex_domain: (a: number) => number;
readonly __wbg_set_wasmpastafpplonkverifierindex_domain: (a: number, b: number) => void;
readonly __wbg_get_wasmpastafpplonkverifierindex_max_poly_size: (a: number) => number;
readonly __wbg_set_wasmpastafpplonkverifierindex_max_poly_size: (a: number, b: number) => void;
readonly __wbg_get_wasmpastafpplonkverifierindex_max_quot_size: (a: number) => number;
readonly __wbg_set_wasmpastafpplonkverifierindex_max_quot_size: (a: number, b: number) => void;
readonly __wbg_get_wasmpastafpplonkverifierindex_shifts: (a: number) => number;
readonly __wbg_set_wasmpastafpplonkverifierindex_shifts: (a: number, b: number) => void;
readonly wasmpastafpplonkverifierindex_new: (a: number, b: number, c: number, d: number, e: number, f: number) => number;
readonly wasmpastafpplonkverifierindex_urs: (a: number) => number;
readonly wasmpastafpplonkverifierindex_set_urs: (a: number, b: number) => void;
readonly wasmpastafpplonkverifierindex_evals: (a: number) => number;
readonly wasmpastafpplonkverifierindex_set_evals: (a: number, b: number) => void;
readonly caml_pasta_fp_plonk_verifier_index_read: (a: number, b: number, c: number, d: number, e: number) => number;
readonly caml_pasta_fp_plonk_verifier_index_write: (a: number, b: number, c: number, d: number) => void;
readonly caml_pasta_fp_plonk_verifier_index_create: (a: number) => number;
readonly caml_pasta_fp_plonk_verifier_index_shifts: (a: number) => number;
readonly caml_pasta_fp_plonk_verifier_index_dummy: () => number;
readonly caml_pasta_fp_plonk_verifier_index_deep_copy: (a: number) => number;
readonly __wbg_get_wasmpastafqplonkdomain_group_gen: (a: number, b: number) => void;
readonly __wbg_set_wasmpastafqplonkdomain_group_gen: (a: number, b: number, c: number) => void;
readonly wasmpastafqplonkdomain_new: (a: number, b: number, c: number) => number;
readonly __wbg_wasmpastafqplonkverificationevals_free: (a: number) => void;
readonly __wbg_get_wasmpastafqplonkverificationshifts_o: (a: number, b: number) => void;
readonly __wbg_set_wasmpastafqplonkverificationshifts_o: (a: number, b: number, c: number) => void;
readonly wasmpastafqplonkverificationshifts_new: (a: number, b: number, c: number, d: number) => number;
readonly __wbg_wasmpastafqplonkverifierindex_free: (a: number) => void;
readonly wasmpastafqplonkverifierindex_new: (a: number, b: number, c: number, d: number, e: number, f: number) => number;
readonly wasmpastafqplonkverifierindex_evals: (a: number) => number;
readonly wasmpastafqplonkverifierindex_set_evals: (a: number, b: number) => void;
readonly caml_pasta_fq_plonk_verifier_index_read: (a: number, b: number, c: number, d: number, e: number) => number;
readonly caml_pasta_fq_plonk_verifier_index_write: (a: number, b: number, c: number, d: number) => void;
readonly caml_pasta_fq_plonk_verifier_index_create: (a: number) => number;
readonly caml_pasta_fq_plonk_verifier_index_shifts: (a: number) => number;
readonly caml_pasta_fq_plonk_verifier_index_dummy: () => number;
readonly caml_pasta_fq_plonk_verifier_index_deep_copy: (a: number) => number;
readonly wasmpastafqplonkverificationevals_sigma_comm1: (a: number) => number;
readonly wasmpastafqplonkverificationevals_sigma_comm2: (a: number) => number;
readonly wasmpastafqplonkverificationevals_ql_comm: (a: number) => number;
readonly wasmpastafqplonkverificationevals_qr_comm: (a: number) => number;
readonly wasmpastafqplonkverificationevals_qo_comm: (a: number) => number;
readonly wasmpastafqplonkverificationevals_qm_comm: (a: number) => number;
readonly wasmpastafqplonkverificationevals_qc_comm: (a: number) => number;
readonly wasmpastafqplonkverificationevals_rcm_comm0: (a: number) => number;
readonly wasmpastafqplonkverificationevals_rcm_comm1: (a: number) => number;
readonly wasmpastafqplonkverificationevals_rcm_comm2: (a: number) => number;
readonly wasmpastafqplonkverificationevals_psm_comm: (a: number) => number;
readonly wasmpastafqplonkverificationevals_add_comm: (a: number) => number;
readonly wasmpastafqplonkverificationevals_mul1_comm: (a: number) => number;
readonly wasmpastafqplonkverificationevals_mul2_comm: (a: number) => number;
readonly wasmpastafqplonkverificationevals_emul1_comm: (a: number) => number;
readonly wasmpastafqplonkverificationevals_emul2_comm: (a: number) => number;
readonly wasmpastafqplonkverificationevals_emul3_comm: (a: number) => number;
readonly __wbg_set_wasmpastafqplonkdomain_log_size_of_group: (a: number, b: number) => void;
readonly __wbg_set_wasmpastafqplonkverifierindex_max_poly_size: (a: number, b: number) => void;
readonly __wbg_get_wasmpastafqplonkverifierindex_shifts: (a: number) => number;
readonly __wbg_get_wasmpastafqplonkverifierindex_max_quot_size: (a: number) => number;
readonly wasmpastafqplonkverifierindex_urs: (a: number) => number;
readonly wasmpastafqplonkverificationevals_set_sigma_comm1: (a: number, b: number) => void;
readonly wasmpastafqplonkverificationevals_set_sigma_comm2: (a: number, b: number) => void;
readonly wasmpastafqplonkverificationevals_set_ql_comm: (a: number, b: number) => void;
readonly wasmpastafqplonkverificationevals_set_qr_comm: (a: number, b: number) => void;
readonly wasmpastafqplonkverificationevals_set_qo_comm: (a: number, b: number) => void;
readonly wasmpastafqplonkverificationevals_set_qm_comm: (a: number, b: number) => void;
readonly wasmpastafqplonkverificationevals_set_qc_comm: (a: number, b: number) => void;
readonly wasmpastafqplonkverificationevals_set_rcm_comm0: (a: number, b: number) => void;
readonly wasmpastafqplonkverificationevals_set_rcm_comm1: (a: number, b: number) => void;
readonly wasmpastafqplonkverificationevals_set_rcm_comm2: (a: number, b: number) => void;
readonly wasmpastafqplonkverificationevals_set_psm_comm: (a: number, b: number) => void;
readonly wasmpastafqplonkverificationevals_set_add_comm: (a: number, b: number) => void;
readonly wasmpastafqplonkverificationevals_set_mul1_comm: (a: number, b: number) => void;
readonly wasmpastafqplonkverificationevals_set_mul2_comm: (a: number, b: number) => void;
readonly wasmpastafqplonkverificationevals_set_emul1_comm: (a: number, b: number) => void;
readonly wasmpastafqplonkverificationevals_set_emul2_comm: (a: number, b: number) => void;
readonly wasmpastafqplonkverificationevals_set_emul3_comm: (a: number, b: number) => void;
readonly __wbg_set_wasmpastafqplonkverifierindex_domain: (a: number, b: number) => void;
readonly wasmpastafqplonkverificationevals_sigma_comm0: (a: number) => number;
readonly __wbg_get_wasmpastafqplonkdomain_log_size_of_group: (a: number) => number;
readonly __wbg_get_wasmpastafqplonkverifierindex_max_poly_size: (a: number) => number;
readonly __wbg_set_wasmpastafpplonkverificationshifts_r: (a: number, b: number, c: number) => void;
readonly __wbg_set_wasmpastafqplonkverificationshifts_r: (a: number, b: number, c: number) => void;
readonly wasmpastafqplonkverificationevals_set_sigma_comm0: (a: number, b: number) => void;
readonly __wbg_set_wasmpastafqplonkverifierindex_shifts: (a: number, b: number) => void;
readonly __wbg_wasmpastafqplonkdomain_free: (a: number) => void;
readonly __wbg_wasmpastafqplonkverificationshifts_free: (a: number) => void;
readonly __wbg_set_wasmpastafqplonkverifierindex_max_quot_size: (a: number, b: number) => void;
readonly wasmpastafqplonkverifierindex_set_urs: (a: number, b: number) => void;
readonly wasmpastafqplonkverificationevals_new: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number, i: number, j: number, k: number, l: number, m: number, n: number, o: number, p: number, q: number, r: number) => number;
readonly __wbg_get_wasmpastafpplonkverificationshifts_r: (a: number, b: number) => void;
readonly __wbg_get_wasmpastafqplonkverificationshifts_r: (a: number, b: number) => void;
readonly __wbg_get_wasmpastafqplonkverifierindex_domain: (a: number) => number;
readonly __wbg_wasmpallasgprojective_free: (a: number) => void;
readonly __wbg_wasmpallasgaffine_free: (a: number) => void;
readonly __wbg_get_wasmpallasgaffine_x: (a: number, b: number) => void;
readonly __wbg_set_wasmpallasgaffine_x: (a: number, b: number, c: number) => void;
readonly __wbg_get_wasmpallasgaffine_y: (a: number, b: number) => void;
readonly __wbg_set_wasmpallasgaffine_y: (a: number, b: number, c: number) => void;
readonly __wbg_get_wasmpallasgaffine_infinity: (a: number) => number;
readonly __wbg_set_wasmpallasgaffine_infinity: (a: number, b: number) => void;
readonly caml_pasta_pallas_one: () => number;
readonly caml_pasta_pallas_add: (a: number, b: number) => number;
readonly caml_pasta_pallas_sub: (a: number, b: number) => number;
readonly caml_pasta_pallas_negate: (a: number) => number;
readonly caml_pasta_pallas_double: (a: number) => number;
readonly caml_pasta_pallas_scale: (a: number, b: number, c: number) => number;
readonly caml_pasta_pallas_random: () => number;
readonly caml_pasta_pallas_rng: (a: number) => number;
readonly caml_pasta_pallas_endo_base: (a: number) => void;
readonly caml_pasta_pallas_endo_scalar: (a: number) => void;
readonly caml_pasta_pallas_to_affine: (a: number) => number;
readonly caml_pasta_pallas_of_affine: (a: number) => number;
readonly caml_pasta_pallas_of_affine_coordinates: (a: number, b: number, c: number, d: number) => number;
readonly caml_pasta_pallas_affine_deep_copy: (a: number) => number;
readonly caml_pasta_pallas_affine_one: () => number;
readonly __wbg_get_wasmvestagaffine_x: (a: number, b: number) => void;
readonly __wbg_set_wasmvestagaffine_x: (a: number, b: number, c: number) => void;
readonly __wbg_get_wasmvestagaffine_y: (a: number, b: number) => void;
readonly __wbg_set_wasmvestagaffine_y: (a: number, b: number, c: number) => void;
readonly caml_pasta_vesta_one: () => number;
readonly caml_pasta_vesta_add: (a: number, b: number) => number;
readonly caml_pasta_vesta_sub: (a: number, b: number) => number;
readonly caml_pasta_vesta_negate: (a: number) => number;
readonly caml_pasta_vesta_double: (a: number) => number;
readonly caml_pasta_vesta_scale: (a: number, b: number, c: number) => number;
readonly caml_pasta_vesta_random: () => number;
readonly caml_pasta_vesta_rng: (a: number) => number;
readonly caml_pasta_vesta_endo_base: (a: number) => void;
readonly caml_pasta_vesta_endo_scalar: (a: number) => void;
readonly caml_pasta_vesta_to_affine: (a: number) => number;
readonly caml_pasta_vesta_of_affine: (a: number) => number;
readonly caml_pasta_vesta_of_affine_coordinates: (a: number, b: number, c: number, d: number) => number;
readonly caml_pasta_vesta_affine_one: () => number;
readonly __wbg_set_wasmvestagaffine_infinity: (a: number, b: number) => void;
readonly __wbg_get_wasmvestagaffine_infinity: (a: number) => number;
readonly caml_pasta_vesta_affine_deep_copy: (a: number) => number;
readonly __wbg_wasmvestagprojective_free: (a: number) => void;
readonly __wbg_wasmvestagaffine_free: (a: number) => void;
readonly __wbg_wasmpastafpurs_free: (a: number) => void;
readonly caml_pasta_fp_urs_create: (a: number) => number;
readonly caml_pasta_fp_urs_write: (a: number, b: number, c: number, d: number) => void;
readonly caml_pasta_fp_urs_read: (a: number, b: number, c: number, d: number) => number;
readonly caml_pasta_fp_urs_lagrange_commitment: (a: number, b: number, c: number) => number;
readonly caml_pasta_fp_urs_commit_evaluations: (a: number, b: number, c: number, d: number) => number;
readonly caml_pasta_fp_urs_b_poly_commitment: (a: number, b: number, c: number) => number;
readonly caml_pasta_fp_urs_batch_accumulator_check: (a: number, b: number, c: number, d: number, e: number) => number;
readonly caml_pasta_fp_urs_h: (a: number) => number;
readonly caml_pasta_fq_urs_create: (a: number) => number;
readonly caml_pasta_fq_urs_write: (a: number, b: number, c: number, d: number) => void;
readonly caml_pasta_fq_urs_read: (a: number, b: number, c: number, d: number) => number;
readonly caml_pasta_fq_urs_lagrange_commitment: (a: number, b: number, c: number) => number;
readonly caml_pasta_fq_urs_commit_evaluations: (a: number, b: number, c: number, d: number) => number;
readonly caml_pasta_fq_urs_b_poly_commitment: (a: number, b: number, c: number) => number;
readonly caml_pasta_fq_urs_batch_accumulator_check: (a: number, b: number, c: number, d: number, e: number) => number;
readonly __wbg_wasmpastafqplonkgatevector_free: (a: number) => void;
readonly __wbg_wasmpastafqplonkgate_free: (a: number) => void;
readonly __wbg_get_wasmpastafqplonkgate_typ: (a: number) => number;
readonly __wbg_set_wasmpastafqplonkgate_typ: (a: number, b: number) => void;
readonly __wbg_get_wasmpastafqplonkgate_wires: (a: number) => number;
readonly __wbg_set_wasmpastafqplonkgate_wires: (a: number, b: number) => void;
readonly wasmpastafqplonkgate_new: (a: number, b: number, c: number, d: number) => number;
readonly wasmpastafqplonkgate_c: (a: number, b: number) => void;
readonly wasmpastafqplonkgate_set_c: (a: number, b: number, c: number) => void;
readonly caml_pasta_fq_plonk_gate_vector_create: () => number;
readonly caml_pasta_fq_plonk_gate_vector_add: (a: number, b: number) => void;
readonly caml_pasta_fq_plonk_gate_vector_get: (a: number, b: number) => number;
readonly caml_pasta_fq_plonk_gate_vector_wrap: (a: number, b: number, c: number) => void;
readonly __wbg_wasmpastafqplonkindex_free: (a: number) => void;
readonly caml_pasta_fq_plonk_index_create: (a: number, b: number, c: number) => number;
readonly caml_pasta_fq_plonk_index_max_degree: (a: number) => number;
readonly caml_pasta_fq_plonk_index_public_inputs: (a: number) => number;
readonly caml_pasta_fq_plonk_index_domain_d1_size: (a: number) => number;
readonly caml_pasta_fq_plonk_index_domain_d4_size: (a: number) => number;
readonly caml_pasta_fq_plonk_index_domain_d8_size: (a: number) => number;
readonly caml_pasta_fq_plonk_index_read: (a: number, b: number, c: number, d: number, e: number) => number;
readonly caml_pasta_fq_plonk_index_write: (a: number, b: number, c: number, d: number) => void;
readonly caml_pasta_fq_urs_h: (a: number) => number;
readonly __wbg_wasmpastafqurs_free: (a: number) => void;
readonly __wbg_wasmpastafpplonkgatevector_free: (a: number) => void;
readonly __wbg_wasmpastafpplonkgate_free: (a: number) => void;
readonly __wbg_get_wasmpastafpplonkgate_typ: (a: number) => number;
readonly __wbg_set_wasmpastafpplonkgate_typ: (a: number, b: number) => void;
readonly __wbg_get_wasmpastafpplonkgate_wires: (a: number) => number;
readonly __wbg_set_wasmpastafpplonkgate_wires: (a: number, b: number) => void;
readonly wasmpastafpplonkgate_new: (a: number, b: number, c: number, d: number) => number;
readonly wasmpastafpplonkgate_c: (a: number, b: number) => void;
readonly wasmpastafpplonkgate_set_c: (a: number, b: number, c: number) => void;
readonly caml_pasta_fp_plonk_gate_vector_create: () => number;
readonly caml_pasta_fp_plonk_gate_vector_add: (a: number, b: number) => void;
readonly caml_pasta_fp_plonk_gate_vector_get: (a: number, b: number) => number;
readonly caml_pasta_fp_plonk_gate_vector_wrap: (a: number, b: number, c: number) => void;
readonly __wbg_wasmpastafpplonkindex_free: (a: number) => void;
readonly caml_pasta_fp_plonk_index_create: (a: number, b: number, c: number) => number;
readonly caml_pasta_fp_plonk_index_max_degree: (a: number) => number;
readonly caml_pasta_fp_plonk_index_public_inputs: (a: number) => number;
readonly caml_pasta_fp_plonk_index_domain_d1_size: (a: number) => number;
readonly caml_pasta_fp_plonk_index_domain_d4_size: (a: number) => number;
readonly caml_pasta_fp_plonk_index_domain_d8_size: (a: number) => number;
readonly caml_pasta_fp_plonk_index_read: (a: number, b: number, c: number, d: number, e: number) => number;
readonly caml_pasta_fp_plonk_index_write: (a: number, b: number, c: number, d: number) => void;
readonly wasmpastavestapolycomm_new: (a: number, b: number, c: number) => number;
readonly wasmpastavestapolycomm_unshifted: (a: number, b: number) => void;
readonly wasmpastavestapolycomm_set_unshifted: (a: number, b: number, c: number) => void;
readonly __wbg_wasmpastapallaspolycomm_free: (a: number) => void;
readonly __wbg_get_wasmpastapallaspolycomm_shifted: (a: number) => number;
readonly __wbg_set_wasmpastapallaspolycomm_shifted: (a: number, b: number) => void;
readonly wasmpastapallaspolycomm_new: (a: number, b: number, c: number) => number;
readonly wasmpastapallaspolycomm_unshifted: (a: number, b: number) => void;
readonly wasmpastapallaspolycomm_set_unshifted: (a: number, b: number, c: number) => void;
readonly __wbg_wasmplonkwire_free: (a: number) => void;
readonly __wbg_get_wasmplonkwire_row: (a: number) => number;
readonly __wbg_set_wasmplonkwire_row: (a: number, b: number) => void;
readonly __wbg_get_wasmplonkwire_col: (a: number) => number;
readonly __wbg_set_wasmplonkwire_col: (a: number, b: number) => void;
readonly wasmplonkwire_new: (a: number, b: number) => number;
readonly __wbg_wasmplonkwires_free: (a: number) => void;
readonly __wbg_get_wasmplonkwires_l: (a: number) => number;
readonly __wbg_set_wasmplonkwires_l: (a: number, b: number) => void;
readonly __wbg_get_wasmplonkwires_r: (a: number) => number;
readonly __wbg_set_wasmplonkwires_r: (a: number, b: number) => void;
readonly __wbg_get_wasmplonkwires_o: (a: number) => number;
readonly __wbg_set_wasmplonkwires_o: (a: number, b: number) => void;
readonly wasmplonkwires_new: (a: number, b: number, c: number, d: number) => number;
readonly __wbg_wasmpastavestapolycomm_free: (a: number) => void;
readonly __wbg_set_wasmpastavestapolycomm_shifted: (a: number, b: number) => void;
readonly __wbg_get_wasmpastavestapolycomm_shifted: (a: number) => number;
readonly __wbg_get_wasmplonkwires_row: (a: number) => number;
readonly __wbg_set_wasmplonkwires_row: (a: number, b: number) => void;
readonly caml_bigint_256_of_numeral: (a: number, b: number, c: number, d: number, e: number) => void;
readonly caml_bigint_256_of_decimal_string: (a: number, b: number, c: number) => void;
readonly caml_bigint_256_num_limbs: () => number;
readonly caml_bigint_256_bytes_per_limb: () => number;
readonly caml_bigint_256_div: (a: number, b: number, c: number, d: number, e: number) => void;
readonly caml_bigint_256_compare: (a: number, b: number, c: number, d: number) => number;
readonly caml_bigint_256_print: (a: number, b: number) => void;
readonly caml_bigint_256_to_string: (a: number, b: number, c: number) => void;
readonly caml_bigint_256_test_bit: (a: number, b: number, c: number) => number;
readonly caml_bigint_256_to_bytes: (a: number, b: number, c: number) => void;
readonly caml_bigint_256_of_bytes: (a: number, b: number, c: number) => void;
readonly caml_bigint_256_deep_copy: (a: number, b: number, c: number) => void;
readonly greet: (a: number, b: number) => void;
readonly console_log: (a: number, b: number) => void;
readonly create_zero_u32_ptr: () => number;
readonly free_u32_ptr: (a: number) => void;
readonly set_u32_ptr: (a: number, b: number) => void;
readonly wait_until_non_zero: (a: number) => number;
readonly __wbg_wasmpastafqproofevaluations_free: (a: number) => void;
readonly wasmpastafqproofevaluations_new: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number, i: number, j: number, k: number, l: number, m: number, n: number, o: number, p: number) => number;
readonly wasmpastafqproofevaluations_l: (a: number, b: number) => void;
readonly wasmpastafqproofevaluations_r: (a: number, b: number) => void;
readonly wasmpastafqproofevaluations_o: (a: number, b: number) => void;
readonly wasmpastafqproofevaluations_z: (a: number, b: number) => void;
readonly wasmpastafqproofevaluations_t: (a: number, b: number) => void;
readonly wasmpastafqproofevaluations_f: (a: number, b: number) => void;
readonly wasmpastafqproofevaluations_sigma1: (a: number, b: number) => void;
readonly wasmpastafqproofevaluations_sigma2: (a: number, b: number) => void;
readonly wasmpastafqproofevaluations_set_l: (a: number, b: number, c: number) => void;
readonly wasmpastafqproofevaluations_set_r: (a: number, b: number, c: number) => void;
readonly wasmpastafqproofevaluations_set_o: (a: number, b: number, c: number) => void;
readonly wasmpastafqproofevaluations_set_z: (a: number, b: number, c: number) => void;
readonly wasmpastafqproofevaluations_set_t: (a: number, b: number, c: number) => void;
readonly wasmpastafqproofevaluations_set_f: (a: number, b: number, c: number) => void;
readonly wasmpastafqproofevaluations_set_sigma1: (a: number, b: number, c: number) => void;
readonly wasmpastafqproofevaluations_set_sigma2: (a: number, b: number, c: number) => void;
readonly __wbg_wasmpastafqopeningproof_free: (a: number) => void;
readonly __wbg_get_wasmpastafqopeningproof_z1: (a: number, b: number) => void;
readonly __wbg_set_wasmpastafqopeningproof_z1: (a: number, b: number, c: number) => void;
readonly __wbg_get_wasmpastafqopeningproof_z2: (a: number, b: number) => void;
readonly __wbg_set_wasmpastafqopeningproof_z2: (a: number, b: number, c: number) => void;
readonly wasmpastafqopeningproof_new: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number, i: number, j: number) => number;
readonly wasmpastafqopeningproof_lr_0: (a: number, b: number) => void;
readonly wasmpastafqopeningproof_lr_1: (a: number, b: number) => void;
readonly wasmpastafqopeningproof_delta: (a: number) => number;
readonly wasmpastafqopeningproof_sg: (a: number) => number;
readonly wasmpastafqopeningproof_set_lr_0: (a: number, b: number, c: number) => void;
readonly wasmpastafqopeningproof_set_lr_1: (a: number, b: number, c: number) => void;
readonly wasmpastafqopeningproof_set_delta: (a: number, b: number) => void;
readonly wasmpastafqopeningproof_set_sg: (a: number, b: number) => void;
readonly __wbg_wasmpastafqprovercommitments_free: (a: number) => void;
readonly wasmpastafqprovercommitments_new: (a: number, b: number, c: number, d: number, e: number) => number;
readonly wasmpastafqprovercommitments_l_comm: (a: number) => number;
readonly wasmpastafqprovercommitments_r_comm: (a: number) => number;
readonly wasmpastafqprovercommitments_o_comm: (a: number) => number;
readonly wasmpastafqprovercommitments_z_comm: (a: number) => number;
readonly wasmpastafqprovercommitments_t_comm: (a: number) => number;
readonly wasmpastafqprovercommitments_set_l_comm: (a: number, b: number) => void;
readonly wasmpastafqprovercommitments_set_r_comm: (a: number, b: number) => void;
readonly wasmpastafqprovercommitments_set_o_comm: (a: number, b: number) => void;
readonly wasmpastafqprovercommitments_set_z_comm: (a: number, b: number) => void;
readonly wasmpastafqprovercommitments_set_t_comm: (a: number, b: number) => void;
readonly __wbg_wasmvecvecpastafq_free: (a: number) => void;
readonly wasmvecvecpastafq_push: (a: number, b: number, c: number) => void;
readonly wasmvecvecpastafq_get: (a: number, b: number, c: number) => void;
readonly wasmvecvecpastafq_set: (a: number, b: number, c: number, d: number) => void;
readonly __wbg_wasmpastafqproverproof_free: (a: number) => void;
readonly wasmpastafqproverproof_new: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number, i: number) => number;
readonly wasmpastafqproverproof_commitments: (a: number) => number;
readonly wasmpastafqproverproof_proof: (a: number) => number;
readonly wasmpastafqproverproof_evals0: (a: number) => number;
readonly wasmpastafqproverproof_evals1: (a: number) => number;
readonly wasmpastafqproverproof_public_: (a: number, b: number) => void;
readonly wasmpastafqproverproof_prev_challenges_scalars: (a: number) => number;
readonly wasmpastafqproverproof_prev_challenges_comms: (a: number, b: number) => void;
readonly wasmpastafqproverproof_set_commitments: (a: number, b: number) => void;
readonly wasmpastafqproverproof_set_proof: (a: number, b: number) => void;
readonly wasmpastafqproverproof_set_evals0: (a: number, b: number) => void;
readonly wasmpastafqproverproof_set_evals1: (a: number, b: number) => void;
readonly wasmpastafqproverproof_set_public_: (a: number, b: number, c: number) => void;
readonly wasmpastafqproverproof_set_prev_challenges_scalars: (a: number, b: number) => void;
readonly wasmpastafqproverproof_set_prev_challenges_comms: (a: number, b: number, c: number) => void;
readonly caml_pasta_fq_plonk_proof_create: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number, i: number) => number;
readonly caml_pasta_fq_plonk_proof_verify: (a: number, b: number, c: number, d: number) => number;
readonly __wbg_wasmvecvecpallaspolycomm_free: (a: number) => void;
readonly wasmvecvecpallaspolycomm_create: (a: number) => number;
readonly wasmvecvecpallaspolycomm_push: (a: number, b: number, c: number) => void;
readonly caml_pasta_fq_plonk_proof_batch_verify: (a: number, b: number, c: number, d: number, e: number) => number;
readonly caml_pasta_fq_plonk_proof_dummy: () => number;
readonly caml_pasta_fq_plonk_proof_deep_copy: (a: number) => number;
readonly wasmvecvecpastafq_create: (a: number) => number;
readonly __wbg_wbg_rayon_poolbuilder_free: (a: number) => void;
readonly wbg_rayon_poolbuilder_mainJS: (a: number) => number;
readonly wbg_rayon_poolbuilder_numThreads: (a: number) => number;
readonly wbg_rayon_poolbuilder_receiver: (a: number) => number;
readonly wbg_rayon_poolbuilder_build: (a: number) => void;
readonly initThreadPool: (a: number) => number;
readonly wbg_rayon_start_worker: (a: number) => void;
readonly memory: WebAssembly.Memory;
readonly __wbindgen_add_to_stack_pointer: (a: number) => number;
readonly __wbindgen_free: (a: number, b: number) => void;
readonly __wbindgen_malloc: (a: number) => number;
readonly __wbindgen_realloc: (a: number, b: number, c: number) => number;
readonly __wbindgen_exn_store: (a: number) => void;
readonly __wbindgen_start: () => void;
}
/**
 * Initializes the WebAssembly module and returns its exported API.
 *
 * If `module_or_path` is a {RequestInfo} or {URL}, a request is made for the
 * wasm binary and the response is instantiated; for everything else (e.g. a
 * `WebAssembly.Module` or raw bytes), `WebAssembly.instantiate` is called
 * directly.
 *
 * @param {InitInput | Promise<InitInput>} module_or_path source of the wasm
 *   module (URL/request to fetch, or module/bytes to instantiate directly).
 * @param {WebAssembly.Memory} maybe_memory optional pre-created memory to
 *   instantiate the module with — NOTE(review): presumably used to share
 *   memory between threads (the exports include a rayon thread-pool API);
 *   confirm against the wasm-bindgen generated JS.
 *
 * @returns {Promise<InitOutput>} resolves with the module's exports.
 */
export default function init (module_or_path?: InitInput | Promise<InitInput>, maybe_memory?: WebAssembly.Memory): Promise<InitOutput>;
import { StateService } from '@uirouter/core';
import { IController, IScope } from 'angular';
import _ = require('lodash');
import { DocumentationQuery, DocumentationService, FolderSituation, PageType } from '../../services/documentation.service';
import NotificationService from '../../services/notification.service';
/**
 * Scope flags driving the inline edit panels of the documentation
 * management view.
 */
interface IDocumentationManagementScope extends IScope {
  // True while the "rename current folder" input is displayed.
  renameFolder: boolean;
  // True while the "translate current folder" panel is displayed.
  translateFolder: boolean;
}
/**
 * Controller for the documentation management screen.
 *
 * Handles browsing folders, creating/moving/removing pages, creating
 * shortcuts (links), publish toggles, page ordering, fetching pages from
 * external sources and folder translations. It serves two contexts,
 * distinguished by `apiId`: API-scoped documentation (apiId set from the
 * route) and portal-level settings documentation (apiId undefined).
 */
class DocumentationManagementComponentController implements IController {
  // One-way bindings resolved by the route (see component definition below).
  pages: any[];
  folders: any[];
  systemFolders: any[];
  // From $state params; undefined when managing portal-level documentation.
  apiId: string;
  // Id of the folder currently browsed; falsy at the documentation root.
  rootDir: string;
  foldersById: _.Dictionary<any>;
  systemFoldersById: _.Dictionary<any>;
  currentFolder: any;
  // Page types that may be created in the current folder, with UI tooltips.
  supportedTypes: { type: PageType; tooltip: string }[];
  // Ancestors of the current folder, root ('~') first.
  breadcrumb: { id: string; name: string }[];
  newFolderName: string;
  // Translation page being edited for the current folder; undefined when
  // no translation is selected.
  currentTranslation: any;
  fetchAllInProgress: boolean;

  constructor(
    private readonly NotificationService: NotificationService,
    private readonly DocumentationService: DocumentationService,
    private $state: StateService,
    private $scope: IDocumentationManagementScope,
    private readonly $mdDialog: angular.material.IDialogService,
  ) {
    'ngInject';
    this.apiId = $state.params.apiId;
  }

  $onInit() {
    // remove the ROOT page
    this.pages = this.filterROOTAndSystemPages(this.pages);

    this.rootDir = this.$state.params.parent;
    this.foldersById = _.keyBy(this.folders, 'id');
    this.systemFoldersById = _.keyBy(this.systemFolders, 'id');
    this.currentFolder = this.getFolder(this.rootDir);
    const folderSituation = this.DocumentationService.getFolderSituation(this.systemFoldersById, this.foldersById, this.rootDir);
    this.supportedTypes = this.DocumentationService.supportedTypes(folderSituation)
      // MARKDOWN_TEMPLATE is only offered for portal-level documentation,
      // never within an API's documentation.
      .filter((type) => !this.apiId || type !== PageType.MARKDOWN_TEMPLATE)
      .map((type) => ({
        type,
        tooltip: type.replace('_', ' '),
      }));
    this.breadcrumb = this.generateBreadcrumb();
    this.$scope.renameFolder = false;
    this.$scope.translateFolder = false;
  }

  isFolder(type: string): boolean {
    return PageType.FOLDER === type;
  }
  isLink(type: string): boolean {
    return PageType.LINK === type;
  }
  isSwagger(type: string): boolean {
    return PageType.SWAGGER === type;
  }
  isMarkdown(type: string): boolean {
    return PageType.MARKDOWN === type;
  }
  isPage(type: string): boolean {
    return this.isMarkdown(type) || this.isSwagger(type);
  }
  isMarkdownTemplate(type: string): boolean {
    return PageType.MARKDOWN_TEMPLATE === type;
  }

  /**
   * A shortcut (LINK) can be created for content pages, and for folders as
   * long as the folder does not live inside a system folder.
   */
  canCreateShortCut(pageId: string, pageType: string) {
    return (
      pageType === 'ASCIIDOC' ||
      pageType === 'ASYNCAPI' ||
      pageType === 'SWAGGER' ||
      pageType === 'MARKDOWN' ||
      (pageType === 'FOLDER' &&
        this.DocumentationService.getFolderSituation(this.systemFoldersById, this.foldersById, pageId) !==
          FolderSituation.FOLDER_IN_SYSTEM_FOLDER)
    );
  }

  // Hide the technical ROOT/SYSTEM_FOLDER/TRANSLATION pages from the listing.
  filterROOTAndSystemPages(pagesToFilter: any[]) {
    return _.filter(pagesToFilter, (p) => p.type !== 'ROOT' && p.type !== 'SYSTEM_FOLDER' && p.type !== 'TRANSLATION');
  }

  toggleRenameFolder() {
    this.$scope.renameFolder = !this.$scope.renameFolder;
    if (this.$scope.renameFolder) {
      // Pre-fill the input with the current folder name (last breadcrumb entry).
      this.newFolderName = this.breadcrumb[this.breadcrumb.length - 1].name;
    }
  }

  renameFolder() {
    this.DocumentationService.partialUpdate('name', this.newFolderName, this.rootDir, this.apiId).then((response) => {
      this.NotificationService.show('Folder ' + this.newFolderName + ' has been changed with success');
      this.breadcrumb[this.breadcrumb.length - 1].name = response.data.name;
      this.toggleRenameFolder();
    });
  }

  // Flip the current folder between PRIVATE and PUBLIC.
  changeFolderVisibility() {
    this.DocumentationService.partialUpdate(
      'visibility',
      this.currentFolder.visibility === 'PRIVATE' ? 'PUBLIC' : 'PRIVATE',
      this.rootDir,
      this.apiId,
    ).then((response) => {
      this.NotificationService.show(`Folder is now ${response.data.visibility}`);
      this.currentFolder.visibility = response.data.visibility;
    });
  }

  /**
   * Builds the breadcrumb from the root ('~') down to the current folder.
   */
  generateBreadcrumb(): { id: string; name: string }[] {
    const result: { id: string; name: string }[] = [];
    if (this.rootDir) {
      this.addParentToBreadcrumb(this.rootDir, result);
    }
    result.push({ id: '', name: '~' });
    // Ancestors were collected child-first; reverse to get root-first order.
    return result.reverse();
  }

  // Looks up a folder by id, in regular folders first, then system folders.
  // Returns undefined for a falsy id or an unknown id.
  getFolder(id: string) {
    if (id) {
      let folder = this.foldersById[id];
      if (!folder) {
        folder = this.systemFoldersById[id];
      }
      return folder;
    }
  }

  /**
   * Opens a folder-selection dialog and creates a LINK page pointing to the
   * given page in the selected (system) folder.
   */
  createShortCut(page: any) {
    this.$mdDialog
      .show({
        controller: 'SelectFolderDialogController',
        controllerAs: 'ctrl',
        template: require('./dialog/selectfolder.dialog.html'),
        clickOutsideToClose: true,
        locals: {
          title: 'Create shortcut for "' + page.name + '" in...',
          folders: this.generateCreateShortCutFolder(),
        },
      })
      .then((destinationId) => {
        if (destinationId) {
          const newLink = {
            name: page.name,
            content: page.id,
            parentId: destinationId,
            type: 'LINK',
            published: page.published,
            visibility: page.visibility,
            configuration: {
              resourceType: 'page',
              isFolder: page.type === 'FOLDER',
              inherit: 'true',
            },
          };
          this.DocumentationService.create(newLink, this.apiId).then(() => {
            this.NotificationService.show('"Link to ' + page.name + '" has been created with success');
            this.refresh();
          });
        }
      });
  }

  /**
   * Candidate destinations for a shortcut: system folders (with or without
   * sub-folders) and folders located inside system folders, sorted by path.
   */
  generateCreateShortCutFolder() {
    const result = [];
    if (!this.folders && !this.systemFolders) {
      return result;
    }
    const allFolders = _.concat(this.folders, this.systemFolders);
    allFolders.forEach((f) => {
      const situation = this.DocumentationService.getFolderSituation(this.systemFoldersById, this.foldersById, f.id);
      if (
        situation === FolderSituation.SYSTEM_FOLDER ||
        situation === FolderSituation.SYSTEM_FOLDER_WITH_FOLDERS ||
        situation === FolderSituation.FOLDER_IN_SYSTEM_FOLDER
      ) {
        const path = this.getFolderPath(f.id);
        if (path) {
          result.push({
            id: f.id,
            path: path,
          });
        }
      }
    });
    return _.orderBy(result, ['path'], ['asc']);
  }

  /**
   * Opens a folder-selection dialog and re-parents the given page to the
   * selected destination (-1 stands for the root).
   */
  moveToFolder(page: any) {
    this.$mdDialog
      .show({
        controller: 'SelectFolderDialogController',
        controllerAs: 'ctrl',
        template: require('./dialog/selectfolder.dialog.html'),
        clickOutsideToClose: true,
        locals: {
          title: 'Move "' + page.name + '" to...',
          folders: this.generateMoveToFolder(page.id, page.type),
        },
      })
      .then((destinationId) => {
        if (destinationId) {
          this.DocumentationService.partialUpdate('parentId', destinationId === -1 ? '' : destinationId, page.id, this.apiId).then(() => {
            this.NotificationService.show('"' + page.name + '" has been moved with success');
            this.refresh();
          });
        }
      });
  }

  /**
   * Candidate destinations for moving a page, sorted by path.
   */
  generateMoveToFolder(pageId: string, pageType: string) {
    const result = [];
    if (!this.folders && !this.systemFolders) {
      return result;
    }
    const allFolders = _.concat(this.folders, this.systemFolders);
    // If it can be a link, it can't be moved in a system folder. If not, it can only be moved inside a system folder
    const canBeALink = this.canCreateShortCut(pageId, pageType);
    if (canBeALink) {
      // -1 is a sentinel meaning "documentation root" (mapped to '' on update).
      result.push({
        id: -1,
        path: '/',
      });
    }
    allFolders.forEach((f) => {
      const situation = this.DocumentationService.getFolderSituation(this.systemFoldersById, this.foldersById, f.id);
      if (
        (canBeALink && (situation === FolderSituation.ROOT || situation === FolderSituation.FOLDER_IN_FOLDER)) ||
        (!canBeALink &&
          ((pageType === 'FOLDER' && situation === FolderSituation.SYSTEM_FOLDER_WITH_FOLDERS) ||
            (pageType !== 'FOLDER' &&
              (situation === FolderSituation.SYSTEM_FOLDER ||
                situation === FolderSituation.SYSTEM_FOLDER_WITH_FOLDERS ||
                situation === FolderSituation.FOLDER_IN_SYSTEM_FOLDER))))
      ) {
        const path = this.getFolderPath(f.id, pageId);
        if (path) {
          result.push({
            id: f.id,
            path: path,
          });
        }
      }
    });
    return _.orderBy(result, ['path'], ['asc']);
  }

  /**
   * Builds the display path ("/ parent / child") of a folder, or undefined
   * when the folder is not a valid destination for `pageToMoveId`.
   */
  getFolderPath(folderId: string, pageToMoveId?: string) {
    const hierarchyNames = [];
    const folder = this.getFolder(folderId);
    hierarchyNames.push(folder.name);
    this.getFolderParentName(folderId, hierarchyNames, pageToMoveId);
    // getFolderParentName clears the array when the destination is invalid.
    if (hierarchyNames.length === 0) {
      return;
    }
    return (
      '/ ' +
      _.reduceRight(hierarchyNames, (path, name) => {
        return path + ' / ' + name;
      })
    );
  }

  // Recursively collects ancestor names; empties `names` to signal an
  // invalid destination.
  getFolderParentName(folderId: string, names: string[], pageToMoveId: string) {
    const folder = this.getFolder(folderId);
    // do not move a folder to itself (or into one of its own children)
    if (folderId === pageToMoveId || (folder.parentId && pageToMoveId === folder.parentId)) {
      names.length = 0;
      return;
    }
    if (folder.parentId) {
      const parentFolder = this.getFolder(folder.parentId);
      if (parentFolder) {
        names.push(parentFolder.name);
        this.getFolderParentName(folder.parentId, names, pageToMoveId);
      }
    }
  }

  // Walks up the folder hierarchy, pushing each ancestor (child-first).
  addParentToBreadcrumb(id: string, breadcrumb: any[]) {
    const folder = this.getFolder(id);
    if (folder) {
      breadcrumb.push(folder);
      if (folder.parentId) {
        this.addParentToBreadcrumb(folder.parentId, breadcrumb);
      }
    }
  }

  // Re-queries the pages of the current folder (or the root).
  refresh() {
    const q = new DocumentationQuery();
    if (this.rootDir) {
      q.parent = this.rootDir;
    } else {
      q.root = true;
    }
    this.DocumentationService.search(q, this.apiId).then((response) => (this.pages = this.filterROOTAndSystemPages(response.data)));
  }

  // Reloads the current folder and drops any translation being edited.
  refreshCurrentFolder() {
    if (this.rootDir) {
      this.DocumentationService.get(this.apiId, this.rootDir).then((response) => (this.currentFolder = response.data));
      delete this.currentTranslation;
    }
  }

  togglePublish(page: any) {
    if (page.generalConditions) {
      // A page used as general conditions must stay published.
      this.NotificationService.showError('Page ' + page.name + ' is used as general conditions');
    } else {
      this.DocumentationService.partialUpdate('published', !page.published, page.id, this.apiId).then(() => {
        page.published = !page.published;
        const message = this.isMarkdownTemplate(page.type)
          ? 'Template ' + page.name + ' has been made ' + (page.published ? '' : 'un') + 'available with success'
          : 'Page ' + page.name + ' has been ' + (page.published ? '' : 'un') + 'published with success';
        this.NotificationService.show(message);
      });
    }
  }

  // Moves the page one position up in the ordering.
  upward(page: any) {
    page.order = page.order - 1;
    this.DocumentationService.partialUpdate('order', page.order, page.id, this.apiId).then(() => {
      this.NotificationService.show('Page ' + page.name + ' order has been changed with success');
      this.refresh();
    });
  }

  // Moves the page one position down in the ordering.
  downward(page: any) {
    page.order = page.order + 1;
    this.DocumentationService.partialUpdate('order', page.order, page.id, this.apiId).then(() => {
      this.NotificationService.show('Page ' + page.name + ' order has been changed with success');
      this.refresh();
    });
  }

  remove(page: any) {
    this.$mdDialog
      .show({
        controller: 'DialogConfirmController',
        controllerAs: 'ctrl',
        template: require('../dialog/confirmWarning.dialog.html'),
        clickOutsideToClose: true,
        locals: {
          title: 'Would you like to remove "' + page.name + '"?',
          msg: page.type !== 'LINK' ? 'All related links will also be removed.' : '',
          confirmButton: 'Remove',
        },
      })
      .then((response) => {
        if (response) {
          this.DocumentationService.remove(page.id, this.apiId).then(() => {
            this.NotificationService.show('Page ' + page.name + ' has been removed');
            this.refresh();
            this.refreshCurrentFolder();
            // Guard against undefined: no translation may be selected, and
            // refreshCurrentFolder() above deletes currentTranslation anyway.
            // The original unguarded access threw a TypeError here.
            if (this.currentTranslation && this.currentTranslation.id === page.id) {
              delete this.currentTranslation;
            }
          });
        }
      });
  }

  // Navigates to the "new page" form, in the API or settings context.
  newPage(type: string) {
    if (this.apiId) {
      this.$state.go('management.apis.detail.portal.newdocumentation', { type: type, parent: this.rootDir });
    } else {
      this.$state.go('management.settings.newdocumentation', { type: type, parent: this.rootDir });
    }
  }

  // Opens a folder (drill down) or the page editor, depending on the type.
  openUrl(page: any) {
    if ('FOLDER' === page.type || 'SYSTEM_FOLDER' === page.type) {
      if (this.apiId) {
        return this.$state.go('management.apis.detail.portal.documentation', { apiId: this.apiId, type: page.type, parent: page.id });
      } else {
        return this.$state.go('management.settings.documentation', { parent: page.id });
      }
    } else {
      if (this.apiId) {
        return this.$state.go('management.apis.detail.portal.editdocumentation', { apiId: this.apiId, type: page.type, pageId: page.id });
      } else {
        return this.$state.go('management.settings.editdocumentation', { pageId: page.id, type: page.type, tab: 'content' });
      }
    }
  }

  importPages() {
    if (this.apiId) {
      this.$state.go('management.apis.detail.portal.importdocumentation', { apiId: this.apiId });
    } else {
      this.$state.go('management.settings.importdocumentation');
    }
  }

  // Re-fetches every page from its external source, tracking progress for the UI.
  fetch() {
    this.fetchAllInProgress = true;
    this.DocumentationService.fetchAll(this.apiId)
      .then(() => {
        this.refresh();
        // Fixed grammar of the original message ("Pages has been ...").
        this.NotificationService.show('Pages have been successfully fetched');
      })
      .finally(() => {
        this.fetchAllInProgress = false;
      });
  }

  // True when at least one page is backed by an external source.
  hasExternalDoc() {
    const externalPages = this.pages.filter((page) => Object.prototype.hasOwnProperty.call(page, 'source'));
    return externalPages.length > 0;
  }

  toggleTranslateFolder() {
    this.$scope.translateFolder = !this.$scope.translateFolder;
  }

  // Creates the translation if it has no id yet, otherwise updates it.
  saveFolderTranslation() {
    if (!this.currentTranslation.id) {
      this.DocumentationService.create(this.currentTranslation, this.apiId).then((response: any) => {
        const page = response.data;
        this.NotificationService.show("'" + page.name + "' has been created");
        this.refreshCurrentFolder();
      });
    } else {
      this.DocumentationService.update(this.currentTranslation, this.apiId).then(() => {
        this.NotificationService.show("'" + this.currentTranslation.name + "' has been updated");
        this.refreshCurrentFolder();
      });
    }
  }

  selectTranslation(translation: any) {
    this.currentTranslation = translation;
  }

  // Starts a new, empty translation attached to the current folder.
  addTranslation() {
    this.currentTranslation = {
      type: 'TRANSLATION',
      parentId: this.currentFolder.id,
    };
  }
}
/**
 * Documentation management component. Receives the pre-resolved pages,
 * folders and system folders as one-way bindings from the route.
 */
export const DocumentationManagementComponent: ng.IComponentOptions = {
  bindings: {
    pages: '<',
    folders: '<',
    systemFolders: '<',
  },
  template: require('./documentation-management.html'),
  controller: DocumentationManagementComponentController,
};
import * as pulumi from "@pulumi/pulumi";
import { input as inputs, output as outputs, enums } from "../types";
/**
 * Top-level specification of a DigitalOcean App Platform app: its name,
 * region, and the components (services, static sites, workers, jobs,
 * databases) and domains it is made of.
 */
export interface AppSpec {
  /**
   * Describes a database component to provision for the app.
   */
  databases?: pulumi.Input<pulumi.Input<inputs.AppSpecDatabase>[]>;
  /**
   * Describes a domain where the application will be made available.
   */
  domainNames?: pulumi.Input<pulumi.Input<inputs.AppSpecDomainName>[]>;
  /**
   * @deprecated This attribute has been replaced by `domain` which supports additional functionality.
   */
  domains?: pulumi.Input<pulumi.Input<string>[]>;
  /**
   * Describes an environment variable made available to an app component.
   */
  envs?: pulumi.Input<pulumi.Input<inputs.AppSpecEnv>[]>;
  /**
   * Describes a job component run as part of the app's deployment process
   * (see `AppSpecJob.kind`).
   */
  jobs?: pulumi.Input<pulumi.Input<inputs.AppSpecJob>[]>;
  /**
   * The name of the app. NOTE(review): the generated doc said "the name of
   * the component", which looks copy-pasted from the component specs — on the
   * top-level spec this is the app name; confirm against the provider docs.
   */
  name: pulumi.Input<string>;
  /**
   * The slug for the DigitalOcean data center region hosting the app.
   */
  region?: pulumi.Input<string>;
  /**
   * Describes an HTTP service component of the app.
   */
  services?: pulumi.Input<pulumi.Input<inputs.AppSpecService>[]>;
  /**
   * Describes a static site component of the app.
   */
  staticSites?: pulumi.Input<pulumi.Input<inputs.AppSpecStaticSite>[]>;
  /**
   * Describes a worker component of the app.
   */
  workers?: pulumi.Input<pulumi.Input<inputs.AppSpecWorker>[]>;
}
/**
 * Spec for a database component attached to the app (managed DBaaS cluster
 * or a dev database).
 */
export interface AppSpecDatabase {
  /**
   * The name of the underlying DigitalOcean DBaaS cluster. This is required for production databases. For dev databases, if `clusterName` is not set, a new cluster will be provisioned.
   */
  clusterName?: pulumi.Input<string>;
  /**
   * The name of the MySQL or PostgreSQL database to configure.
   */
  dbName?: pulumi.Input<string>;
  /**
   * The name of the MySQL or PostgreSQL user to configure.
   */
  dbUser?: pulumi.Input<string>;
  /**
   * The database engine to use (`MYSQL`, `PG`, or `REDIS`).
   */
  engine?: pulumi.Input<string>;
  /**
   * The name of the component.
   */
  name?: pulumi.Input<string>;
  /**
   * Whether this is a production or dev database.
   */
  production?: pulumi.Input<boolean>;
  /**
   * The version of the database engine.
   */
  version?: pulumi.Input<string>;
}
/**
 * Spec describing a domain where the app will be made available.
 */
export interface AppSpecDomainName {
  /**
   * The hostname for the domain. NOTE(review): the generated doc said "the
   * name of the component", which looks copy-pasted from the component
   * specs — confirm against the DigitalOcean app spec.
   */
  name: pulumi.Input<string>;
  /**
   * The type of the domain. NOTE(review): the generated doc described an
   * environment-variable type (`GENERAL`/`SECRET`) here, which cannot apply
   * to a domain — presumably one of `DEFAULT`, `PRIMARY`, or `ALIAS`;
   * confirm against the DigitalOcean app spec.
   */
  type?: pulumi.Input<string>;
  /**
   * A boolean indicating whether the domain includes all sub-domains, in addition to the given domain.
   */
  wildcard?: pulumi.Input<boolean>;
  /**
   * If the domain uses DigitalOcean DNS and you would like App Platform to automatically manage it for you, set this to the name of the domain on your account.
   */
  zone?: pulumi.Input<string>;
}
/**
 * Spec for an app-wide environment variable.
 */
export interface AppSpecEnv {
  /**
   * The name of the environment variable.
   */
  key?: pulumi.Input<string>;
  /**
   * The visibility scope of the environment variable. One of `RUN_TIME`, `BUILD_TIME`, or `RUN_AND_BUILD_TIME` (default).
   */
  scope?: pulumi.Input<string>;
  /**
   * The type of the environment variable, `GENERAL` or `SECRET`.
   */
  type?: pulumi.Input<string>;
  /**
   * The value of the environment variable.
   */
  value?: pulumi.Input<string>;
}
/**
 * Spec for a job component: a workload executed at a specific point of the
 * deployment process (see `kind`) rather than serving traffic.
 */
export interface AppSpecJob {
  /**
   * An optional build command to run while building this component from source.
   */
  buildCommand?: pulumi.Input<string>;
  /**
   * The path to a Dockerfile relative to the root of the repo. If set, overrides usage of buildpacks.
   */
  dockerfilePath?: pulumi.Input<string>;
  /**
   * An environment slug describing the type of this app.
   */
  environmentSlug?: pulumi.Input<string>;
  /**
   * Describes an environment variable made available to an app component.
   */
  envs?: pulumi.Input<pulumi.Input<inputs.AppSpecJobEnv>[]>;
  /**
   * A Git repo to use as the component's source. The repository must be able to be cloned without authentication. Only one of `git`, `github`, `gitlab`, or `image` may be set.
   */
  git?: pulumi.Input<inputs.AppSpecJobGit>;
  /**
   * A GitHub repo to use as the component's source. DigitalOcean App Platform must have [access to the repository](https://cloud.digitalocean.com/apps/github/install). Only one of `git`, `github`, `gitlab`, or `image` may be set.
   */
  github?: pulumi.Input<inputs.AppSpecJobGithub>;
  /**
   * A Gitlab repo to use as the component's source. DigitalOcean App Platform must have [access to the repository](https://cloud.digitalocean.com/apps/gitlab/install). Only one of `git`, `github`, `gitlab`, or `image` may be set.
   */
  gitlab?: pulumi.Input<inputs.AppSpecJobGitlab>;
  /**
   * An image to use as the component's source. Only one of `git`, `github`, `gitlab`, or `image` may be set.
   */
  image?: pulumi.Input<inputs.AppSpecJobImage>;
  /**
   * The amount of instances that this component should be scaled to.
   */
  instanceCount?: pulumi.Input<number>;
  /**
   * The instance size to use for this component. This determines the plan (basic or professional) and the available CPU and memory. The list of available instance sizes can be [found with the API](https://docs.digitalocean.com/reference/api/api-reference/#operation/list_instance_sizes) or using the [doctl CLI](https://docs.digitalocean.com/reference/doctl/) (`doctl apps tier instance-size list`). Default: `basic-xxs`
   */
  instanceSizeSlug?: pulumi.Input<string>;
  /**
   * The type of job and when it will be run during the deployment process. It may be one of:
   * - `UNSPECIFIED`: Default job type, will auto-complete to POST_DEPLOY kind.
   * - `PRE_DEPLOY`: Indicates a job that runs before an app deployment.
   * - `POST_DEPLOY`: Indicates a job that runs after an app deployment.
   * - `FAILED_DEPLOY`: Indicates a job that runs after a component fails to deploy.
   */
  kind?: pulumi.Input<string>;
  /**
   * The name of the component.
   */
  name: pulumi.Input<string>;
  /**
   * An optional run command to override the component's default.
   */
  runCommand?: pulumi.Input<string>;
  /**
   * An optional path to the working directory to use for the build.
   */
  sourceDir?: pulumi.Input<string>;
}
/**
 * Spec for an environment variable scoped to a job component.
 */
export interface AppSpecJobEnv {
  /**
   * The name of the environment variable.
   */
  key?: pulumi.Input<string>;
  /**
   * The visibility scope of the environment variable. One of `RUN_TIME`, `BUILD_TIME`, or `RUN_AND_BUILD_TIME` (default).
   */
  scope?: pulumi.Input<string>;
  /**
   * The type of the environment variable, `GENERAL` or `SECRET`.
   */
  type?: pulumi.Input<string>;
  /**
   * The value of the environment variable.
   */
  value?: pulumi.Input<string>;
}
/**
 * Plain Git source for a job component (public clone URL, no authentication).
 */
export interface AppSpecJobGit {
  /**
   * The name of the branch to use.
   */
  branch?: pulumi.Input<string>;
  /**
   * The clone URL of the repo.
   */
  repoCloneUrl?: pulumi.Input<string>;
}
/**
 * GitHub source for a job component.
 */
export interface AppSpecJobGithub {
  /**
   * The name of the branch to use.
   */
  branch?: pulumi.Input<string>;
  /**
   * Whether to automatically deploy new commits made to the repo.
   */
  deployOnPush?: pulumi.Input<boolean>;
  /**
   * The name of the repo in the format `owner/repo`.
   */
  repo?: pulumi.Input<string>;
}
/**
 * GitLab source for a job component.
 */
export interface AppSpecJobGitlab {
  /**
   * The name of the branch to use.
   */
  branch?: pulumi.Input<string>;
  /**
   * Whether to automatically deploy new commits made to the repo.
   */
  deployOnPush?: pulumi.Input<boolean>;
  /**
   * The name of the repo in the format `owner/repo`.
   */
  repo?: pulumi.Input<string>;
}
/**
 * Container-image source for a job component.
 */
export interface AppSpecJobImage {
  /**
   * The registry name. Must be left empty for the `DOCR` registry type. Required for the `DOCKER_HUB` registry type.
   */
  registry?: pulumi.Input<string>;
  /**
   * The registry type. One of `DOCR` (DigitalOcean container registry) or `DOCKER_HUB`.
   */
  registryType: pulumi.Input<string>;
  /**
   * The repository name.
   */
  repository: pulumi.Input<string>;
  /**
   * The repository tag. Defaults to `latest` if not provided.
   */
  tag?: pulumi.Input<string>;
}
export interface AppSpecService {
/**
 * An optional build command to run while building this component from source.
 */
buildCommand?: pulumi.Input<string>;
/**
 * The [CORS](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) policies of the app.
 */
cors?: pulumi.Input<inputs.AppSpecServiceCors>;
/**
 * The path to a Dockerfile relative to the root of the repo. If set, overrides usage of buildpacks.
 */
dockerfilePath?: pulumi.Input<string>;
/**
 * An environment slug describing the type of this app.
 */
environmentSlug?: pulumi.Input<string>;
/**
 * Describes an environment variable made available to an app component.
 */
envs?: pulumi.Input<pulumi.Input<inputs.AppSpecServiceEnv>[]>;
/**
 * A Git repo to use as the component's source. The repository must be able to be cloned without authentication. Only one of `git`, `github` or `gitlab` may be set
 */
git?: pulumi.Input<inputs.AppSpecServiceGit>;
/**
 * A GitHub repo to use as the component's source. DigitalOcean App Platform must have [access to the repository](https://cloud.digitalocean.com/apps/github/install). Only one of `git`, `github`, `gitlab`, or `image` may be set.
 */
github?: pulumi.Input<inputs.AppSpecServiceGithub>;
/**
 * A Gitlab repo to use as the component's source. DigitalOcean App Platform must have [access to the repository](https://cloud.digitalocean.com/apps/gitlab/install). Only one of `git`, `github`, `gitlab`, or `image` may be set.
 */
gitlab?: pulumi.Input<inputs.AppSpecServiceGitlab>;
/**
 * A health check to determine the availability of this component.
 */
healthCheck?: pulumi.Input<inputs.AppSpecServiceHealthCheck>;
/**
 * The internal port on which this service's run command will listen.
 */
httpPort?: pulumi.Input<number>;
/**
 * An image to use as the component's source. Only one of `git`, `github`, `gitlab`, or `image` may be set.
 */
image?: pulumi.Input<inputs.AppSpecServiceImage>;
/**
 * The amount of instances that this component should be scaled to.
 */
instanceCount?: pulumi.Input<number>;
/**
 * The instance size to use for this component. This determines the plan (basic or professional) and the available CPU and memory. The list of available instance sizes can be [found with the API](https://docs.digitalocean.com/reference/api/api-reference/#operation/list_instance_sizes) or using the [doctl CLI](https://docs.digitalocean.com/reference/doctl/) (`doctl apps tier instance-size list`). Default: `basic-xxs`
 */
instanceSizeSlug?: pulumi.Input<string>;
/**
 * A list of ports on which this service will listen for internal traffic.
 */
internalPorts?: pulumi.Input<pulumi.Input<number>[]>;
/**
 * The name of the component.
 */
name: pulumi.Input<string>;
/**
 * The HTTP request routes (matched by `path`) that should be directed to this component.
 */
routes?: pulumi.Input<pulumi.Input<inputs.AppSpecServiceRoute>[]>;
/**
 * An optional run command to override the component's default.
 */
runCommand?: pulumi.Input<string>;
/**
 * An optional path to the working directory to use for the build.
 */
sourceDir?: pulumi.Input<string>;
}
export interface AppSpecServiceCors {
/**
 * Whether browsers should expose the response to the client-side JavaScript code when the request's credentials mode is `include`. This configures the `Access-Control-Allow-Credentials` header.
 */
allowCredentials?: pulumi.Input<boolean>;
/**
 * The set of allowed HTTP request headers. This configures the `Access-Control-Allow-Headers` header.
 */
allowHeaders?: pulumi.Input<pulumi.Input<string>[]>;
/**
 * The set of allowed HTTP methods. This configures the `Access-Control-Allow-Methods` header.
 */
allowMethods?: pulumi.Input<pulumi.Input<string>[]>;
/**
 * The set of allowed origins. This configures the `Access-Control-Allow-Origin` header; origins can be
 * matched by `exact` value, `prefix`, or `regex` (see `AppSpecServiceCorsAllowOrigins`).
 */
allowOrigins?: pulumi.Input<inputs.AppSpecServiceCorsAllowOrigins>;
/**
 * The set of HTTP response headers that browsers are allowed to access. This configures the `Access-Control-Expose-Headers` header.
 */
exposeHeaders?: pulumi.Input<pulumi.Input<string>[]>;
/**
 * An optional duration specifying how long browsers can cache the results of a preflight request. This configures the Access-Control-Max-Age header. Example: `5h30m`.
 */
maxAge?: pulumi.Input<string>;
}
export interface AppSpecServiceCorsAllowOrigins {
/**
* The `Access-Control-Allow-Origin` header will be set to the client's origin only if the client's origin exactly matches the value you provide.
*/
exact?: pulumi.Input<string>;
/**
* The `Access-Control-Allow-Origin` header will be set to the client's origin if the beginning of the client's origin matches the value you provide.
*/
prefix?: pulumi.Input<string>;
/**
* The `Access-Control-Allow-Origin` header will be set to the client's origin if the client’s origin matches the regex you provide, in [RE2 style syntax](https://github.com/google/re2/wiki/Syntax).
*/
regex?: pulumi.Input<string>;
}
export interface AppSpecServiceEnv {
/**
* The name of the environment variable.
*/
key?: pulumi.Input<string>;
/**
* The visibility scope of the environment variable. One of `RUN_TIME`, `BUILD_TIME`, or `RUN_AND_BUILD_TIME` (default).
*/
scope?: pulumi.Input<string>;
/**
* The type of the environment variable, `GENERAL` or `SECRET`.
*/
type?: pulumi.Input<string>;
/**
* The value of the environment variable.
*/
value?: pulumi.Input<string>;
}
export interface AppSpecServiceGit {
/**
* The name of the branch to use.
*/
branch?: pulumi.Input<string>;
/**
* The clone URL of the repo.
*/
repoCloneUrl?: pulumi.Input<string>;
}
export interface AppSpecServiceGithub {
/**
* The name of the branch to use.
*/
branch?: pulumi.Input<string>;
/**
* Whether to automatically deploy new commits made to the repo.
*/
deployOnPush?: pulumi.Input<boolean>;
/**
* The name of the repo in the format `owner/repo`.
*/
repo?: pulumi.Input<string>;
}
export interface AppSpecServiceGitlab {
/**
* The name of the branch to use.
*/
branch?: pulumi.Input<string>;
/**
* Whether to automatically deploy new commits made to the repo.
*/
deployOnPush?: pulumi.Input<boolean>;
/**
* The name of the repo in the format `owner/repo`.
*/
repo?: pulumi.Input<string>;
}
export interface AppSpecServiceHealthCheck {
/**
* The number of failed health checks before considered unhealthy.
*/
failureThreshold?: pulumi.Input<number>;
/**
* The route path used for the HTTP health check ping.
*/
httpPath?: pulumi.Input<string>;
/**
* The number of seconds to wait before beginning health checks.
*/
initialDelaySeconds?: pulumi.Input<number>;
/**
* The number of seconds to wait between health checks.
*/
periodSeconds?: pulumi.Input<number>;
/**
* The number of successful health checks before considered healthy.
*/
successThreshold?: pulumi.Input<number>;
/**
* The number of seconds after which the check times out.
*/
timeoutSeconds?: pulumi.Input<number>;
}
export interface AppSpecServiceImage {
/**
* The registry name. Must be left empty for the `DOCR` registry type. Required for the `DOCKER_HUB` registry type.
*/
registry?: pulumi.Input<string>;
/**
* The registry type. One of `DOCR` (DigitalOcean container registry) or `DOCKER_HUB`.
*/
registryType: pulumi.Input<string>;
/**
* The repository name.
*/
repository: pulumi.Input<string>;
/**
* The repository tag. Defaults to `latest` if not provided.
*/
tag?: pulumi.Input<string>;
}
export interface AppSpecServiceRoute {
/**
* Paths must start with `/` and must be unique within the app.
*/
path?: pulumi.Input<string>;
}
export interface AppSpecStaticSite {
/**
 * An optional build command to run while building this component from source.
 */
buildCommand?: pulumi.Input<string>;
/**
 * The name of the document to use as the fallback for any requests to documents that are not found when serving this static site.
 */
catchallDocument?: pulumi.Input<string>;
/**
 * The [CORS](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) policies of the app.
 */
cors?: pulumi.Input<inputs.AppSpecStaticSiteCors>;
/**
 * The path to a Dockerfile relative to the root of the repo. If set, overrides usage of buildpacks.
 */
dockerfilePath?: pulumi.Input<string>;
/**
 * An environment slug describing the type of this app.
 */
environmentSlug?: pulumi.Input<string>;
/**
 * Describes an environment variable made available to an app component.
 */
envs?: pulumi.Input<pulumi.Input<inputs.AppSpecStaticSiteEnv>[]>;
/**
 * The name of the error document to use when serving this static site.
 */
errorDocument?: pulumi.Input<string>;
/**
 * A Git repo to use as the component's source. The repository must be able to be cloned without authentication. Only one of `git`, `github` or `gitlab` may be set
 */
git?: pulumi.Input<inputs.AppSpecStaticSiteGit>;
/**
 * A GitHub repo to use as the component's source. DigitalOcean App Platform must have [access to the repository](https://cloud.digitalocean.com/apps/github/install). Only one of `git`, `github`, `gitlab`, or `image` may be set.
 */
github?: pulumi.Input<inputs.AppSpecStaticSiteGithub>;
/**
 * A Gitlab repo to use as the component's source. DigitalOcean App Platform must have [access to the repository](https://cloud.digitalocean.com/apps/gitlab/install). Only one of `git`, `github`, `gitlab`, or `image` may be set.
 */
gitlab?: pulumi.Input<inputs.AppSpecStaticSiteGitlab>;
/**
 * The name of the index document to use when serving this static site.
 */
indexDocument?: pulumi.Input<string>;
/**
 * The name of the component.
 */
name: pulumi.Input<string>;
/**
 * An optional path to where the built assets will be located, relative to the build context. If not set, App Platform will automatically scan for these directory names: `_static`, `dist`, `public`.
 */
outputDir?: pulumi.Input<string>;
/**
 * The HTTP request routes (matched by `path`) that should be directed to this component.
 */
routes?: pulumi.Input<pulumi.Input<inputs.AppSpecStaticSiteRoute>[]>;
/**
 * An optional path to the working directory to use for the build.
 */
sourceDir?: pulumi.Input<string>;
}
export interface AppSpecStaticSiteCors {
/**
 * Whether browsers should expose the response to the client-side JavaScript code when the request's credentials mode is `include`. This configures the `Access-Control-Allow-Credentials` header.
 */
allowCredentials?: pulumi.Input<boolean>;
/**
 * The set of allowed HTTP request headers. This configures the `Access-Control-Allow-Headers` header.
 */
allowHeaders?: pulumi.Input<pulumi.Input<string>[]>;
/**
 * The set of allowed HTTP methods. This configures the `Access-Control-Allow-Methods` header.
 */
allowMethods?: pulumi.Input<pulumi.Input<string>[]>;
/**
 * The set of allowed origins. This configures the `Access-Control-Allow-Origin` header; origins can be
 * matched by `exact` value, `prefix`, or `regex` (see `AppSpecStaticSiteCorsAllowOrigins`).
 */
allowOrigins?: pulumi.Input<inputs.AppSpecStaticSiteCorsAllowOrigins>;
/**
 * The set of HTTP response headers that browsers are allowed to access. This configures the `Access-Control-Expose-Headers` header.
 */
exposeHeaders?: pulumi.Input<pulumi.Input<string>[]>;
/**
 * An optional duration specifying how long browsers can cache the results of a preflight request. This configures the Access-Control-Max-Age header. Example: `5h30m`.
 */
maxAge?: pulumi.Input<string>;
}
export interface AppSpecStaticSiteCorsAllowOrigins {
/**
* The `Access-Control-Allow-Origin` header will be set to the client's origin only if the client's origin exactly matches the value you provide.
*/
exact?: pulumi.Input<string>;
/**
* The `Access-Control-Allow-Origin` header will be set to the client's origin if the beginning of the client's origin matches the value you provide.
*/
prefix?: pulumi.Input<string>;
/**
* The `Access-Control-Allow-Origin` header will be set to the client's origin if the client’s origin matches the regex you provide, in [RE2 style syntax](https://github.com/google/re2/wiki/Syntax).
*/
regex?: pulumi.Input<string>;
}
export interface AppSpecStaticSiteEnv {
/**
* The name of the environment variable.
*/
key?: pulumi.Input<string>;
/**
* The visibility scope of the environment variable. One of `RUN_TIME`, `BUILD_TIME`, or `RUN_AND_BUILD_TIME` (default).
*/
scope?: pulumi.Input<string>;
/**
* The type of the environment variable, `GENERAL` or `SECRET`.
*/
type?: pulumi.Input<string>;
/**
* The value of the environment variable.
*/
value?: pulumi.Input<string>;
}
export interface AppSpecStaticSiteGit {
/**
* The name of the branch to use.
*/
branch?: pulumi.Input<string>;
/**
* The clone URL of the repo.
*/
repoCloneUrl?: pulumi.Input<string>;
}
export interface AppSpecStaticSiteGithub {
/**
* The name of the branch to use.
*/
branch?: pulumi.Input<string>;
/**
* Whether to automatically deploy new commits made to the repo.
*/
deployOnPush?: pulumi.Input<boolean>;
/**
* The name of the repo in the format `owner/repo`.
*/
repo?: pulumi.Input<string>;
}
export interface AppSpecStaticSiteGitlab {
/**
* The name of the branch to use.
*/
branch?: pulumi.Input<string>;
/**
* Whether to automatically deploy new commits made to the repo.
*/
deployOnPush?: pulumi.Input<boolean>;
/**
* The name of the repo in the format `owner/repo`.
*/
repo?: pulumi.Input<string>;
}
export interface AppSpecStaticSiteRoute {
/**
* Paths must start with `/` and must be unique within the app.
*/
path?: pulumi.Input<string>;
}
export interface AppSpecWorker {
/**
 * An optional build command to run while building this component from source.
 */
buildCommand?: pulumi.Input<string>;
/**
 * The path to a Dockerfile relative to the root of the repo. If set, overrides usage of buildpacks.
 */
dockerfilePath?: pulumi.Input<string>;
/**
 * An environment slug describing the type of this app.
 */
environmentSlug?: pulumi.Input<string>;
/**
 * Describes an environment variable made available to an app component.
 */
envs?: pulumi.Input<pulumi.Input<inputs.AppSpecWorkerEnv>[]>;
/**
 * A Git repo to use as the component's source. The repository must be able to be cloned without authentication. Only one of `git`, `github` or `gitlab` may be set
 */
git?: pulumi.Input<inputs.AppSpecWorkerGit>;
/**
 * A GitHub repo to use as the component's source. DigitalOcean App Platform must have [access to the repository](https://cloud.digitalocean.com/apps/github/install). Only one of `git`, `github`, `gitlab`, or `image` may be set.
 */
github?: pulumi.Input<inputs.AppSpecWorkerGithub>;
/**
 * A Gitlab repo to use as the component's source. DigitalOcean App Platform must have [access to the repository](https://cloud.digitalocean.com/apps/gitlab/install). Only one of `git`, `github`, `gitlab`, or `image` may be set.
 */
gitlab?: pulumi.Input<inputs.AppSpecWorkerGitlab>;
/**
 * An image to use as the component's source. Only one of `git`, `github`, `gitlab`, or `image` may be set.
 */
image?: pulumi.Input<inputs.AppSpecWorkerImage>;
/**
 * The amount of instances that this component should be scaled to.
 */
instanceCount?: pulumi.Input<number>;
/**
 * The instance size to use for this component. This determines the plan (basic or professional) and the available CPU and memory. The list of available instance sizes can be [found with the API](https://docs.digitalocean.com/reference/api/api-reference/#operation/list_instance_sizes) or using the [doctl CLI](https://docs.digitalocean.com/reference/doctl/) (`doctl apps tier instance-size list`). Default: `basic-xxs`
 */
instanceSizeSlug?: pulumi.Input<string>;
/**
 * The name of the component.
 */
name: pulumi.Input<string>;
/**
 * An optional run command to override the component's default.
 */
runCommand?: pulumi.Input<string>;
/**
 * An optional path to the working directory to use for the build.
 */
sourceDir?: pulumi.Input<string>;
}
export interface AppSpecWorkerEnv {
/**
* The name of the environment variable.
*/
key?: pulumi.Input<string>;
/**
* The visibility scope of the environment variable. One of `RUN_TIME`, `BUILD_TIME`, or `RUN_AND_BUILD_TIME` (default).
*/
scope?: pulumi.Input<string>;
/**
* The type of the environment variable, `GENERAL` or `SECRET`.
*/
type?: pulumi.Input<string>;
/**
* The value of the environment variable.
*/
value?: pulumi.Input<string>;
}
export interface AppSpecWorkerGit {
/**
* The name of the branch to use.
*/
branch?: pulumi.Input<string>;
/**
* The clone URL of the repo.
*/
repoCloneUrl?: pulumi.Input<string>;
}
export interface AppSpecWorkerGithub {
/**
* The name of the branch to use.
*/
branch?: pulumi.Input<string>;
/**
* Whether to automatically deploy new commits made to the repo.
*/
deployOnPush?: pulumi.Input<boolean>;
/**
* The name of the repo in the format `owner/repo`.
*/
repo?: pulumi.Input<string>;
}
export interface AppSpecWorkerGitlab {
/**
* The name of the branch to use.
*/
branch?: pulumi.Input<string>;
/**
* Whether to automatically deploy new commits made to the repo.
*/
deployOnPush?: pulumi.Input<boolean>;
/**
* The name of the repo in the format `owner/repo`.
*/
repo?: pulumi.Input<string>;
}
export interface AppSpecWorkerImage {
/**
* The registry name. Must be left empty for the `DOCR` registry type. Required for the `DOCKER_HUB` registry type.
*/
registry?: pulumi.Input<string>;
/**
* The registry type. One of `DOCR` (DigitalOcean container registry) or `DOCKER_HUB`.
*/
registryType: pulumi.Input<string>;
/**
* The repository name.
*/
repository: pulumi.Input<string>;
/**
* The repository tag. Defaults to `latest` if not provided.
*/
tag?: pulumi.Input<string>;
}
export interface DatabaseClusterMaintenanceWindow {
/**
* The day of the week on which to apply maintenance updates.
*/
day: pulumi.Input<string>;
/**
* The hour in UTC at which maintenance updates will be applied in 24 hour format.
*/
hour: pulumi.Input<string>;
}
export interface DatabaseFirewallRule {
/**
* The date and time when the firewall rule was created.
*/
createdAt?: pulumi.Input<string>;
/**
* The type of resource that the firewall rule allows to access the database cluster. The possible values are: `droplet`, `k8s`, `ipAddr`, `tag`, or `app`.
*/
type: pulumi.Input<string>;
/**
* A unique identifier for the firewall rule.
*/
uuid?: pulumi.Input<string>;
/**
* The ID of the specific resource, the name of a tag applied to a group of resources, or the IP address that the firewall rule allows to access the database cluster.
*/
value: pulumi.Input<string>;
}
export interface FirewallInboundRule {
/**
* The ports on which traffic will be allowed
* specified as a string containing a single port, a range (e.g. "8000-9000"),
* or "1-65535" to open all ports for a protocol. Required for when protocol is
* `tcp` or `udp`.
*/
portRange?: pulumi.Input<string>;
/**
* The type of traffic to be allowed.
* This may be one of "tcp", "udp", or "icmp".
*/
protocol: pulumi.Input<string>;
/**
* An array of strings containing the IPv4
* addresses, IPv6 addresses, IPv4 CIDRs, and/or IPv6 CIDRs from which the
* inbound traffic will be accepted.
*/
sourceAddresses?: pulumi.Input<pulumi.Input<string>[]>;
/**
* An array containing the IDs of
* the Droplets from which the inbound traffic will be accepted.
*/
sourceDropletIds?: pulumi.Input<pulumi.Input<number>[]>;
/**
* An array containing the IDs
* of the Load Balancers from which the inbound traffic will be accepted.
*/
sourceLoadBalancerUids?: pulumi.Input<pulumi.Input<string>[]>;
/**
* An array containing the names of Tags
* corresponding to groups of Droplets from which the inbound traffic
* will be accepted.
*/
sourceTags?: pulumi.Input<pulumi.Input<string>[]>;
}
export interface FirewallOutboundRule {
/**
 * An array of strings containing the IPv4
 * addresses, IPv6 addresses, IPv4 CIDRs, and/or IPv6 CIDRs to which the
 * outbound traffic will be allowed.
 */
destinationAddresses?: pulumi.Input<pulumi.Input<string>[]>;
/**
 * An array containing the IDs of
 * the Droplets to which the outbound traffic will be allowed.
 */
destinationDropletIds?: pulumi.Input<pulumi.Input<number>[]>;
/**
 * An array containing the IDs
 * of the Load Balancers to which the outbound traffic will be allowed.
 */
destinationLoadBalancerUids?: pulumi.Input<pulumi.Input<string>[]>;
/**
 * An array containing the names of Tags
 * corresponding to groups of Droplets to which the outbound traffic will
 * be allowed.
 */
destinationTags?: pulumi.Input<pulumi.Input<string>[]>;
/**
 * The ports on which traffic will be allowed
 * specified as a string containing a single port, a range (e.g. "8000-9000"),
 * or "1-65535" to open all ports for a protocol. Required for when protocol is
 * `tcp` or `udp`.
 */
portRange?: pulumi.Input<string>;
/**
 * The type of traffic to be allowed.
 * This may be one of "tcp", "udp", or "icmp".
 */
protocol: pulumi.Input<string>;
}
export interface FirewallPendingChange {
/**
 * The ID of the Droplet the pending change refers to.
 */
dropletId?: pulumi.Input<number>;
/**
 * Whether the pending change is removing the Droplet from the Firewall
 * (inferred from the field name — confirm against the DigitalOcean Firewalls API).
 */
removing?: pulumi.Input<boolean>;
/**
 * A status string indicating the current state of the Firewall.
 * This can be "waiting", "succeeded", or "failed".
 */
status?: pulumi.Input<string>;
}
export interface GetDomainsFilter {
/**
* Set to `true` to require that a field match all of the `values` instead of just one or more of
* them. This is useful when matching against multi-valued fields such as lists or sets where you want to ensure
* that all of the `values` are present in the list or set.
*/
all?: boolean;
/**
* Filter the domains by this key. This may be one of `name`, `urn`, and `ttl`.
*/
key: string;
/**
* One of `exact` (default), `re`, or `substring`. For string-typed fields, specify `re` to
* match by using the `values` as regular expressions, or specify `substring` to match by treating the `values` as
* substrings to find within the string field.
*/
matchBy?: string;
/**
* A list of values to match against the `key` field. Only retrieves domains
* where the `key` field takes on one or more of the values provided here.
*/
values: string[];
}
export interface GetDomainsSort {
/**
* The sort direction. This may be either `asc` or `desc`.
*/
direction?: string;
/**
* Sort the domains by this key. This may be one of `name`, `urn`, and `ttl`.
*/
key: string;
}
export interface GetDropletsFilter {
/**
* Set to `true` to require that a field match all of the `values` instead of just one or more of
* them. This is useful when matching against multi-valued fields such as lists or sets where you want to ensure
* that all of the `values` are present in the list or set.
*/
all?: boolean;
/**
* Filter the Droplets by this key. This may be one of `backups`, `createdAt`, `disk`, `id`,
* `image`, `ipv4Address`, `ipv4AddressPrivate`, `ipv6`, `ipv6Address`, `ipv6AddressPrivate`, `locked`,
* `memory`, `monitoring`, `name`, `priceHourly`, `priceMonthly`, `privateNetworking`, `region`, `size`,
* `status`, `tags`, `urn`, `vcpus`, `volumeIds`, or `vpcUuid`.
*/
key: string;
/**
* One of `exact` (default), `re`, or `substring`. For string-typed fields, specify `re` to
* match by using the `values` as regular expressions, or specify `substring` to match by treating the `values` as
* substrings to find within the string field.
*/
matchBy?: string;
/**
* A list of values to match against the `key` field. Only retrieves Droplets
* where the `key` field takes on one or more of the values provided here.
*/
values: string[];
}
export interface GetDropletsSort {
/**
* The sort direction. This may be either `asc` or `desc`.
*/
direction?: string;
/**
* Sort the Droplets by this key. This may be one of `backups`, `createdAt`, `disk`, `id`,
* `image`, `ipv4Address`, `ipv4AddressPrivate`, `ipv6`, `ipv6Address`, `ipv6AddressPrivate`, `locked`,
* `memory`, `monitoring`, `name`, `priceHourly`, `priceMonthly`, `privateNetworking`, `region`, `size`,
* `status`, `urn`, `vcpus`, or `vpcUuid`.
*/
key: string;
}
export interface GetFirewallInboundRule {
/**
* The ports on which traffic will be allowed
* specified as a string containing a single port, a range (e.g. "8000-9000"),
* or "1-65535" to open all ports for a protocol. Required for when protocol is
* `tcp` or `udp`.
*/
portRange?: string;
/**
* The type of traffic to be allowed.
* This may be one of "tcp", "udp", or "icmp".
*/
protocol: string;
/**
* An array of strings containing the IPv4
* addresses, IPv6 addresses, IPv4 CIDRs, and/or IPv6 CIDRs from which the
* inbound traffic will be accepted.
*/
sourceAddresses?: string[];
/**
* An array containing the IDs of
* the Droplets from which the inbound traffic will be accepted.
*/
sourceDropletIds?: number[];
/**
* An array containing the IDs
* of the Load Balancers from which the inbound traffic will be accepted.
*/
sourceLoadBalancerUids?: string[];
/**
* A set of names of Tags corresponding to group of
* Droplets from which the inbound traffic will be accepted.
*/
sourceTags?: string[];
}
export interface GetFirewallOutboundRule {
/**
 * An array of strings containing the IPv4
 * addresses, IPv6 addresses, IPv4 CIDRs, and/or IPv6 CIDRs to which the
 * outbound traffic will be allowed.
 */
destinationAddresses?: string[];
/**
 * An array containing the IDs of
 * the Droplets to which the outbound traffic will be allowed.
 */
destinationDropletIds?: number[];
/**
 * An array containing the IDs
 * of the Load Balancers to which the outbound traffic will be allowed.
 */
destinationLoadBalancerUids?: string[];
/**
 * An array containing the names of Tags
 * corresponding to groups of Droplets to which the outbound traffic will
 * be allowed.
 */
destinationTags?: string[];
/**
 * The ports on which traffic will be allowed
 * specified as a string containing a single port, a range (e.g. "8000-9000"),
 * or "1-65535" to open all ports for a protocol. Required for when protocol is
 * `tcp` or `udp`.
 */
portRange?: string;
/**
 * The type of traffic to be allowed.
 * This may be one of "tcp", "udp", or "icmp".
 */
protocol: string;
}
export interface GetImagesFilter {
/**
* Set to `true` to require that a field match all of the `values` instead of just one or more of
* them. This is useful when matching against multi-valued fields such as lists or sets where you want to ensure
* that all of the `values` are present in the list or set.
*/
all?: boolean;
/**
* Filter the images by this key. This may be one of `distribution`, `errorMessage`,
* `id`, `image`, `minDiskSize`, `name`, `private`, `regions`, `sizeGigabytes`, `slug`, `status`,
* `tags`, or `type`.
*/
key: string;
/**
* One of `exact` (default), `re`, or `substring`. For string-typed fields, specify `re` to
* match by using the `values` as regular expressions, or specify `substring` to match by treating the `values` as
* substrings to find within the string field.
*/
matchBy?: string;
/**
* A list of values to match against the `key` field. Only retrieves images
* where the `key` field takes on one or more of the values provided here.
*/
values: string[];
}
export interface GetImagesSort {
/**
* The sort direction. This may be either `asc` or `desc`.
*/
direction?: string;
/**
* Sort the images by this key. This may be one of `distribution`, `errorMessage`, `id`,
* `image`, `minDiskSize`, `name`, `private`, `sizeGigabytes`, `slug`, `status`, or `type`.
*/
key: string;
}
export interface GetProjectsFilter {
/**
* Set to `true` to require that a field match all of the `values` instead of just one or more of
* them. This is useful when matching against multi-valued fields such as lists or sets where you want to ensure
* that all of the `values` are present in the list or set.
*/
all?: boolean;
/**
* Filter the projects by this key. This may be one of `name`,
* `purpose`, `description`, `environment`, or `isDefault`.
*/
key: string;
/**
* One of `exact` (default), `re`, or `substring`. For string-typed fields, specify `re` to
* match by using the `values` as regular expressions, or specify `substring` to match by treating the `values` as
* substrings to find within the string field.
*/
matchBy?: string;
/**
* A list of values to match against the `key` field. Only retrieves projects
* where the `key` field takes on one or more of the values provided here.
*/
values: string[];
}
export interface GetProjectsSort {
/**
* The sort direction. This may be either `asc` or `desc`.
*/
direction?: string;
/**
* Sort the projects by this key. This may be one of `name`,
* `purpose`, `description`, or `environment`.
*/
key: string;
}
export interface GetRecordsFilter {
/**
* Set to `true` to require that a field match all of the `values` instead of just one or more of
* them. This is useful when matching against multi-valued fields such as lists or sets where you want to ensure
* that all of the `values` are present in the list or set.
*/
all?: boolean;
/**
* Filter the DNS records by this key. This may be one of `domain`, `flags`, `name`, `port`,
* `priority`, `tag`, `ttl`, `type`, `value`, or `weight`.
*/
key: string;
/**
* One of `exact` (default), `re`, or `substring`. For string-typed fields, specify `re` to
* match by using the `values` as regular expressions, or specify `substring` to match by treating the `values` as
* substrings to find within the string field.
*/
matchBy?: string;
/**
* A list of values to match against the `key` field. Only retrieves DNS records
* where the `key` field takes on one or more of the values provided here.
*/
values: string[];
}
export interface GetRecordsSort {
/**
* The sort direction. This may be either `asc` or `desc`.
*/
direction?: string;
/**
* Sort the DNS records by this key. This may be one of `domain`, `flags`, `name`, `port`,
* `priority`, `tag`, `ttl`, `type`, `value`, or `weight`.
*/
key: string;
}
export interface GetRegionsFilter {
/**
* Set to `true` to require that a field match all of the `values` instead of just one or more of
* them. This is useful when matching against multi-valued fields such as lists or sets where you want to ensure
* that all of the `values` are present in the list or set.
*/
all?: boolean;
/**
* Filter the regions by this key. This may be one of `slug`,
* `name`, `available`, `features`, or `sizes`.
*/
key: string;
/**
* One of `exact` (default), `re`, or `substring`. For string-typed fields, specify `re` to
* match by using the `values` as regular expressions, or specify `substring` to match by treating the `values` as
* substrings to find within the string field.
*/
matchBy?: string;
/**
* A list of values to match against the `key` field. Only retrieves regions
* where the `key` field takes on one or more of the values provided here.
*/
values: string[];
}
export interface GetRegionsSort {
/**
* The sort direction. This may be either `asc` or `desc`.
*/
direction?: string;
/**
* Sort the regions by this key. This may be one of `slug`,
* `name`, or `available`.
*/
key: string;
}
export interface GetSizesFilter {
/**
 * Set to `true` to require that a field match all of the `values` instead of just one or more of
 * them. This is useful when matching against multi-valued fields such as lists or sets where you want to ensure
 * that all of the `values` are present in the list or set.
 */
all?: boolean;
/**
 * Filter the sizes by this key. This may be one of `slug`,
 * `regions`, `memory`, `vcpus`, `disk`, `transfer`, `priceMonthly`,
 * `priceHourly`, or `available`.
 */
key: string;
/**
 * One of `exact` (default), `re`, or `substring`. For string-typed fields, specify `re` to
 * match by using the `values` as regular expressions, or specify `substring` to match by treating the `values` as
 * substrings to find within the string field.
 */
matchBy?: string;
/**
 * A list of values to match against the `key` field. Only retrieves sizes
 * where the `key` field takes on one or more of the values provided here.
 */
values: string[];
}
export interface GetSizesSort {
/**
* The sort direction. This may be either `asc` or `desc`.
*/
direction?: string;
/**
* Sort the sizes by this key. This may be one of `slug`,
* `memory`, `vcpus`, `disk`, `transfer`, `priceMonthly`, or `priceHourly`.
*/
key: string;
}
export interface GetSpacesBucketsFilter {
/**
 * Set to `true` to require that a field match all of the `values` instead of just one or more of
 * them. This is useful when matching against multi-valued fields such as lists or sets where you want to ensure
 * that all of the `values` are present in the list or set.
 */
all?: boolean;
/**
 * Filter the Spaces buckets by this key. This may be one of `bucketDomainName`, `name`, `region`, or `urn`.
 */
key: string;
/**
 * One of `exact` (default), `re`, or `substring`. For string-typed fields, specify `re` to
 * match by using the `values` as regular expressions, or specify `substring` to match by treating the `values` as
 * substrings to find within the string field.
 */
matchBy?: string;
/**
 * A list of values to match against the `key` field. Only retrieves Spaces buckets
 * where the `key` field takes on one or more of the values provided here.
 */
values: string[];
}
export interface GetSpacesBucketsSort {
    /**
     * The sort direction. This may be either `asc` or `desc`.
     */
    direction?: string;
    /**
     * Sort the Spaces buckets by this key. This may be one of `bucketDomainName`, `name`, `region`, or `urn`.
     */
    key: string;
}
export interface GetSshKeysFilter {
    /**
     * Set to `true` to require that a field match all of the `values` instead of just one or more of
     * them. This is useful when matching against multi-valued fields such as lists or sets where you want to ensure
     * that all of the `values` are present in the list or set.
     */
    all?: boolean;
    /**
     * Filter the SSH Keys by this key. This may be one of `name`, `publicKey`, or `fingerprint`.
     */
    key: string;
    /**
     * One of `exact` (default), `re`, or `substring`. For string-typed fields, specify `re` to
     * match by using the `values` as regular expressions, or specify `substring` to match by treating the `values` as
     * substrings to find within the string field.
     */
    matchBy?: string;
    /**
     * A list of values to match against the `key` field. Only retrieves SSH Keys
     * where the `key` field takes on one or more of the values provided here.
     */
    values: string[];
}
export interface GetSshKeysSort {
/**
* The sort direction. This may be either `asc` or `desc`.
*/
direction?: string;
/**
* Sort the SSH Keys by this key. This may be one of `name`, `publicKey`, or `fingerprint`.
*/
key: string;
}
export interface GetTagsFilter {
    /**
     * Set to `true` to require that a field match all of the `values` instead of just one or more of
     * them. This is useful when matching against multi-valued fields such as lists or sets where you want to ensure
     * that all of the `values` are present in the list or set.
     */
    all?: boolean;
    /**
     * Filter the tags by this key. This may be one of `name`, `totalResourceCount`, `dropletsCount`, `imagesCount`, `volumesCount`, `volumeSnapshotsCount`, or `databasesCount`.
     */
    key: string;
    /**
     * One of `exact` (default), `re`, or `substring`. For string-typed fields, specify `re` to
     * match by using the `values` as regular expressions, or specify `substring` to match by treating the `values` as
     * substrings to find within the string field.
     */
    matchBy?: string;
    /**
     * A list of values to match against the `key` field. Only retrieves tags
     * where the `key` field takes on one or more of the values provided here.
     */
    values: string[];
}
export interface GetTagsSort {
/**
* The sort direction. This may be either `asc` or `desc`.
*/
direction?: string;
/**
* Sort the tags by this key. This may be one of `name`, `totalResourceCount`, `dropletsCount`, `imagesCount`, `volumesCount`, `volumeSnapshotsCount`, or `databasesCount`.
*/
key: string;
}
export interface KubernetesClusterKubeConfig {
/**
* The base64 encoded public certificate used by clients to access the cluster. Only available if token authentication is not supported on your cluster.
*/
clientCertificate?: pulumi.Input<string>;
/**
* The base64 encoded private key used by clients to access the cluster. Only available if token authentication is not supported on your cluster.
*/
clientKey?: pulumi.Input<string>;
/**
* The base64 encoded public certificate for the cluster's certificate authority.
*/
clusterCaCertificate?: pulumi.Input<string>;
/**
* The date and time when the credentials will expire and need to be regenerated.
*/
expiresAt?: pulumi.Input<string>;
/**
* The URL of the API server on the Kubernetes master node.
*/
host?: pulumi.Input<string>;
/**
* The full contents of the Kubernetes cluster's kubeconfig file.
*/
rawConfig?: pulumi.Input<string>;
/**
* The DigitalOcean API access token used by clients to access the cluster.
*/
token?: pulumi.Input<string>;
}
export interface KubernetesClusterMaintenancePolicy {
    /**
     * The day of the maintenance window policy. May be one of "monday" through "sunday", or "any" to indicate an arbitrary week day.
     */
    day?: pulumi.Input<string>;
    /**
     * The duration of the maintenance window — presumably a computed value returned by the
     * DigitalOcean API rather than user-settable. NOTE(review): not documented upstream in
     * this file; confirm format against the provider docs.
     */
    duration?: pulumi.Input<string>;
    /**
     * The start time in UTC of the maintenance window policy in 24-hour clock format / HH:MM notation (e.g., 15:00).
     */
    startTime?: pulumi.Input<string>;
}
export interface KubernetesClusterNodePool {
/**
* A computed field representing the actual number of nodes in the node pool, which is especially useful when auto-scaling is enabled.
*/
actualNodeCount?: pulumi.Input<number>;
/**
* Enable auto-scaling of the number of nodes in the node pool within the given min/max range.
*/
autoScale?: pulumi.Input<boolean>;
/**
* A unique ID that can be used to identify and reference the node.
*/
id?: pulumi.Input<string>;
/**
* A map of key/value pairs to apply to nodes in the pool. The labels are exposed in the Kubernetes API as labels in the metadata of the corresponding [Node resources](https://kubernetes.io/docs/concepts/architecture/nodes/).
*/
labels?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
/**
* If auto-scaling is enabled, this represents the maximum number of nodes that the node pool can be scaled up to.
*/
maxNodes?: pulumi.Input<number>;
/**
* If auto-scaling is enabled, this represents the minimum number of nodes that the node pool can be scaled down to.
*/
minNodes?: pulumi.Input<number>;
/**
* A name for the node pool.
*/
name: pulumi.Input<string>;
/**
* The number of Droplet instances in the node pool. If auto-scaling is enabled, this should only be set if the desired result is to explicitly reset the number of nodes to this value. If auto-scaling is enabled, and the node count is outside of the given min/max range, it will use the min nodes value.
*/
nodeCount?: pulumi.Input<number>;
/**
* A list of nodes in the pool. Each node exports the following attributes:
*/
nodes?: pulumi.Input<pulumi.Input<inputs.KubernetesClusterNodePoolNode>[]>;
/**
* The slug identifier for the type of Droplet to be used as workers in the node pool.
*/
size: pulumi.Input<string>;
/**
* A list of tag names to be applied to the Kubernetes cluster.
*/
tags?: pulumi.Input<pulumi.Input<string>[]>;
/**
* A block representing a taint applied to all nodes in the pool. Each taint exports the following attributes (taints must be unique by key and effect pair):
*/
taints?: pulumi.Input<pulumi.Input<inputs.KubernetesClusterNodePoolTaint>[]>;
}
export interface KubernetesClusterNodePoolNode {
/**
* The date and time when the node was created.
*/
createdAt?: pulumi.Input<string>;
/**
* The id of the node's droplet
*/
dropletId?: pulumi.Input<string>;
/**
* A unique ID that can be used to identify and reference the node.
*/
id?: pulumi.Input<string>;
/**
* A name for the node pool.
*/
name?: pulumi.Input<string>;
/**
* A string indicating the current status of the individual node.
*/
status?: pulumi.Input<string>;
/**
* The date and time when the node was last updated.
*/
updatedAt?: pulumi.Input<string>;
}
export interface KubernetesClusterNodePoolTaint {
/**
* How the node reacts to pods that it won't tolerate. Available effect values are: "NoSchedule", "PreferNoSchedule", "NoExecute".
*/
effect: pulumi.Input<string>;
/**
* An arbitrary string. The "key" and "value" fields of the "taint" object form a key-value pair.
*/
key: pulumi.Input<string>;
/**
* An arbitrary string. The "key" and "value" fields of the "taint" object form a key-value pair.
*/
value: pulumi.Input<string>;
}
export interface KubernetesNodePoolNode {
/**
* The date and time when the node was created.
*/
createdAt?: pulumi.Input<string>;
/**
* The id of the node's droplet
*/
dropletId?: pulumi.Input<string>;
/**
* A unique ID that can be used to identify and reference the node.
*/
id?: pulumi.Input<string>;
/**
* A name for the node pool.
*/
name?: pulumi.Input<string>;
/**
* A string indicating the current status of the individual node.
*/
status?: pulumi.Input<string>;
/**
* The date and time when the node was last updated.
*/
updatedAt?: pulumi.Input<string>;
}
export interface KubernetesNodePoolTaint {
/**
* How the node reacts to pods that it won't tolerate. Available effect values are: "NoSchedule", "PreferNoSchedule", "NoExecute".
*/
effect: pulumi.Input<string>;
/**
* An arbitrary string. The "key" and "value" fields of the "taint" object form a key-value pair.
*/
key: pulumi.Input<string>;
/**
* An arbitrary string. The "key" and "value" fields of the "taint" object form a key-value pair.
*/
value: pulumi.Input<string>;
}
export interface LoadBalancerForwardingRule {
/**
* **Deprecated** The ID of the TLS certificate to be used for SSL termination.
*
* @deprecated Certificate IDs may change, for example when a Let's Encrypt certificate is auto-renewed. Please specify 'certificate_name' instead.
*/
certificateId?: pulumi.Input<string>;
/**
* The unique name of the TLS certificate to be used for SSL termination.
*/
certificateName?: pulumi.Input<string>;
/**
* An integer representing the port on which the Load Balancer instance will listen.
*/
entryPort: pulumi.Input<number>;
/**
* The protocol used for traffic to the Load Balancer. The possible values are: `http`, `https`, `http2` or `tcp`.
*/
entryProtocol: pulumi.Input<string>;
/**
* An integer representing the port on the backend Droplets to which the Load Balancer will send traffic.
*/
targetPort: pulumi.Input<number>;
/**
* The protocol used for traffic from the Load Balancer to the backend Droplets. The possible values are: `http`, `https`, `http2` or `tcp`.
*/
targetProtocol: pulumi.Input<string>;
/**
* A boolean value indicating whether SSL encrypted traffic will be passed through to the backend Droplets. The default value is `false`.
*/
tlsPassthrough?: pulumi.Input<boolean>;
}
export interface LoadBalancerHealthcheck {
/**
* The number of seconds between between two consecutive health checks. If not specified, the default value is `10`.
*/
checkIntervalSeconds?: pulumi.Input<number>;
/**
* The number of times a health check must pass for a backend Droplet to be marked "healthy" and be re-added to the pool. If not specified, the default value is `5`.
*/
healthyThreshold?: pulumi.Input<number>;
/**
* The path on the backend Droplets to which the Load Balancer instance will send a request.
*/
path?: pulumi.Input<string>;
/**
* An integer representing the port on the backend Droplets on which the health check will attempt a connection.
*/
port: pulumi.Input<number>;
/**
* The protocol used for health checks sent to the backend Droplets. The possible values are `http`, `https` or `tcp`.
*/
protocol: pulumi.Input<string>;
/**
* The number of seconds the Load Balancer instance will wait for a response until marking a health check as failed. If not specified, the default value is `5`.
*/
responseTimeoutSeconds?: pulumi.Input<number>;
/**
* The number of times a health check must fail for a backend Droplet to be marked "unhealthy" and be removed from the pool. If not specified, the default value is `3`.
*/
unhealthyThreshold?: pulumi.Input<number>;
}
export interface LoadBalancerStickySessions {
/**
* The name to be used for the cookie sent to the client. This attribute is required when using `cookies` for the sticky sessions type.
*/
cookieName?: pulumi.Input<string>;
/**
* The number of seconds until the cookie set by the Load Balancer expires. This attribute is required when using `cookies` for the sticky sessions type.
*/
cookieTtlSeconds?: pulumi.Input<number>;
/**
* An attribute indicating how and if requests from a client will be persistently served by the same backend Droplet. The possible values are `cookies` or `none`. If not specified, the default value is `none`.
*/
type?: pulumi.Input<string>;
}
export interface MonitorAlertAlerts {
    /**
     * A list of email addresses to notify when the alert triggers.
     */
    emails?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * A list of Slack destinations (channel + webhook URL) to notify when the alert triggers.
     */
    slacks?: pulumi.Input<pulumi.Input<inputs.MonitorAlertAlertsSlack>[]>;
}
export interface MonitorAlertAlertsSlack {
    /**
     * The Slack channel to post the alert notification to.
     */
    channel: pulumi.Input<string>;
    /**
     * The Slack webhook URL used to deliver the notification.
     */
    url: pulumi.Input<string>;
}
export interface SpacesBucketCorsRule {
/**
* A list of headers that will be included in the CORS preflight request's `Access-Control-Request-Headers`. A header may contain one wildcard (e.g. `x-amz-*`).
*/
allowedHeaders?: pulumi.Input<pulumi.Input<string>[]>;
/**
* A list of HTTP methods (e.g. `GET`) which are allowed from the specified origin.
*/
allowedMethods: pulumi.Input<pulumi.Input<string>[]>;
/**
* A list of hosts from which requests using the specified methods are allowed. A host may contain one wildcard (e.g. http://*.example.com).
*/
allowedOrigins: pulumi.Input<pulumi.Input<string>[]>;
/**
* The time in seconds that browser can cache the response for a preflight request.
*/
maxAgeSeconds?: pulumi.Input<number>;
}
export interface SpacesBucketLifecycleRule {
/**
* Specifies the number of days after initiating a multipart
* upload when the multipart upload must be completed or else Spaces will abort the upload.
*/
abortIncompleteMultipartUploadDays?: pulumi.Input<number>;
/**
* Specifies lifecycle rule status.
*/
enabled: pulumi.Input<boolean>;
/**
* Specifies a time period after which applicable objects expire (documented below).
*/
expiration?: pulumi.Input<inputs.SpacesBucketLifecycleRuleExpiration>;
/**
* Unique identifier for the rule.
*/
id?: pulumi.Input<string>;
/**
* Specifies when non-current object versions expire (documented below).
*/
noncurrentVersionExpiration?: pulumi.Input<inputs.SpacesBucketLifecycleRuleNoncurrentVersionExpiration>;
/**
* Object key prefix identifying one or more objects to which the rule applies.
*/
prefix?: pulumi.Input<string>;
}
export interface SpacesBucketLifecycleRuleExpiration {
/**
* Specifies the date/time after which you want applicable objects to expire. The argument uses
* RFC3339 format, e.g. "2020-03-22T15:03:55Z" or parts thereof e.g. "2019-02-28".
*/
date?: pulumi.Input<string>;
/**
* Specifies the number of days after object creation when the applicable objects will expire.
*/
days?: pulumi.Input<number>;
/**
* On a versioned bucket (versioning-enabled or versioning-suspended
* bucket), setting this to true directs Spaces to delete expired object delete markers.
*/
expiredObjectDeleteMarker?: pulumi.Input<boolean>;
}
export interface SpacesBucketLifecycleRuleNoncurrentVersionExpiration {
/**
* Specifies the number of days after which an object's non-current versions expire.
*/
days?: pulumi.Input<number>;
}
export interface SpacesBucketVersioning {
/**
* Enable versioning. Once you version-enable a bucket, it can never return to an unversioned
* state. You can, however, suspend versioning on that bucket.
*/
enabled?: pulumi.Input<boolean>;
} | the_stack |
module android.graphics.drawable {
import Resources = android.content.res.Resources;
import SystemClock = android.os.SystemClock;
import System = java.lang.System;
import Runnable = java.lang.Runnable;
import Animatable = android.graphics.drawable.Animatable;
import Drawable = android.graphics.drawable.Drawable;
import DrawableContainer = android.graphics.drawable.DrawableContainer;
import TypedArray = android.content.res.TypedArray;
/**
*
* An object used to create frame-by-frame animations, defined by a series of Drawable objects,
* which can be used as a View object's background.
* <p>
* The simplest way to create a frame-by-frame animation is to define the animation in an XML
* file, placed in the res/drawable/ folder, and set it as the background to a View object. Then, call
* {@link #start()} to run the animation.
* <p>
* An AnimationDrawable defined in XML consists of a single <code><animation-list></code> element,
* and a series of nested <code><item></code> tags. Each item defines a frame of the animation.
* See the example below.
* </p>
* <p>spin_animation.xml file in res/drawable/ folder:</p>
* <pre><!-- Animation frames are wheel0.png -- wheel5.png files inside the
* res/drawable/ folder -->
* <animation-list android:id="@+id/selected" android:oneshot="false">
* <item android:drawable="@drawable/wheel0" android:duration="50" />
* <item android:drawable="@drawable/wheel1" android:duration="50" />
* <item android:drawable="@drawable/wheel2" android:duration="50" />
* <item android:drawable="@drawable/wheel3" android:duration="50" />
* <item android:drawable="@drawable/wheel4" android:duration="50" />
* <item android:drawable="@drawable/wheel5" android:duration="50" />
* </animation-list></pre>
*
* <p>Here is the code to load and play this animation.</p>
* <pre>
* // Load the ImageView that will host the animation and
* // set its background to our AnimationDrawable XML resource.
* ImageView img = (ImageView)findViewById(R.id.spinning_wheel_image);
* img.setBackgroundResource(R.drawable.spin_animation);
*
* // Get the background, which has been compiled to an AnimationDrawable object.
* AnimationDrawable frameAnimation = (AnimationDrawable) img.getBackground();
*
* // Start the animation (looped playback by default).
* frameAnimation.start();
* </pre>
*
* <div class="special reference">
* <h3>Developer Guides</h3>
* <p>For more information about animating with {@code AnimationDrawable}, read the
* <a href="{@docRoot}guide/topics/graphics/drawable-animation.html">Drawable Animation</a>
* developer guide.</p>
* </div>
*
* @attr ref android.R.styleable#AnimationDrawable_visible
* @attr ref android.R.styleable#AnimationDrawable_variablePadding
* @attr ref android.R.styleable#AnimationDrawable_oneshot
* @attr ref android.R.styleable#AnimationDrawableItem_duration
* @attr ref android.R.styleable#AnimationDrawableItem_drawable
*/
export class AnimationDrawable extends DrawableContainer implements Runnable, Animatable {
    private mAnimationState:AnimationDrawable.AnimationState;
    // Index of the currently displayed frame; -1 means the animation is not running.
    private mCurFrame:number = -1;
    // FIX: this field was commented out, but mutate() below reads and writes
    // `this.mMutated`, making it an undeclared property. Restore the declaration.
    private mMutated:boolean = false;

    constructor(state?:AnimationDrawable.AnimationState) {
        super();
        let _as:AnimationDrawable.AnimationState = new AnimationDrawable.AnimationState(state, this);
        this.mAnimationState = _as;
        this.setConstantState(_as);
        if (state != null) {
            // Cloned from an existing state: show the first frame without scheduling.
            this.setFrame(0, true, false);
        }
    }

    /**
     * Restarts the animation from frame 0 when the drawable (re)becomes visible,
     * and unschedules pending frames when it is hidden.
     */
    setVisible(visible:boolean, restart:boolean):boolean {
        let changed:boolean = super.setVisible(visible, restart);
        if (visible) {
            if (changed || restart) {
                this.setFrame(0, true, true);
            }
        } else {
            this.unscheduleSelf(this);
        }
        return changed;
    }

    /**
     * <p>Starts the animation, looping if necessary. This method has no effect
     * if the animation is running. Do not call this in the {@link android.app.Activity#onCreate}
     * method of your activity, because the {@link android.graphics.drawable.AnimationDrawable} is
     * not yet fully attached to the window. If you want to play
     * the animation immediately, without requiring interaction, then you might want to call it
     * from the {@link android.app.Activity#onWindowFocusChanged} method in your activity,
     * which will get called when Android brings your window into focus.</p>
     *
     * @see #isRunning()
     * @see #stop()
     */
    start():void {
        if (!this.isRunning()) {
            this.run();
        }
    }

    /**
     * <p>Stops the animation. This method has no effect if the animation is
     * not running.</p>
     *
     * @see #isRunning()
     * @see #start()
     */
    stop():void {
        if (this.isRunning()) {
            this.unscheduleSelf(this);
        }
    }

    /**
     * <p>Indicates whether the animation is currently running or not.</p>
     *
     * @return true if the animation is running, false otherwise
     */
    isRunning():boolean {
        return this.mCurFrame > -1;
    }

    /**
     * <p>This method exists for implementation purpose only and should not be
     * called directly. Invoke {@link #start()} instead.</p>
     *
     * @see #start()
     */
    run():void {
        this.nextFrame(false);
    }

    /** Cancels any scheduled frame advance and marks the animation as stopped. */
    unscheduleSelf(what:Runnable):void {
        this.mCurFrame = -1;
        super.unscheduleSelf(what);
    }

    /**
     * @return The number of frames in the animation
     */
    getNumberOfFrames():number {
        return this.mAnimationState.getChildCount();
    }

    /**
     * @return The Drawable at the specified frame index
     */
    getFrame(index:number):Drawable {
        return this.mAnimationState.getChild(index);
    }

    /**
     * @return The duration in milliseconds of the frame at the
     * specified index
     */
    getDuration(i:number):number {
        return this.mAnimationState.mDurations[i];
    }

    /**
     * @return True of the animation will play once, false otherwise
     */
    isOneShot():boolean {
        return this.mAnimationState.mOneShot;
    }

    /**
     * Sets whether the animation should play once or repeat.
     *
     * @param oneShot Pass true if the animation should only play once
     */
    setOneShot(oneShot:boolean):void {
        this.mAnimationState.mOneShot = oneShot;
    }

    /**
     * Add a frame to the animation
     *
     * @param frame The frame to add
     * @param duration How long in milliseconds the frame should appear
     */
    addFrame(frame:Drawable, duration:number):void {
        this.mAnimationState.addFrame(frame, duration);
        if (this.mCurFrame < 0) {
            this.setFrame(0, true, false);
        }
    }

    /** Advances to the next frame, wrapping to 0; keeps animating unless one-shot is done. */
    private nextFrame(unschedule:boolean):void {
        let next:number = this.mCurFrame + 1;
        const N:number = this.mAnimationState.getChildCount();
        if (next >= N) {
            next = 0;
        }
        this.setFrame(next, unschedule, !this.mAnimationState.mOneShot || next < (N - 1));
    }

    /** Displays frame `frame` and, if `animate`, schedules the following frame. */
    private setFrame(frame:number, unschedule:boolean, animate:boolean):void {
        if (frame >= this.mAnimationState.getChildCount()) {
            return;
        }
        this.mCurFrame = frame;
        this.selectDrawable(frame);
        if (unschedule) {
            this.unscheduleSelf(this);
        }
        if (animate) {
            // Unscheduling may have clobbered this value; restore it to record that we're animating
            this.mCurFrame = frame;
            this.scheduleSelf(this, SystemClock.uptimeMillis() + this.mAnimationState.mDurations[frame]);
        }
    }

    /**
     * Builds the frame list from an <animation-list> XML element: each <item> child
     * contributes a drawable plus a required 'duration' attribute.
     */
    inflate(r:Resources, parser:HTMLElement):void {
        super.inflate(r, parser);
        let a:TypedArray = r.obtainAttributes(parser);
        this.mAnimationState.setVariablePadding(a.getBoolean("android:variablePadding", false));
        this.mAnimationState.mOneShot = a.getBoolean("android:oneshot", false);
        a.recycle();
        //parse children
        for (let child of Array.from(parser.children)) {
            let item = <HTMLElement>child;
            if (item.tagName.toLowerCase() !== 'item') {
                continue;
            }
            a = r.obtainAttributes(item);
            let duration:number = a.getInt("android:duration", -1);
            if (duration < 0) {
                throw Error(`new XmlPullParserException(parser.getPositionDescription() + ": <item> tag requires a 'duration' attribute")`);
            }
            let dr:Drawable = a.getDrawable("android:drawable");
            a.recycle();
            if (!dr && item.children[0] instanceof HTMLElement) {
                // No 'drawable' attribute: fall back to an inline child drawable element.
                dr = Drawable.createFromXml(r, <HTMLElement>item.children[0]);
            }
            if (!dr) {
                throw Error(`new XmlPullParserException(<item> tag requires a 'drawable' attribute or child tag defining a drawable)`);
            }
            this.mAnimationState.addFrame(dr, duration);
            // dr is guaranteed non-null past the throw above.
            dr.setCallback(this);
        }
        this.setFrame(0, true, false);
    }

    /** Copies the shared durations array on first mutation so per-instance edits don't leak. */
    mutate():Drawable {
        if (!this.mMutated && super.mutate() == this) {
            this.mAnimationState.mDurations = [...this.mAnimationState.mDurations];
            this.mMutated = true;
        }
        return this;
    }
}
export module AnimationDrawable {
    export class AnimationState extends DrawableContainer.DrawableContainerState {
        // FIX: these fields were declared `private`, but AnimationDrawable reads and
        // writes them directly (getDuration, isOneShot, setOneShot, nextFrame, inflate,
        // mutate), which TypeScript rejects for private members outside the declaring
        // class. They are package-private in the original Java source, so expose them.
        mDurations:number[];
        mOneShot:boolean;

        constructor(orig:AnimationState, owner:AnimationDrawable) {
            super(orig, owner);
            if (orig != null) {
                // Share the source state's data; AnimationDrawable.mutate() copies on write.
                this.mDurations = orig.mDurations;
                this.mOneShot = orig.mOneShot;
            } else {
                this.mDurations = androidui.util.ArrayCreator.newNumberArray(this.getCapacity());
                this.mOneShot = true;
            }
        }

        /** Creates a new AnimationDrawable sharing this constant state. */
        newDrawable():Drawable {
            return new AnimationDrawable(this);
        }

        /** Appends a frame drawable and records its display duration. */
        addFrame(dr:Drawable, dur:number):void {
            // Do not combine the following. The array index must be evaluated before
            // the array is accessed because super.addChild(dr) has a side effect on mDurations.
            let pos:number = super.addChild(dr);
            this.mDurations[pos] = dur;
        }

        //growArray(oldSize:number, newSize:number):void {
        //    super.growArray(oldSize, newSize);
        //    let newDurations:number[] = androidui.util.ArrayCreator.newNumberArray(newSize);
        //    System.arraycopy(this.mDurations, 0, newDurations, 0, oldSize);
        //    this.mDurations = newDurations;
        //}
    }
}
} | the_stack |
import { Disclosure, DisclosureButton, DisclosurePanel } from ".";
import { suppressConsoleLogs } from "$lib/test-utils/suppress-console-logs";
import { render } from "@testing-library/svelte";
import TestRenderer from "$lib/test-utils/TestRenderer.svelte";
import {
assertActiveElement,
assertDisclosureButton,
assertDisclosurePanel,
DisclosureState,
getByText,
getDisclosureButton,
getDisclosurePanel,
} from "$lib/test-utils/accessibility-assertions";
import { click, Keys, MouseButton, press } from "$lib/test-utils/interactions";
import { Transition, TransitionChild } from "../transitions";
import TransitionDebug from "./_TransitionDebug.svelte";
import svelte from "svelte-inline-compile";
// Monotonically increasing counter backing the mocked `useId` hook below.
let mockId = 0;
// Replace the real id generator with a deterministic counter so the
// `headlessui-disclosure-button-<n>` ids asserted throughout these tests are stable.
jest.mock("../../hooks/use-id", () => {
  return {
    useId: jest.fn(() => ++mockId),
  };
});
// Reset the counter before every test so generated ids always start at 1.
beforeEach(() => (mockId = 0));
afterAll(() => jest.restoreAllMocks());
/**
 * Resolves after two consecutive animation-frame callbacks have fired, giving
 * the browser a chance to flush pending DOM/paint work before assertions run.
 */
function nextFrame(): Promise<void> {
  const oneFrame = () =>
    new Promise<void>((resolve) => requestAnimationFrame(() => resolve()));
  return oneFrame().then(oneFrame);
}
describe("Safe guards", () => {
  // Each child component requires a parent <Disclosure /> context; rendering
  // one standalone must throw the descriptive "missing a parent" error.
  it.each([
    ["DisclosureButton", DisclosureButton],
    ["DisclosurePanel", DisclosurePanel],
  ])(
    "should error when we are using a <%s /> without a parent <Disclosure />",
    suppressConsoleLogs((name, Component) => {
      expect(() => render(Component)).toThrowError(
        `<${name} /> is missing a parent <Disclosure /> component.`
      );
    })
  );
  it(
    "should be possible to render a Disclosure without crashing",
    suppressConsoleLogs(async () => {
      // TestRenderer takes a nested [Component, props, children] tree description.
      render(TestRenderer, {
        allProps: [
          Disclosure,
          {},
          [
            [DisclosureButton, {}, "Trigger"],
            [DisclosurePanel, {}, "Contents"],
          ],
        ],
      });
      // A freshly rendered disclosure starts closed: the panel is unmounted and
      // the button id comes from the deterministic useId mock above.
      assertDisclosureButton({
        state: DisclosureState.InvisibleUnmounted,
        attributes: { id: "headlessui-disclosure-button-1" },
      });
      assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted });
    })
  );
});
describe("Rendering", () => {
describe('Disclosure', () => {
it(
'should render a Disclosure with slot props',
suppressConsoleLogs(async () => {
render(svelte`
<Disclosure let:open>
<DisclosureButton>Trigger</DisclosureButton>
<DisclosurePanel>Panel is: {open ? 'open' : 'closed'}</DisclosurePanel>
</Disclosure>
`)
assertDisclosureButton({
state: DisclosureState.InvisibleUnmounted,
attributes: { id: 'headlessui-disclosure-button-1' },
})
assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })
await click(getDisclosureButton())
assertDisclosureButton({
state: DisclosureState.Visible,
attributes: { id: 'headlessui-disclosure-button-1' },
})
assertDisclosurePanel({ state: DisclosureState.Visible, textContent: 'Panel is: open' })
})
)
it('should be possible to render a Disclosure in an open state by default', async () => {
render(svelte`
<Disclosure defaultOpen let:open>
<DisclosureButton>Trigger</DisclosureButton>
<DisclosurePanel>Panel is: {open ? 'open' : 'closed'}</DisclosurePanel>
</Disclosure>
`)
assertDisclosureButton({
state: DisclosureState.Visible,
attributes: { id: 'headlessui-disclosure-button-1' },
})
assertDisclosurePanel({ state: DisclosureState.Visible, textContent: 'Panel is: open' })
await click(getDisclosureButton())
assertDisclosureButton({ state: DisclosureState.InvisibleUnmounted })
})
it(
'should expose a close function that closes the disclosure',
suppressConsoleLogs(async () => {
render(svelte`
<Disclosure let:close>
<DisclosureButton>Trigger</DisclosureButton>
<DisclosurePanel>
<button on:click={() => close()}>Close me</button>
</DisclosurePanel>
</Disclosure>
`)
// Focus the button
getDisclosureButton()?.focus()
// Ensure the button is focused
assertActiveElement(getDisclosureButton())
// Open the disclosure
await click(getDisclosureButton())
// Ensure we can click the close button
await click(getByText('Close me'))
// Ensure the disclosure is closed
assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })
// Ensure the DisclosureButton got the restored focus
assertActiveElement(getByText('Trigger'))
})
)
it(
'should expose a close function that closes the disclosure and restores to a specific element',
suppressConsoleLogs(async () => {
render(svelte`
<button id="test">restoreable</button>
<Disclosure let:close>
<DisclosureButton>Trigger</DisclosureButton>
<DisclosurePanel>
<button on:click={() => close(document.getElementById('test'))}>
Close me
</button>
</DisclosurePanel>
</Disclosure>
`)
// Focus the button
getDisclosureButton()?.focus()
// Ensure the button is focused
assertActiveElement(getDisclosureButton())
// Open the disclosure
await click(getDisclosureButton())
// Ensure we can click the close button
await click(getByText('Close me'))
// Ensure the disclosure is closed
assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })
// Ensure the restoreable button got the restored focus
assertActiveElement(getByText('restoreable'))
})
)
})
describe("DisclosureButton", () => {
it(
'should render a DisclosureButton with slot props',
suppressConsoleLogs(async () => {
render(svelte`
<Disclosure let:open>
<DisclosureButton>{JSON.stringify({ open })}</DisclosureButton>
<DisclosurePanel></DisclosurePanel>
</Disclosure>
`)
assertDisclosureButton({
state: DisclosureState.InvisibleUnmounted,
attributes: { id: 'headlessui-disclosure-button-1' },
textContent: JSON.stringify({ open: false }),
})
assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })
await click(getDisclosureButton())
assertDisclosureButton({
state: DisclosureState.Visible,
attributes: { id: 'headlessui-disclosure-button-1' },
textContent: JSON.stringify({ open: true }),
})
assertDisclosurePanel({ state: DisclosureState.Visible })
})
)
it(
'should be possible to render a DisclosureButton using a render prop and an `as` prop',
suppressConsoleLogs(async () => {
render(svelte`
<Disclosure>
<DisclosureButton as="div" role="button" let:open>
{JSON.stringify({ open })}
</DisclosureButton>
<DisclosurePanel />
</Disclosure>
`)
assertDisclosureButton({
state: DisclosureState.InvisibleUnmounted,
attributes: { id: 'headlessui-disclosure-button-1' },
textContent: JSON.stringify({ open: false }),
})
assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })
await click(getDisclosureButton())
assertDisclosureButton({
state: DisclosureState.Visible,
attributes: { id: 'headlessui-disclosure-button-1' },
textContent: JSON.stringify({ open: true }),
})
assertDisclosurePanel({ state: DisclosureState.Visible })
})
)
describe('`type` attribute', () => {
it('should set the `type` to "button" by default', async () => {
render(
TestRenderer, {
allProps: [
Disclosure, {},
[
[DisclosureButton, {}, "Trigger"]
]
]
})
expect(getDisclosureButton()).toHaveAttribute('type', 'button')
})
it('should not set the `type` to "button" if it already contains a `type`', async () => {
render(
TestRenderer, {
allProps: [
Disclosure, {},
[
[DisclosureButton, { type: "submit" }, "Trigger"]
]
]
})
expect(getDisclosureButton()).toHaveAttribute('type', 'submit')
})
it('should not set the type if the "as" prop is not a "button"', async () => {
render(
TestRenderer, {
allProps: [
Disclosure, {},
[
[DisclosureButton, { as: "div" }, "Trigger"]
]
]
})
expect(getDisclosureButton()).not.toHaveAttribute('type')
})
})
})
describe('DisclosurePanel', () => {
it(
'should render a DisclosurePanel with slot props',
suppressConsoleLogs(async () => {
render(svelte`
<Disclosure>
<DisclosureButton>Trigger</DisclosureButton>
<DisclosurePanel let:open>{JSON.stringify({ open })}</DisclosurePanel>
</Disclosure>
`)
assertDisclosureButton({
state: DisclosureState.InvisibleUnmounted,
attributes: { id: 'headlessui-disclosure-button-1' },
})
assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })
await click(getDisclosureButton())
assertDisclosureButton({
state: DisclosureState.Visible,
attributes: { id: 'headlessui-disclosure-button-1' },
})
assertDisclosurePanel({
state: DisclosureState.Visible,
textContent: JSON.stringify({ open: true }),
})
})
)
it('should be possible to always render the DisclosurePanel if we provide it a `static` prop', () => {
render(
TestRenderer, {
allProps: [
Disclosure, {},
[
[DisclosureButton, {}, "Trigger"],
[DisclosurePanel, { static: true }, "Contents"]
]
]
})
// Let's verify that the Disclosure is already there
expect(getDisclosurePanel()).not.toBe(null)
})
it('should be possible to use a different render strategy for the DisclosurePanel', async () => {
render(
TestRenderer, {
allProps: [
Disclosure, {},
[
[DisclosureButton, {}, "Trigger"],
[DisclosurePanel, { unmount: false }, "Contents"]
]
]
})
assertDisclosureButton({ state: DisclosureState.InvisibleHidden })
assertDisclosurePanel({ state: DisclosureState.InvisibleHidden })
// Let's open the Disclosure, to see if it is not hidden anymore
await click(getDisclosureButton())
assertDisclosureButton({ state: DisclosureState.Visible })
assertDisclosurePanel({ state: DisclosureState.Visible })
// Let's re-click the Disclosure, to see if it is hidden again
await click(getDisclosureButton())
assertDisclosureButton({ state: DisclosureState.InvisibleHidden })
assertDisclosurePanel({ state: DisclosureState.InvisibleHidden })
})
    // The panel slot exposes a `close` function (via `let:close`); calling it
    // with no argument closes the disclosure and restores focus to the
    // DisclosureButton.
    it(
      'should expose a close function that closes the disclosure',
      suppressConsoleLogs(async () => {
        render(svelte`
          <Disclosure>
            <DisclosureButton>Trigger</DisclosureButton>
            <DisclosurePanel let:close>
              <button on:click={() => close()}>Close me</button>
            </DisclosurePanel>
          </Disclosure>
        `)

        // Focus the button
        getDisclosureButton()?.focus()

        // Ensure the button is focused
        assertActiveElement(getDisclosureButton())

        // Open the disclosure
        await click(getDisclosureButton())

        // Ensure we can click the close button
        await click(getByText('Close me'))

        // Ensure the disclosure is closed
        assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })

        // Ensure the DisclosureButton got the restored focus
        assertActiveElement(getByText('Trigger'))
      })
    )
    // `close` also accepts an explicit element; focus should be restored to
    // that element (here the external #test button) instead of the trigger.
    it(
      'should expose a close function that closes the disclosure and restores to a specific element',
      suppressConsoleLogs(async () => {
        render(svelte`
          <button id="test">restoreable</button>
          <Disclosure>
            <DisclosureButton>Trigger</DisclosureButton>
            <DisclosurePanel let:close>
              <button on:click={() => close(document.getElementById('test'))}>Close me</button>
            </DisclosurePanel>
          </Disclosure>
        `)

        // Focus the button
        getDisclosureButton()?.focus()

        // Ensure the button is focused
        assertActiveElement(getDisclosureButton())

        // Open the disclosure
        await click(getDisclosureButton())

        // Ensure we can click the close button
        await click(getByText('Close me'))

        // Ensure the disclosure is closed
        assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })

        // Ensure the restoreable button got the restored focus
        assertActiveElement(getByText('restoreable'))
      })
    )
})
})
  describe('Composition', () => {
    // Wrapping the panel in Transition/TransitionChild must not break the
    // disclosure; TransitionDebug records mount/unmount order into orderFn.
    it(
      'should be possible to control the DisclosurePanel by wrapping it in a Transition component',
      suppressConsoleLogs(async () => {
        let orderFn = jest.fn()
        render(
          TestRenderer, {
          allProps: [
            Disclosure, {}, [
              [DisclosureButton, {}, "Trigger"],
              [TransitionDebug, { name: "Disclosure", fn: orderFn }],
              [Transition, {}, [
                [TransitionDebug, { name: "Transition", fn: orderFn }],
                [DisclosurePanel, {}, [
                  [TransitionChild, {}, [
                    [TransitionDebug, { name: "TransitionChild", fn: orderFn }],
                  ]]
                ]]
              ]]
            ]
          ]
        })

        // Verify the Disclosure is hidden
        assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })

        // Open the Disclosure component
        await click(getDisclosureButton())

        // Verify the Disclosure is visible
        assertDisclosurePanel({ state: DisclosureState.Visible })

        // Unmount the full tree
        await click(getDisclosureButton())

        // Wait for all transitions to finish
        await nextFrame()

        // Verify that we tracked the `mounts` and `unmounts` in the correct order
        // Note that with Svelte the components are unmounted top-down instead of bottom-up as with React
        expect(orderFn.mock.calls).toEqual([
          ['Mounting - Disclosure'],
          ['Mounting - Transition'],
          ['Mounting - TransitionChild'],
          ['Unmounting - Transition'],
          ['Unmounting - TransitionChild'],
        ])
      })
    )
  })
describe('Keyboard interactions', () => {
describe('`Enter` key', () => {
it(
'should be possible to open the Disclosure with Enter',
suppressConsoleLogs(async () => {
render(
TestRenderer, {
allProps: [
Disclosure, {}, [
[DisclosureButton, {}, "Trigger"],
[DisclosurePanel, {}, "Contents"],
]
]
})
assertDisclosureButton({
state: DisclosureState.InvisibleUnmounted,
attributes: { id: 'headlessui-disclosure-button-1' },
})
assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })
// Focus the button
getDisclosureButton()?.focus()
// Open disclosure
await press(Keys.Enter)
// Verify it is open
assertDisclosureButton({ state: DisclosureState.Visible })
assertDisclosurePanel({
state: DisclosureState.Visible,
attributes: { id: 'headlessui-disclosure-panel-2' },
})
// Close disclosure
await press(Keys.Enter)
assertDisclosureButton({ state: DisclosureState.InvisibleUnmounted })
})
)
it(
'should not be possible to open the disclosure with Enter when the button is disabled',
suppressConsoleLogs(async () => {
render(
TestRenderer, {
allProps: [
Disclosure, {}, [
[DisclosureButton, { disabled: true }, "Trigger"],
[DisclosurePanel, {}, "Contents"],
]
]
})
assertDisclosureButton({
state: DisclosureState.InvisibleUnmounted,
attributes: { id: 'headlessui-disclosure-button-1' },
})
assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })
// Focus the button
getDisclosureButton()?.focus()
// Try to open the disclosure
await press(Keys.Enter)
// Verify it is still closed
assertDisclosureButton({
state: DisclosureState.InvisibleUnmounted,
attributes: { id: 'headlessui-disclosure-button-1' },
})
assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })
})
)
it(
'should be possible to close the disclosure with Enter when the disclosure is open',
suppressConsoleLogs(async () => {
render(
TestRenderer, {
allProps: [
Disclosure, {}, [
[DisclosureButton, {}, "Trigger"],
[DisclosurePanel, {}, "Contents"],
]
]
})
assertDisclosureButton({
state: DisclosureState.InvisibleUnmounted,
attributes: { id: 'headlessui-disclosure-button-1' },
})
assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })
// Focus the button
getDisclosureButton()?.focus()
// Open disclosure
await press(Keys.Enter)
// Verify it is open
assertDisclosureButton({ state: DisclosureState.Visible })
assertDisclosurePanel({
state: DisclosureState.Visible,
attributes: { id: 'headlessui-disclosure-panel-2' },
})
// Close disclosure
await press(Keys.Enter)
// Verify it is closed again
assertDisclosureButton({ state: DisclosureState.InvisibleUnmounted })
assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })
})
)
})
describe('`Space` key', () => {
it(
'should be possible to open the disclosure with Space',
suppressConsoleLogs(async () => {
render(
TestRenderer, {
allProps: [
Disclosure, {}, [
[DisclosureButton, {}, "Trigger"],
[DisclosurePanel, {}, "Contents"],
]
]
})
assertDisclosureButton({
state: DisclosureState.InvisibleUnmounted,
attributes: { id: 'headlessui-disclosure-button-1' },
})
assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })
// Focus the button
getDisclosureButton()?.focus()
// Open disclosure
await press(Keys.Space)
// Verify it is open
assertDisclosureButton({ state: DisclosureState.Visible })
assertDisclosurePanel({
state: DisclosureState.Visible,
attributes: { id: 'headlessui-disclosure-panel-2' },
})
})
)
it(
'should not be possible to open the disclosure with Space when the button is disabled',
suppressConsoleLogs(async () => {
render(
TestRenderer, {
allProps: [
Disclosure, {}, [
[DisclosureButton, { disabled: true }, "Trigger"],
[DisclosurePanel, {}, "Contents"],
]
]
})
assertDisclosureButton({
state: DisclosureState.InvisibleUnmounted,
attributes: { id: 'headlessui-disclosure-button-1' },
})
assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })
// Focus the button
getDisclosureButton()?.focus()
// Try to open the disclosure
await press(Keys.Space)
// Verify it is still closed
assertDisclosureButton({
state: DisclosureState.InvisibleUnmounted,
attributes: { id: 'headlessui-disclosure-button-1' },
})
assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })
})
)
it(
'should be possible to close the disclosure with Space when the disclosure is open',
suppressConsoleLogs(async () => {
render(
TestRenderer, {
allProps: [
Disclosure, {}, [
[DisclosureButton, {}, "Trigger"],
[DisclosurePanel, {}, "Contents"],
]
]
})
assertDisclosureButton({
state: DisclosureState.InvisibleUnmounted,
attributes: { id: 'headlessui-disclosure-button-1' },
})
assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })
// Focus the button
getDisclosureButton()?.focus()
// Open disclosure
await press(Keys.Space)
// Verify it is open
assertDisclosureButton({ state: DisclosureState.Visible })
assertDisclosurePanel({
state: DisclosureState.Visible,
attributes: { id: 'headlessui-disclosure-panel-2' },
})
// Close disclosure
await press(Keys.Space)
// Verify it is closed again
assertDisclosureButton({ state: DisclosureState.InvisibleUnmounted })
assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })
})
)
})
})
  // Mouse behavior of DisclosureButton: left click toggles, right click is
  // ignored, disabled buttons are inert, and a DisclosureButton nested inside
  // the panel acts as a plain close button (no aria wiring).
  describe('Mouse interactions', () => {
    it(
      'should be possible to open a disclosure on click',
      suppressConsoleLogs(async () => {
        render(
          TestRenderer, {
          allProps: [
            Disclosure, {}, [
              [DisclosureButton, {}, "Trigger"],
              [DisclosurePanel, {}, "Contents"],
            ]
          ]
        })

        assertDisclosureButton({
          state: DisclosureState.InvisibleUnmounted,
          attributes: { id: 'headlessui-disclosure-button-1' },
        })
        assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })

        // Open disclosure
        await click(getDisclosureButton())

        // Verify it is open
        assertDisclosureButton({ state: DisclosureState.Visible })
        assertDisclosurePanel({
          state: DisclosureState.Visible,
          attributes: { id: 'headlessui-disclosure-panel-2' },
        })
      })
    )

    it(
      'should not be possible to open a disclosure on right click',
      suppressConsoleLogs(async () => {
        render(
          TestRenderer, {
          allProps: [
            Disclosure, {}, [
              [DisclosureButton, {}, "Trigger"],
              [DisclosurePanel, {}, "Contents"],
            ]
          ]
        })

        assertDisclosureButton({
          state: DisclosureState.InvisibleUnmounted,
          attributes: { id: 'headlessui-disclosure-button-1' },
        })
        assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })

        // Open disclosure (with the right mouse button — should be a no-op)
        await click(getDisclosureButton(), MouseButton.Right)

        // Verify it is still closed
        assertDisclosureButton({
          state: DisclosureState.InvisibleUnmounted,
          attributes: { id: 'headlessui-disclosure-button-1' },
        })
        assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })
      })
    )

    it(
      'should not be possible to open a disclosure on click when the button is disabled',
      suppressConsoleLogs(async () => {
        render(
          TestRenderer, {
          allProps: [
            Disclosure, {}, [
              [DisclosureButton, { disabled: true }, "Trigger"],
              [DisclosurePanel, {}, "Contents"],
            ]
          ]
        })

        assertDisclosureButton({
          state: DisclosureState.InvisibleUnmounted,
          attributes: { id: 'headlessui-disclosure-button-1' },
        })
        assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })

        // Try to open the disclosure
        await click(getDisclosureButton())

        // Verify it is still closed
        assertDisclosureButton({
          state: DisclosureState.InvisibleUnmounted,
          attributes: { id: 'headlessui-disclosure-button-1' },
        })
        assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })
      })
    )

    it(
      'should be possible to close a disclosure on click',
      suppressConsoleLogs(async () => {
        render(
          TestRenderer, {
          allProps: [
            Disclosure, {}, [
              [DisclosureButton, {}, "Trigger"],
              [DisclosurePanel, {}, "Contents"],
            ]
          ]
        })

        // Open disclosure
        await click(getDisclosureButton())

        // Verify it is open
        assertDisclosureButton({ state: DisclosureState.Visible })

        // Click to close
        await click(getDisclosureButton())

        // Verify it is closed
        assertDisclosureButton({ state: DisclosureState.InvisibleUnmounted })
        assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })
      })
    )

    it(
      'should be possible to close the Disclosure by clicking on a DisclosureButton inside a DisclosurePanel',
      suppressConsoleLogs(async () => {
        render(
          TestRenderer, {
          allProps: [
            Disclosure, {}, [
              [DisclosureButton, {}, "Trigger"],
              [DisclosurePanel, {}, [
                [DisclosureButton, {}, "Close"]
              ]]
            ]
          ]
        })

        // Open the disclosure
        await click(getDisclosureButton())

        // A nested button gets no id / aria-controls / aria-expanded — it is
        // a plain close button, not a second trigger
        let closeBtn = getByText('Close')
        expect(closeBtn).not.toHaveAttribute('id')
        expect(closeBtn).not.toHaveAttribute('aria-controls')
        expect(closeBtn).not.toHaveAttribute('aria-expanded')

        // The close button should close the disclosure
        await click(closeBtn)

        // Verify it is closed
        assertDisclosurePanel({ state: DisclosureState.InvisibleUnmounted })

        // Verify we restored the Open button
        assertActiveElement(getDisclosureButton())
      })
    )
  })
import produce, { Draft } from "immer";
import * as R from "ramda";
import { Client, Api, Model, Blob, Crypto, Graph, Rbac } from "@core/types";
import {
getCurrentEncryptedKeys,
getConnectedBlockEnvironmentsForApp,
} from "@core/lib/graph";
import {
keySetDifference,
getUserEncryptedKeyOrBlobComposite,
parseUserEncryptedKeyOrBlobComposite,
} from "@core/lib/blob";
import {
encrypt,
encryptSymmetricWithKey,
signJson,
} from "@core/lib/crypto/proxy";
import { symmetricEncryptionKey } from "@core/lib/crypto/utils";
import { verifyOrgKeyable } from "../trust";
import {
getTrustChain,
getAuth,
getInheritanceOverrides,
} from "@core/lib/client";
import set from "lodash.set";
export const keySetForGraphProposal = (
    graph: Client.Graph.UserGraph,
    now: number,
    producer: (
      graphDraft: Draft<Client.Graph.UserGraph>
    ) => void | Client.Graph.UserGraph,
    scope: Rbac.OrgAccessScope = "all"
  ): Blob.KeySet => {
    // Immutably apply the proposed mutation to the graph, then return the
    // set of encrypted keys the proposed graph requires beyond what the
    // current graph already has (i.e. what must be newly encrypted).
    const proposedGraph = produce(graph, producer);

    return keySetDifference(
      getCurrentEncryptedKeys(proposedGraph, scope, now, true),
      getCurrentEncryptedKeys(graph, scope, now, true)
    );
  },
requiredEnvsForKeySet = (
graph: Client.Graph.UserGraph,
toSet: Blob.KeySet
) => {
const requiredEnvs = new Set<string>(),
requiredChangesets = new Set<string>();
if (toSet.users) {
for (let userId in toSet.users) {
for (let deviceId in toSet.users[userId]) {
const deviceToSet = toSet.users[userId][deviceId];
for (let envParentId in deviceToSet) {
const { environments, locals } = deviceToSet[envParentId];
if (environments) {
for (let environmentId in environments) {
const environmentToSet = environments[environmentId];
if (
environmentToSet.env ||
environmentToSet.meta ||
environmentToSet.inherits
) {
requiredEnvs.add(envParentId);
}
if (environmentToSet.changesets) {
requiredChangesets.add(envParentId);
}
}
}
if (locals) {
requiredEnvs.add(envParentId);
}
}
}
}
}
if (toSet.blockKeyableParents) {
for (let blockId in toSet.blockKeyableParents) {
requiredEnvs.add(blockId);
}
}
if (toSet.keyableParents) {
for (let keyableParentId in toSet.keyableParents) {
const keyableParent = graph[keyableParentId] as Model.KeyableParent;
requiredEnvs.add(keyableParent.appId);
}
}
return {
requiredEnvs,
requiredChangesets,
};
},
  // Builds the `keys` and `blobs` portions of an Api.Net.EnvParams payload
  // for a given Blob.KeySet: encrypts each required environment symmetric key
  // to every target device / CLI user / generated ENVKEY pubkey, and encrypts
  // any inheritance-override blobs that don't exist locally yet. All keyable
  // pubkeys are verified against the org trust chain before any encryption
  // result is used. Throws if the current account isn't authenticated with a
  // decrypted private key.
  encryptedKeyParamsForKeySet = async (params: {
    state: Client.State;
    context: Client.Context;
    toSet: Blob.KeySet;
  }) => {
    const { state, context, toSet } = params,
      currentAuth = getAuth(state, context.accountIdOrCliKey);
    if (!currentAuth || !currentAuth.privkey) {
      throw new Error("Action requires authentication and decrypted privkey");
    }
    const privkey = currentAuth.privkey,
      // keyable ids whose pubkeys must be trust-verified before use
      toVerifyKeyableIds = new Set<string>(),
      // queued asymmetric encryptions: [path into `keys`, encrypt() params]
      toEncryptKeys: [string[], Parameters<typeof encrypt>[0]][] = [],
      // queued symmetric encryptions: [path into `blobs`, params]
      toEncryptBlobs: [
        string[],
        Parameters<typeof encryptSymmetricWithKey>[0]
      ][] = [],
      // loaded env entries that are inheritance overrides, grouped by the
      // environment they belong to
      inheritanceOverridesByEnvironmentId = R.groupBy(
        ([composite]) =>
          parseUserEncryptedKeyOrBlobComposite(composite).environmentId,
        R.toPairs(state.envs).filter(
          ([composite]) =>
            parseUserEncryptedKeyOrBlobComposite(composite)
              .inheritsEnvironmentId
        )
      );

    let keys = {} as Api.Net.EnvParams["keys"];
    let blobs = {} as Api.Net.EnvParams["blobs"];

    // --- 1) user / device targets ---
    if (toSet.users) {
      for (let userId in toSet.users) {
        const user = state.graph[userId] as Model.OrgUser | Model.CliUser;

        if (user.type == "cliUser") {
          toVerifyKeyableIds.add(userId);
        }

        for (let deviceId in toSet.users[userId]) {
          let pubkey: Crypto.Pubkey;

          // "cli" is a placeholder device id: CLI users have a single pubkey
          // on the user itself rather than per-device pubkeys
          if (deviceId == "cli" && user.type == "cliUser") {
            pubkey = user.pubkey;
          } else {
            pubkey = (
              state.graph[deviceId] as
                | Model.OrgUserDevice
                | Model.Invite
                | Model.DeviceGrant
            ).pubkey!;
            toVerifyKeyableIds.add(deviceId);
          }

          const deviceToSet = toSet.users[userId][deviceId];

          for (let envParentId in deviceToSet) {
            const { environments, locals } = deviceToSet[envParentId];

            if (environments) {
              for (let environmentId in environments) {
                const environmentToSet = environments[environmentId];

                // env / meta / inherits parts each have their own locally
                // loaded symmetric key; skip silently if it isn't loaded
                if (environmentToSet.env) {
                  const key =
                    state.envs[
                      getUserEncryptedKeyOrBlobComposite({ environmentId })
                    ]?.key;

                  if (key) {
                    toEncryptKeys.push([
                      [
                        "users",
                        userId,
                        deviceId,
                        envParentId,
                        "environments",
                        environmentId,
                        "env",
                      ],
                      {
                        data: key,
                        pubkey,
                        privkey,
                      },
                    ]);
                  }
                }
                if (environmentToSet.meta) {
                  const key =
                    state.envs[
                      getUserEncryptedKeyOrBlobComposite({
                        environmentId,
                        envPart: "meta",
                      })
                    ]?.key;
                  if (key) {
                    toEncryptKeys.push([
                      [
                        "users",
                        userId,
                        deviceId,
                        envParentId,
                        "environments",
                        environmentId,
                        "meta",
                      ],
                      {
                        data: key,
                        pubkey,
                        privkey,
                      },
                    ]);
                  }
                }
                if (environmentToSet.inherits) {
                  const key =
                    state.envs[
                      getUserEncryptedKeyOrBlobComposite({
                        environmentId,
                        envPart: "inherits",
                      })
                    ]?.key;
                  if (key) {
                    toEncryptKeys.push([
                      [
                        "users",
                        userId,
                        deviceId,
                        envParentId,
                        "environments",
                        environmentId,
                        "inherits",
                      ],
                      {
                        data: key,
                        pubkey,
                        privkey,
                      },
                    ]);
                  }
                }

                if (environmentToSet.changesets) {
                  const { key } = state.changesets[environmentId] ?? {};
                  if (key) {
                    const path = [
                      "users",
                      userId,
                      deviceId,
                      envParentId,
                      "environments",
                      environmentId,
                      "changesets",
                    ];

                    toEncryptKeys.push([
                      path,
                      {
                        data: key,
                        pubkey,
                        privkey,
                      },
                    ]);
                  }
                }

                // inheritance overrides
                if (environmentToSet.env) {
                  const environmentInheritanceOverrides =
                    inheritanceOverridesByEnvironmentId[environmentId] ?? [];

                  for (let [
                    composite,
                    { key },
                  ] of environmentInheritanceOverrides) {
                    const { inheritsEnvironmentId } =
                      parseUserEncryptedKeyOrBlobComposite(composite);

                    toEncryptKeys.push([
                      [
                        "users",
                        userId,
                        deviceId,
                        envParentId,
                        "environments",
                        environmentId,
                        "inheritanceOverrides",
                        inheritsEnvironmentId!,
                      ],
                      {
                        data: key,
                        pubkey,
                        privkey,
                      },
                    ]);
                  }
                }
              }
            }

            if (locals) {
              for (let localsUserId in locals) {
                // locals are keyed by a composite environment id
                const environmentId = envParentId + "|" + localsUserId;

                const localsToSet = locals[localsUserId];
                if (localsToSet.env) {
                  const key =
                    state.envs[
                      getUserEncryptedKeyOrBlobComposite({
                        environmentId,
                      })
                    ]?.key;

                  if (key) {
                    toEncryptKeys.push([
                      [
                        "users",
                        userId,
                        deviceId,
                        envParentId,
                        "locals",
                        localsUserId,
                        "env",
                      ],
                      {
                        data: key,
                        pubkey,
                        privkey,
                      },
                    ]);
                  }
                }

                if (localsToSet.meta) {
                  const key =
                    state.envs[
                      getUserEncryptedKeyOrBlobComposite({
                        environmentId,
                        envPart: "meta",
                      })
                    ]?.key;
                  if (key) {
                    toEncryptKeys.push([
                      [
                        "users",
                        userId,
                        deviceId,
                        envParentId,
                        "locals",
                        localsUserId,
                        "meta",
                      ],
                      {
                        data: key,
                        pubkey,
                        privkey,
                      },
                    ]);
                  }
                }

                if (localsToSet.changesets) {
                  const { key } =
                    state.changesets[envParentId + "|" + localsUserId] ?? {};

                  if (key) {
                    const path = [
                      "users",
                      userId,
                      deviceId,
                      envParentId,
                      "locals",
                      localsUserId,
                      "changesets",
                    ];

                    toEncryptKeys.push([
                      path,
                      {
                        data: key,
                        pubkey,
                        privkey,
                      },
                    ]);
                  }
                }
              }
            }
          }
        }
      }
    }

    // --- 2) app-scoped generated ENVKEY targets ---
    if (toSet.keyableParents) {
      for (let keyableParentId in toSet.keyableParents) {
        toVerifyKeyableIds.add(keyableParentId);

        const keyableParent = state.graph[
            keyableParentId
          ] as Model.KeyableParent,
          environment = state.graph[
            keyableParent.environmentId
          ] as Model.Environment;

        let inheritanceOverrides = getInheritanceOverrides(state, {
          envParentId: keyableParent.appId,
          environmentId: environment.id,
        });

        // for sub-environment, also include parent environment overrides
        if (environment.isSub) {
          inheritanceOverrides = R.mergeDeepRight(
            getInheritanceOverrides(state, {
              envParentId: keyableParent.appId,
              environmentId: environment.parentEnvironmentId,
            }),
            inheritanceOverrides
          ) as typeof inheritanceOverrides;
        }

        // NOTE(review): assumes exactly one generatedEnvkey per keyableParent
        // entry — only the first key is used
        const generatedEnvkeyId = Object.keys(
            toSet.keyableParents[keyableParentId]
          )[0],
          generatedEnvkey = state.graph[
            generatedEnvkeyId
          ] as Model.GeneratedEnvkey,
          envkeyToSet =
            toSet.keyableParents[keyableParentId][generatedEnvkeyId];

        if (envkeyToSet.env) {
          // for a sub-environment, `env` refers to the parent environment
          const composite = getUserEncryptedKeyOrBlobComposite({
            environmentId: environment.isSub
              ? environment.parentEnvironmentId
              : environment.id,
          });
          const key = state.envs[composite]?.key;

          if (key) {
            toEncryptKeys.push([
              [
                "keyableParents",
                keyableParent.id,
                generatedEnvkey.id,
                "env",
                "data",
              ],
              {
                data: key,
                pubkey: generatedEnvkey.pubkey,
                privkey,
              },
            ]);
          }
        }

        if (envkeyToSet.subEnv) {
          const key =
            state.envs[
              getUserEncryptedKeyOrBlobComposite({
                environmentId: environment.id,
              })
            ]?.key;
          if (key) {
            toEncryptKeys.push([
              [
                "keyableParents",
                keyableParent.id,
                generatedEnvkey.id,
                "subEnv",
                "data",
              ],
              {
                data: key,
                pubkey: generatedEnvkey.pubkey,
                privkey,
              },
            ]);
          }
        }

        if (envkeyToSet.localOverrides && keyableParent.type == "localKey") {
          const key =
            state.envs[
              getUserEncryptedKeyOrBlobComposite({
                environmentId: keyableParent.appId + "|" + keyableParent.userId,
              })
            ]?.key;
          if (key) {
            toEncryptKeys.push([
              [
                "keyableParents",
                keyableParent.id,
                generatedEnvkey.id,
                "localOverrides",
                "data",
              ],
              {
                data: key,
                pubkey: generatedEnvkey.pubkey,
                privkey,
              },
            ]);
          }
        }

        // inheritance overrides
        if (!R.isEmpty(inheritanceOverrides)) {
          for (let inheritanceOverridesEnvironmentId in inheritanceOverrides) {
            const composite = getUserEncryptedKeyOrBlobComposite({
              environmentId: keyableParent.environmentId,
              inheritsEnvironmentId: inheritanceOverridesEnvironmentId,
            });

            const environment = state.graph[
              keyableParent.environmentId
            ] as Model.Environment;

            let key = state.envs[composite]?.key;

            // no locally loaded key for this override: generate a fresh
            // symmetric key and queue the override blob for encryption too
            if (!key) {
              key = symmetricEncryptionKey();
              const blobData = JSON.stringify(
                inheritanceOverrides[inheritanceOverridesEnvironmentId]
              );

              toEncryptBlobs.push([
                [
                  environment.envParentId,
                  "environments",
                  environment.id,
                  "inheritanceOverrides",
                  inheritanceOverridesEnvironmentId,
                ],
                {
                  data: blobData,
                  encryptionKey: key,
                },
              ]);
            }

            toEncryptKeys.push([
              [
                "keyableParents",
                keyableParent.id,
                generatedEnvkey.id,
                "inheritanceOverrides",
                inheritanceOverridesEnvironmentId,
                "data",
              ],
              {
                data: key,
                pubkey: generatedEnvkey.pubkey,
                privkey,
              },
            ]);
          }
        }
      }
    }

    // --- 3) block-scoped generated ENVKEY targets ---
    if (toSet.blockKeyableParents) {
      for (let blockId in toSet.blockKeyableParents) {
        for (let keyableParentId in toSet.blockKeyableParents[blockId]) {
          toVerifyKeyableIds.add(keyableParentId);

          const keyableParent = state.graph[
              keyableParentId
            ] as Model.KeyableParent,
            appEnvironment = state.graph[
              keyableParent.environmentId
            ] as Model.Environment,
            // resolve the block environment connected to the app environment
            blockEnvironment = getConnectedBlockEnvironmentsForApp(
              state.graph,
              keyableParent.appId,
              blockId,
              appEnvironment.id
            )[0];

          let inheritanceOverrides = getInheritanceOverrides(state, {
            envParentId: blockId,
            environmentId: blockEnvironment.id,
          });

          // for sub-environment, also include parent environment overrides
          if (blockEnvironment.isSub) {
            inheritanceOverrides = R.mergeDeepRight(
              getInheritanceOverrides(state, {
                envParentId: blockId,
                environmentId: blockEnvironment.parentEnvironmentId,
              }),
              inheritanceOverrides
            ) as typeof inheritanceOverrides;
          }

          // NOTE(review): assumes a single generatedEnvkey per entry, as in
          // the keyableParents branch above
          const generatedEnvkeyId = Object.keys(
              toSet.blockKeyableParents[blockId][keyableParentId]
            )[0],
            generatedEnvkey = state.graph[
              generatedEnvkeyId
            ] as Model.GeneratedEnvkey,
            envkeyToSet =
              toSet.blockKeyableParents[blockId][keyableParentId][
                generatedEnvkeyId
              ];

          if (envkeyToSet.env) {
            const key =
              state.envs[
                getUserEncryptedKeyOrBlobComposite({
                  environmentId: blockEnvironment.isSub
                    ? blockEnvironment.parentEnvironmentId
                    : blockEnvironment.id,
                })
              ]?.key;
            if (key) {
              toEncryptKeys.push([
                [
                  "blockKeyableParents",
                  blockId,
                  keyableParent.id,
                  generatedEnvkey.id,
                  "env",
                  "data",
                ],
                {
                  data: key,
                  pubkey: generatedEnvkey.pubkey,
                  privkey,
                },
              ]);
            }
          }

          if (envkeyToSet.subEnv && blockEnvironment.isSub) {
            const key =
              state.envs[
                getUserEncryptedKeyOrBlobComposite({
                  environmentId: blockEnvironment.id,
                })
              ]?.key;
            if (key) {
              toEncryptKeys.push([
                [
                  "blockKeyableParents",
                  blockId,
                  keyableParent.id,
                  generatedEnvkey.id,
                  "subEnv",
                  "data",
                ],
                {
                  data: key,
                  pubkey: generatedEnvkey.pubkey,
                  privkey,
                },
              ]);
            }
          }

          if (envkeyToSet.localOverrides && keyableParent.type == "localKey") {
            const key =
              state.envs[
                getUserEncryptedKeyOrBlobComposite({
                  environmentId: blockId + "|" + keyableParent.userId,
                })
              ]?.key;
            if (key) {
              toEncryptKeys.push([
                [
                  "blockKeyableParents",
                  blockId,
                  keyableParent.id,
                  generatedEnvkey.id,
                  "localOverrides",
                  "data",
                ],
                {
                  data: key,
                  pubkey: generatedEnvkey.pubkey,
                  privkey,
                },
              ]);
            }
          }

          // inheritance overrides
          if (!R.isEmpty(inheritanceOverrides)) {
            for (let inheritanceOverridesEnvironmentId in inheritanceOverrides) {
              const composite = getUserEncryptedKeyOrBlobComposite({
                environmentId: blockEnvironment.id,
                inheritsEnvironmentId: inheritanceOverridesEnvironmentId,
              });

              let key = state.envs[composite]?.key;

              // missing locally: generate a key and queue the blob itself
              if (!key) {
                key = symmetricEncryptionKey();
                toEncryptBlobs.push([
                  [
                    blockEnvironment.envParentId,
                    "environments",
                    blockEnvironment.id,
                    "inheritanceOverrides",
                    inheritanceOverridesEnvironmentId,
                  ],
                  {
                    data: JSON.stringify(
                      inheritanceOverrides[inheritanceOverridesEnvironmentId]
                    ),
                    encryptionKey: key,
                  },
                ]);
              }

              toEncryptKeys.push([
                [
                  "blockKeyableParents",
                  blockId,
                  keyableParent.id,
                  generatedEnvkey.id,
                  "inheritanceOverrides",
                  inheritanceOverridesEnvironmentId,
                  "data",
                ],
                {
                  data: key,
                  pubkey: generatedEnvkey.pubkey,
                  privkey,
                },
              ]);
            }
          }
        }
      }
    }

    // verify all keyables
    await Promise.all(
      Array.from(toVerifyKeyableIds).map((keyableId) =>
        verifyOrgKeyable(state, keyableId, context)
      )
    );

    // run all queued encryptions concurrently, then write each result into
    // the nested `keys` / `blobs` structures at its recorded path
    const keyPromises = toEncryptKeys.map(([path, params]) =>
        encrypt(params).then((encrypted) => [path, encrypted])
      ) as Promise<[string[], Crypto.EncryptedData]>[],
      blobPromises = toEncryptBlobs.map(([path, params]) =>
        encryptSymmetricWithKey(params).then((encrypted) => [path, encrypted])
      ) as Promise<[string[], Crypto.EncryptedData]>[],
      [keyPathResults, blobPathResults] = await Promise.all([
        Promise.all(keyPromises),
        Promise.all(blobPromises),
      ]);

    for (let [path, data] of keyPathResults) {
      set(keys, path, data);
    }

    for (let [path, data] of blobPathResults) {
      set(blobs, path, data);
    }

    // when any ENVKEY target is present, include a signed copy of the org
    // trust chain so the server/clients can verify the encrypting identity
    let encryptedByTrustChain: string | undefined;
    const hasKeyables =
      Object.keys(toSet.keyableParents ?? {}).length +
        Object.keys(toSet.blockKeyableParents ?? {}).length >
      0;
    if (hasKeyables) {
      const trustChain = getTrustChain(state, context.accountIdOrCliKey);
      encryptedByTrustChain = await signJson({
        data: trustChain,
        privkey,
      });
    }

    return {
      keys,
      blobs,
      encryptedByTrustChain: encryptedByTrustChain
        ? { data: encryptedByTrustChain }
        : undefined,
    } as Api.Net.EnvParams;
  };
import {
Address,
Algorithm,
ChainId,
Hash,
Nonce,
PostableBytes,
Preimage,
PubkeyBytes,
SendTransaction,
SignatureBytes,
SignedTransaction,
SwapAbortTransaction,
SwapClaimTransaction,
SwapId,
SwapIdBytes,
SwapOfferTransaction,
TokenTicker,
} from "@iov/bcp";
import { ExtendedSecp256k1Signature } from "@iov/crypto";
import { fromHex, toUtf8 } from "@iov/encoding";
import { Erc20ApproveTransaction, Erc20Options } from "./erc20";
import { EthereumCodec } from "./ethereumcodec";
import { EthereumRpcTransactionResult } from "./ethereumrpctransactionresult";
import { SwapIdPrefix } from "./serializationcommon";
import { testConfig } from "./testconfig.spec";
// Codec under test, configured with the atomic swap contract addresses (ETH
// and ERC20 variants) from the shared test configuration.
const ethereumCodec = new EthereumCodec({
  atomicSwapEtherContractAddress: testConfig.connectionOptions.atomicSwapEtherContractAddress,
  atomicSwapErc20ContractAddress: testConfig.connectionOptions.atomicSwapErc20ContractAddress,
});
describe("ethereumCodec", () => {
describe("parseBytes", () => {
    // Parses a raw eth_getTransactionByHash JSON result into a
    // SignedTransaction<SendTransaction>, including fee/amount conversion,
    // UTF-8 memo decoding, and the signer pubkey (presumably recovered from
    // the r/s/v signature — confirm against EthereumCodec internals).
    it("works", () => {
      // curl -sS -X POST --data '{"jsonrpc":"2.0","method":"eth_getTransactionByHash","params":["0x3b87faa3410f33284124a6898fac1001673f0f7c3682d18f55bdff0031cce9ce"],"id":1}' https://rinkeby.infura.io | jq .result
      const rawGetTransactionByHashResult: EthereumRpcTransactionResult = {
        blockHash: "0x05ebd1bd99956537f49cfa1104682b3b3f9ff9249fa41a09931ce93368606c21",
        blockNumber: "0x37ef3e",
        from: "0x0a65766695a712af41b5cfecaad217b1a11cb22a",
        gas: "0x226c8",
        gasPrice: "0x3b9aca00",
        hash: "0x3b87faa3410f33284124a6898fac1001673f0f7c3682d18f55bdff0031cce9ce",
        input: "0x536561726368207478207465737420302e36353930383639313733393634333335",
        nonce: "0xe1",
        r: "0xb9299dab50b3cddcaecd64b29bfbd5cd30fac1a1adea1b359a13c4e5171492a6",
        s: "0x573059c66d894684488f92e7ce1f91b158ca57b0235485625b576a3b98c480ac",
        to: "0xe137f5264b6b528244e1643a2d570b37660b7f14",
        transactionIndex: "0xb",
        v: "0x2b",
        value: "0x53177c",
      };
      const expectedPubkey = fromHex(
        "041d4c015b00cbd914e280b871d3c6ae2a047ca650d3ecea4b5246bb3036d4d74960b7feb09068164d2b82f1c7df9e95839b29ae38e90d60578b2318a54e108cf8",
      ) as PubkeyBytes;
      const postableBytes = toUtf8(JSON.stringify(rawGetTransactionByHashResult)) as PostableBytes;

      const parsed = ethereumCodec.parseBytes(postableBytes, "ethereum-eip155-4" as ChainId);
      expect((parsed as unknown) as SignedTransaction<SendTransaction>).toEqual({
        transaction: {
          kind: "bcp/send",
          chainId: "ethereum-eip155-4" as ChainId,
          fee: {
            gasLimit: "141000",
            gasPrice: {
              quantity: "1000000000",
              fractionalDigits: 18,
              tokenTicker: "ETH" as TokenTicker,
            },
          },
          amount: {
            quantity: "5445500",
            fractionalDigits: 18,
            tokenTicker: "ETH" as TokenTicker,
          },
          sender: "0x0A65766695A712Af41B5cfECAaD217B1a11CB22A" as Address,
          recipient: "0xE137f5264b6B528244E1643a2D570b37660B7F14" as Address,
          memo: "Search tx test 0.6590869173964335",
        },
        signatures: [
          {
            nonce: 225 as Nonce,
            pubkey: {
              algo: Algorithm.Secp256k1,
              data: expectedPubkey,
            },
            signature: new ExtendedSecp256k1Signature(
              fromHex("b9299dab50b3cddcaecd64b29bfbd5cd30fac1a1adea1b359a13c4e5171492a6"),
              fromHex("573059c66d894684488f92e7ce1f91b158ca57b0235485625b576a3b98c480ac"),
              0,
            ).toFixedLength() as SignatureBytes,
          },
        ],
      });
    });
    // When the `to` address matches a configured ERC20 contract, a
    // `transfer(address,uint256)` call is decoded as a token send: the amount
    // and recipient come from the input data and the ticker from the token
    // registry, not from the ETH `value` field.
    it("works for ERC20 transfer", () => {
      // curl -sS -X POST --data '{"jsonrpc":"2.0","method":"eth_getTransactionByHash","params":["0x80295fc8cdf6ac5fce39f34037f07d5be3abe82baa8468196faf1f00ced239e3"],"id":1}' https://rinkeby.infura.io | jq .result
      const rawGetTransactionByHashResult: EthereumRpcTransactionResult = {
        blockHash: "0x135592131306762eef45d8f12c3d27c5d709c84d124f0df062d0bb3806a32701",
        blockNumber: "0x3f20f6",
        from: "0x9bd26664827550982960b9e76bcd88c0b6791bb4",
        gas: "0x226c8",
        gasPrice: "0x3b9aca00",
        hash: "0x80295fc8cdf6ac5fce39f34037f07d5be3abe82baa8468196faf1f00ced239e3",
        input:
          "0xa9059cbb0000000000000000000000009ea4094ed5d7e089ac846c7d66fc518bd24753ab0000000000000000000000000000000000000000000000000000000000000002",
        nonce: "0x1",
        r: "0xcbe96b38321e6ef536da5e74b558cf87acdda825be35be40627b2b3d8633b8f4",
        s: "0x7fc31ca5bb3dbd02e5e8fc5093082f3f2ab3e0042d4e5b25fe09e5f7485d83b7",
        to: "0xc778417e063141139fce010982780140aa0cd5ab",
        transactionIndex: "0x7",
        v: "0x2b",
        value: "0x0",
      };
      const expectedPubkey = fromHex(
        "040b8b6f82e7226d21991dd6b1a7de357cebfc42ccb95678404d8e2b54cc3be187b17a50ef833884df318aa7def070585c92a185272b8cb8b61ba916d993435c87",
      ) as PubkeyBytes;
      const postableBytes = toUtf8(JSON.stringify(rawGetTransactionByHashResult)) as PostableBytes;

      // Register the WETH contract so the codec can recognize the transfer
      const erc20Tokens = new Map<TokenTicker, Erc20Options>([
        [
          "WETH" as TokenTicker,
          {
            contractAddress: "0xc778417e063141139fce010982780140aa0cd5ab" as Address,
            decimals: 18,
            symbol: "WETH" as TokenTicker,
          },
        ],
      ]);
      const codec = new EthereumCodec({ erc20Tokens: erc20Tokens });
      const parsed = codec.parseBytes(postableBytes, "ethereum-eip155-4" as ChainId);
      expect((parsed as unknown) as SignedTransaction<SendTransaction>).toEqual({
        transaction: {
          kind: "bcp/send",
          chainId: "ethereum-eip155-4" as ChainId,
          fee: {
            gasLimit: "141000",
            gasPrice: {
              quantity: "1000000000",
              fractionalDigits: 18,
              tokenTicker: "ETH" as TokenTicker,
            },
          },
          amount: {
            quantity: "2",
            fractionalDigits: 18,
            tokenTicker: "WETH" as TokenTicker,
          },
          sender: "0x9bD26664827550982960b9E76bcd88C0b6791bb4" as Address,
          recipient: "0x9ea4094Ed5D7E089ac846C7D66fc518bd24753ab" as Address,
          memo: undefined,
        },
        signatures: [
          {
            nonce: 1 as Nonce,
            pubkey: {
              algo: Algorithm.Secp256k1,
              data: expectedPubkey,
            },
            signature: new ExtendedSecp256k1Signature(
              fromHex("cbe96b38321e6ef536da5e74b558cf87acdda825be35be40627b2b3d8633b8f4"),
              fromHex("7fc31ca5bb3dbd02e5e8fc5093082f3f2ab3e0042d4e5b25fe09e5f7485d83b7"),
              0,
            ).toFixedLength() as SignatureBytes,
          },
        ],
      });
    });
  // Verifies that an ERC20 `transfer(address,uint256)` call to a contract the codec
  // does not know is parsed as a plain ETH send (value 0) with the raw input data as memo.
  it("interprets ERC20 transfer of unknown contract as ETH send", () => {
    // There is only one transaction type on Ethereum. Smart contract interaction and
    // ETH sends can only reliably be differentiated when all contract addresses are known.
    // As a result, we interpret all unknown transactions as ETH send.
    // curl -sS -X POST --data '{"jsonrpc":"2.0","method":"eth_getTransactionByHash","params":["0x80295fc8cdf6ac5fce39f34037f07d5be3abe82baa8468196faf1f00ced239e3"],"id":1}' https://rinkeby.infura.io | jq .result
    const rawGetTransactionByHashResult: EthereumRpcTransactionResult = {
      blockHash: "0x135592131306762eef45d8f12c3d27c5d709c84d124f0df062d0bb3806a32701",
      blockNumber: "0x3f20f6",
      from: "0x9bd26664827550982960b9e76bcd88c0b6791bb4",
      gas: "0x226c8",
      gasPrice: "0x3b9aca00",
      hash: "0x80295fc8cdf6ac5fce39f34037f07d5be3abe82baa8468196faf1f00ced239e3",
      // transfer(0x9ea4094ed5d7e089ac846c7d66fc518bd24753ab, 2)
      input:
        "0xa9059cbb0000000000000000000000009ea4094ed5d7e089ac846c7d66fc518bd24753ab0000000000000000000000000000000000000000000000000000000000000002",
      nonce: "0x1",
      r: "0xcbe96b38321e6ef536da5e74b558cf87acdda825be35be40627b2b3d8633b8f4",
      s: "0x7fc31ca5bb3dbd02e5e8fc5093082f3f2ab3e0042d4e5b25fe09e5f7485d83b7",
      to: "0xc778417e063141139fce010982780140aa0cd5ab",
      transactionIndex: "0x7",
      v: "0x2b",
      value: "0x0",
    };
    const expectedPubkey = fromHex(
      "040b8b6f82e7226d21991dd6b1a7de357cebfc42ccb95678404d8e2b54cc3be187b17a50ef833884df318aa7def070585c92a185272b8cb8b61ba916d993435c87",
    ) as PubkeyBytes;
    const postableBytes = toUtf8(JSON.stringify(rawGetTransactionByHashResult)) as PostableBytes;
    // Empty token map: the contract in `to` is unknown to this codec instance.
    const codec = new EthereumCodec({ erc20Tokens: new Map<TokenTicker, Erc20Options>() });
    const parsed = codec.parseBytes(postableBytes, "ethereum-eip155-4" as ChainId);
    expect((parsed as unknown) as SignedTransaction<SendTransaction>).toEqual({
      transaction: {
        kind: "bcp/send",
        chainId: "ethereum-eip155-4" as ChainId,
        fee: {
          gasLimit: "141000",
          gasPrice: {
            quantity: "1000000000",
            fractionalDigits: 18,
            tokenTicker: "ETH" as TokenTicker,
          },
        },
        amount: {
          quantity: "0",
          fractionalDigits: 18,
          tokenTicker: "ETH" as TokenTicker,
        },
        sender: "0x9bD26664827550982960b9E76bcd88C0b6791bb4" as Address,
        // Recipient is the (unknown) contract address, checksummed.
        recipient: "0xc778417E063141139Fce010982780140Aa0cD5Ab" as Address,
        // Non-UTF8 automatically represented as hex
        memo:
          "a9059cbb00000000 0000000000000000 9ea4094ed5d7e089 ac846c7d66fc518b d24753ab00000000 0000000000000000 0000000000000000 0000000000000000 00000002",
      },
      signatures: [
        {
          nonce: 1 as Nonce,
          pubkey: {
            algo: Algorithm.Secp256k1,
            data: expectedPubkey,
          },
          signature: new ExtendedSecp256k1Signature(
            fromHex("cbe96b38321e6ef536da5e74b558cf87acdda825be35be40627b2b3d8633b8f4"),
            fromHex("7fc31ca5bb3dbd02e5e8fc5093082f3f2ab3e0042d4e5b25fe09e5f7485d83b7"),
            0,
          ).toFixedLength() as SignatureBytes,
        },
      ],
    });
  });
  // Verifies parsing of an ERC20 `approve(address,uint256)` call on a known token
  // contract (REP) into an "erc20/approve" transaction.
  it("works for ERC20 approve", () => {
    // curl -sS -X POST --data '{"jsonrpc":"2.0","method":"eth_getTransactionByHash","params":["0x4734349dd36860c9f7c981e2c673f986ade036e2b7b64dcc55f0bf0ce461daae"],"id":1}' https://mainnet.infura.io | jq .result
    const rawGetTransactionByHashResult: EthereumRpcTransactionResult = {
      blockHash: "0x6181b9dd9b4237a4a0228accd86d9c989882187bc66f57070a725f61298e3860",
      blockNumber: "0x7482d5",
      from: "0xbdfd9e1fa05c6ad0714e6f27bdb4b821ec99f7a2",
      gas: "0x186a0",
      gasPrice: "0xa7a358200",
      hash: "0x4734349dd36860c9f7c981e2c673f986ade036e2b7b64dcc55f0bf0ce461daae",
      // approve(0x4b525ae3a20021639d6e00bf752e6d2b7f65196e, 2^256 - 1)
      input:
        "0x095ea7b30000000000000000000000004b525ae3a20021639d6e00bf752e6d2b7f65196effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
      nonce: "0x0",
      r: "0x194013d2767d86e0aac07f5e713e52c1bafdbe20361b59257ae7e5665d504bf1",
      s: "0x76deb0b778442ff69b61fa9c27333e4b2e6c184643b1ce3d60b4da2cb39266c3",
      to: "0x1985365e9f78359a9b6ad760e32412f4a445e862",
      transactionIndex: "0x33",
      v: "0x25",
      value: "0x0",
    };
    const expectedPubkey = fromHex(
      "043e82ebc5dd773720677229f4eedcb61dbb131533ce0a4206e3788a92b70224505ef120ca98418d3657c891d0cd74cb15dca10f94a3ffd7a7f65bc99e1138b5c2",
    ) as PubkeyBytes;
    const postableBytes = toUtf8(JSON.stringify(rawGetTransactionByHashResult)) as PostableBytes;
    // Register the REP token so the codec recognizes the `to` contract address.
    const erc20Tokens = new Map<TokenTicker, Erc20Options>([
      [
        "REP" as TokenTicker,
        {
          contractAddress: "0x1985365e9f78359a9b6ad760e32412f4a445e862" as Address,
          decimals: 18,
          symbol: "REP" as TokenTicker,
        },
      ],
    ]);
    const codec = new EthereumCodec({ erc20Tokens: erc20Tokens });
    const parsed = codec.parseBytes(postableBytes, "ethereum-eip155-1" as ChainId);
    expect((parsed as unknown) as SignedTransaction<Erc20ApproveTransaction>).toEqual({
      transaction: {
        kind: "erc20/approve",
        chainId: "ethereum-eip155-1" as ChainId,
        fee: {
          gasLimit: "100000",
          gasPrice: {
            quantity: "45000000000",
            fractionalDigits: 18,
            tokenTicker: "ETH" as TokenTicker,
          },
        },
        amount: {
          // 2^256 - 1, i.e. the conventional "unlimited" ERC20 allowance
          quantity: "115792089237316195423570985008687907853269984665640564039457584007913129639935",
          fractionalDigits: 18,
          tokenTicker: "REP" as TokenTicker,
        },
        spender: "0x4b525aE3A20021639D6e00bf752E6d2B7F65196e" as Address,
      },
      signatures: [
        {
          nonce: 0 as Nonce,
          pubkey: {
            algo: Algorithm.Secp256k1,
            data: expectedPubkey,
          },
          signature: new ExtendedSecp256k1Signature(
            fromHex("194013d2767d86e0aac07f5e713e52c1bafdbe20361b59257ae7e5665d504bf1"),
            fromHex("76deb0b778442ff69b61fa9c27333e4b2e6c184643b1ce3d60b4da2cb39266c3"),
            0,
          ).toFixedLength() as SignatureBytes,
        },
      ],
    });
  });
  // Verifies parsing of an offer against the Ether atomic swap contract into a
  // "bcp/swap_offer" transaction (swap id, recipient, hash and timeout come from the input data).
  it("works for Ether atomic swap offer", () => {
    // Retrieved from local instance since we haven't deployed this to a public testnet
    // curl -sS -X POST --data '{"jsonrpc":"2.0","method":"eth_getTransactionByHash","params":["0x044870acdb5fdab0f76266eda11cdeee50de880f58ccf6bfb32a6b651914f637"],"id":1}' http://localhost:8545 | jq .result
    const rawGetTransactionByHashResult: EthereumRpcTransactionResult = {
      hash: "0x044870acdb5fdab0f76266eda11cdeee50de880f58ccf6bfb32a6b651914f637",
      nonce: "0x36",
      blockHash: "0x5fabed71b3d99dec733a8d513c1e91971d8446baeb044d00d609937148712929",
      blockNumber: "0x37",
      transactionIndex: "0x0",
      from: "0x88f3b5659075d0e06bb1004be7b1a7e66f452284",
      // lowercase value of testConfig.connectionOptions.atomicSwapEtherContractAddress
      to: "0xe1c9ea25a621cf5c934a7e112ecab640ec7d8d18",
      value: "0x1ca3660340",
      gas: "0x200b20",
      gasPrice: "0x4a817c800",
      // open(swapId, recipient, hash, timeoutHeight)
      input:
        "0x0eed85485cecbb0814d20c1f6221fdec0c2902172182d1b2f3212957f947e4cea398ebe6000000000000000000000000901a84da2b9c5cbb64d8aeeca58d5fd0339bb018015d55677261fb5deb1e94dac1ffb6dc0de51eb3b6c0631f7f9f2e4f41eca085000000000000000000000000000000000000000000000000000000000000003b",
      v: "0x2d45",
      r: "0x9351a7fa42078636bd36bbae0d8d5f009b92986991bcc92ae882bce8982360d5",
      s: "0x706a6f90bb42b9d929150839fb9aa06207e40f8d814183e30789ac036263497f",
    };
    const expectedPubkey = fromHex(
      "04965fb72aad79318cd8c8c975cf18fa8bcac0c091605d10e89cd5a9f7cff564b0cb0459a7c22903119f7a42947c32c1cc6a434a86f0e26aad00ca2b2aff6ba381",
    ) as PubkeyBytes;
    const expectedSwapId: SwapId = {
      prefix: SwapIdPrefix.Ether,
      data: fromHex("5cecbb0814d20c1f6221fdec0c2902172182d1b2f3212957f947e4cea398ebe6") as SwapIdBytes,
    };
    const expectedRecipient = "0x901A84DA2b9c5CBb64D8AEECa58D5FD0339bB018" as Address;
    const expectedHash = fromHex(
      "015d55677261fb5deb1e94dac1ffb6dc0de51eb3b6c0631f7f9f2e4f41eca085",
    ) as Hash;
    const postableBytes = toUtf8(JSON.stringify(rawGetTransactionByHashResult)) as PostableBytes;
    const parsed = ethereumCodec.parseBytes(postableBytes, "ethereum-eip155-5777" as ChainId);
    expect((parsed as unknown) as SignedTransaction<SwapOfferTransaction>).toEqual({
      transaction: {
        kind: "bcp/swap_offer",
        chainId: "ethereum-eip155-5777" as ChainId,
        fee: {
          gasLimit: "2100000",
          gasPrice: {
            quantity: "20000000000",
            fractionalDigits: 18,
            tokenTicker: "ETH" as TokenTicker,
          },
        },
        amounts: [
          {
            // The offered amount is the transaction's ETH value (0x1ca3660340).
            quantity: "123000456000",
            fractionalDigits: 18,
            tokenTicker: "ETH" as TokenTicker,
          },
        ],
        swapId: expectedSwapId,
        sender: "0x88F3b5659075D0E06bB1004BE7b1a7E66F452284",
        recipient: expectedRecipient,
        hash: expectedHash,
        timeout: {
          height: 59,
        },
      },
      signatures: [
        {
          nonce: 54 as Nonce,
          pubkey: {
            algo: Algorithm.Secp256k1,
            data: expectedPubkey,
          },
          signature: new ExtendedSecp256k1Signature(
            fromHex("9351a7fa42078636bd36bbae0d8d5f009b92986991bcc92ae882bce8982360d5"),
            fromHex("706a6f90bb42b9d929150839fb9aa06207e40f8d814183e30789ac036263497f"),
            0,
          ).toFixedLength() as SignatureBytes,
        },
      ],
    });
  });
it("works for Ether atomic swap claim", async () => {
// Retrieved from local instance since we haven't deployed this to a public testnet
// curl - sS - X POST--data '{"jsonrpc":"2.0","method":"eth_getTransactionByHash","params":["0x85bbdfabbf38e6888488f7e967a5b7784bc8041416773ecdfc2b57e365bc3777"],"id":1}' http://localhost:8545 | jq .result
const rawGetTransactionByHashResult: EthereumRpcTransactionResult = {
hash: "0x85bbdfabbf38e6888488f7e967a5b7784bc8041416773ecdfc2b57e365bc3777",
nonce: "0x7e",
blockHash: "0x3e89a5ab1eff099d952cc0a942c12f8802ba1ff5ed2e97b2711a5a94866f8ed6",
blockNumber: "0x7f",
transactionIndex: "0x0",
from: "0x88f3b5659075d0e06bb1004be7b1a7e66f452284",
to: "0xe1c9ea25a621cf5c934a7e112ecab640ec7d8d18",
value: "0x0",
gas: "0x200b20",
gasPrice: "0x4a817c800",
input:
"0x84cc9dfb069446e5b7469d5301212de56f17a8786bee70d9bf4c072e99fcfb2c4d9f5242c863ca8b63351354c4dafbf585a28095bf9ef5c6719fd7eacc7a1ce0ad27a298",
v: "0x2d46",
r: "0xde9a75921207a5df2757d76408436e38f2186a0047e2955f884f0672c805282c",
s: "0x259c0f7c9a1383ef35fa5f41996dfb38744f9df6fe027a2e107d0b6d40ab1ae6",
};
const expectedPubkey = fromHex(
"04965fb72aad79318cd8c8c975cf18fa8bcac0c091605d10e89cd5a9f7cff564b0cb0459a7c22903119f7a42947c32c1cc6a434a86f0e26aad00ca2b2aff6ba381",
) as PubkeyBytes;
const expectedSwapId: SwapId = {
prefix: SwapIdPrefix.Ether,
data: fromHex("069446e5b7469d5301212de56f17a8786bee70d9bf4c072e99fcfb2c4d9f5242") as SwapIdBytes,
};
const expectedPreimage = fromHex(
"c863ca8b63351354c4dafbf585a28095bf9ef5c6719fd7eacc7a1ce0ad27a298",
) as Preimage;
const postableBytes = toUtf8(JSON.stringify(rawGetTransactionByHashResult)) as PostableBytes;
const parsed = ethereumCodec.parseBytes(postableBytes, "ethereum-eip155-5777" as ChainId);
expect((parsed as unknown) as SignedTransaction<SwapClaimTransaction>).toEqual({
transaction: {
kind: "bcp/swap_claim",
chainId: "ethereum-eip155-5777" as ChainId,
fee: {
gasLimit: "2100000",
gasPrice: {
quantity: "20000000000",
fractionalDigits: 18,
tokenTicker: "ETH" as TokenTicker,
},
},
swapId: expectedSwapId,
preimage: expectedPreimage,
},
signatures: [
{
nonce: 126 as Nonce,
pubkey: {
algo: Algorithm.Secp256k1,
data: expectedPubkey,
},
signature: new ExtendedSecp256k1Signature(
fromHex("de9a75921207a5df2757d76408436e38f2186a0047e2955f884f0672c805282c"),
fromHex("259c0f7c9a1383ef35fa5f41996dfb38744f9df6fe027a2e107d0b6d40ab1ae6"),
1,
).toFixedLength() as SignatureBytes,
},
],
});
});
it("works for Ether atomic swap abort", async () => {
// Retrieved from local instance since we haven't deployed this to a public testnet
// curl - sS - X POST--data '{"jsonrpc":"2.0","method":"eth_getTransactionByHash","params":["0xb8a6bdbcc56f30e385e928fee46374bceec1c6887814c0b4bddb23c8df25d91b"],"id":1}' http://localhost:8545 | jq .result
const rawGetTransactionByHashResult: EthereumRpcTransactionResult = {
hash: "0xb8a6bdbcc56f30e385e928fee46374bceec1c6887814c0b4bddb23c8df25d91b",
nonce: "0xa0",
blockHash: "0x06d53572b61a054d12afab59fde70cbde3cbd9385e5a4fc07dadcf0c87abd414",
blockNumber: "0xa1",
transactionIndex: "0x0",
from: "0x88f3b5659075d0e06bb1004be7b1a7e66f452284",
to: "0xe1c9ea25a621cf5c934a7e112ecab640ec7d8d18",
value: "0x0",
gas: "0x200b20",
gasPrice: "0x4a817c800",
input: "0x09d6ce0ea7679de779f2df7fde7617a9cdd013c8dbf5701aa158173d9c615766a212d243",
v: "0x2d45",
r: "0x3449246d974d28fffae32af389ef7271c18ff6e6766ce6f54f6243764e6877d6",
s: "0x7e2280164650d4fc0c4a2fd1edfeedb70e7c8453117f9cbd6a644abd5e1ddf9b",
};
const expectedPubkey = fromHex(
"04965fb72aad79318cd8c8c975cf18fa8bcac0c091605d10e89cd5a9f7cff564b0cb0459a7c22903119f7a42947c32c1cc6a434a86f0e26aad00ca2b2aff6ba381",
) as PubkeyBytes;
const expectedSwapId: SwapId = {
prefix: SwapIdPrefix.Ether,
data: fromHex("a7679de779f2df7fde7617a9cdd013c8dbf5701aa158173d9c615766a212d243") as SwapIdBytes,
};
const postableBytes = toUtf8(JSON.stringify(rawGetTransactionByHashResult)) as PostableBytes;
const parsed = ethereumCodec.parseBytes(postableBytes, "ethereum-eip155-5777" as ChainId);
expect((parsed as unknown) as SignedTransaction<SwapAbortTransaction>).toEqual({
transaction: {
kind: "bcp/swap_abort",
chainId: "ethereum-eip155-5777" as ChainId,
fee: {
gasLimit: "2100000",
gasPrice: {
quantity: "20000000000",
fractionalDigits: 18,
tokenTicker: "ETH" as TokenTicker,
},
},
swapId: expectedSwapId,
},
signatures: [
{
nonce: 160 as Nonce,
pubkey: {
algo: Algorithm.Secp256k1,
data: expectedPubkey,
},
signature: new ExtendedSecp256k1Signature(
fromHex("3449246d974d28fffae32af389ef7271c18ff6e6766ce6f54f6243764e6877d6"),
fromHex("7e2280164650d4fc0c4a2fd1edfeedb70e7c8453117f9cbd6a644abd5e1ddf9b"),
0,
).toFixedLength() as SignatureBytes,
},
],
});
});
  // Verifies parsing of an offer against the ERC20 atomic swap contract into a
  // "bcp/swap_offer" transaction denominated in a registered ERC20 token (ASH).
  it("works for Erc20 atomic swap offer", () => {
    // Retrieved from local instance since we haven't deployed this to a public testnet
    // curl -sS -X POST --data '{"jsonrpc":"2.0","method":"eth_getTransactionByHash","params":["0x293419b275bfdab614697c2dabe85cf9f47ad8d728675d622d65e0839992afc2"],"id":1}' http://localhost:8545 | jq .result
    const rawGetTransactionByHashResult: EthereumRpcTransactionResult = {
      hash: "0x293419b275bfdab614697c2dabe85cf9f47ad8d728675d622d65e0839992afc2",
      nonce: "0xf5",
      blockHash: "0x9b87730ec2ff05f8017a7226f21eba47bc680e2edec681474efbbe4878602067",
      blockNumber: "0x10e",
      transactionIndex: "0x0",
      from: "0x88f3b5659075d0e06bb1004be7b1a7e66f452284",
      to: "0x9768ae2339b48643d710b11ddbdb8a7edbea15bc",
      value: "0x0",
      gas: "0x200b20",
      gasPrice: "0x4a817c800",
      // open(swapId, recipient, hash, timeoutHeight, tokenContract, amount)
      input:
        "0xe8d8a293c42b0efc99bb1726bb429b5a4ccf7b4b236c54027eb15cd5c24761f8adf8def30000000000000000000000009c212466a863c2635a31c41f6384818816869a0fc7c270042ff9d49b3283da3b12adb1711608096f3774bbb1fabf7727d9b1b0a20000000000000000000000000000000000000000000000000000000000000111000000000000000000000000cb642a87923580b6f7d07d1471f93361196f2650000000000000000000000000000000000000000000000000000000000001e078",
      v: "0x2d46",
      r: "0x26a7e609ce83e01b8e754641fbd9315f5a558c7b279d1bbe186d8be786c6fb18",
      s: "0x02555167f9584575f9740c0487dcdd2e1462a8374dd9dcc9e66cfa98eb1efd87",
    };
    const expectedPubkey = fromHex(
      "04965fb72aad79318cd8c8c975cf18fa8bcac0c091605d10e89cd5a9f7cff564b0cb0459a7c22903119f7a42947c32c1cc6a434a86f0e26aad00ca2b2aff6ba381",
    ) as PubkeyBytes;
    const expectedSwapId: SwapId = {
      prefix: SwapIdPrefix.Erc20,
      data: fromHex("c42b0efc99bb1726bb429b5a4ccf7b4b236c54027eb15cd5c24761f8adf8def3") as SwapIdBytes,
    };
    const expectedRecipient = "0x9c212466A863C2635A31c41f6384818816869a0F" as Address;
    const expectedHash = fromHex(
      "c7c270042ff9d49b3283da3b12adb1711608096f3774bbb1fabf7727d9b1b0a2",
    ) as Hash;
    // Register the ASH token so the token contract in the input data is recognized.
    const erc20Tokens = new Map<TokenTicker, Erc20Options>([
      [
        "ASH" as TokenTicker,
        {
          contractAddress: "0xCb642A87923580b6F7D07D1471F93361196f2650" as Address,
          decimals: 12,
          symbol: "ASH" as TokenTicker,
        },
      ],
    ]);
    const postableBytes = toUtf8(JSON.stringify(rawGetTransactionByHashResult)) as PostableBytes;
    const codec = new EthereumCodec({
      atomicSwapErc20ContractAddress: testConfig.connectionOptions.atomicSwapErc20ContractAddress,
      erc20Tokens: erc20Tokens,
    });
    const parsed = codec.parseBytes(postableBytes, "ethereum-eip155-5777" as ChainId);
    expect((parsed as unknown) as SignedTransaction<SwapOfferTransaction>).toEqual({
      transaction: {
        kind: "bcp/swap_offer",
        chainId: "ethereum-eip155-5777" as ChainId,
        fee: {
          gasLimit: "2100000",
          gasPrice: {
            quantity: "20000000000",
            fractionalDigits: 18,
            tokenTicker: "ETH" as TokenTicker,
          },
        },
        amounts: [
          {
            quantity: "123000",
            fractionalDigits: 12,
            tokenTicker: "ASH" as TokenTicker,
          },
        ],
        swapId: expectedSwapId,
        sender: "0x88F3b5659075D0E06bB1004BE7b1a7E66F452284",
        recipient: expectedRecipient,
        hash: expectedHash,
        timeout: {
          height: 273,
        },
      },
      signatures: [
        {
          nonce: 245 as Nonce,
          pubkey: {
            algo: Algorithm.Secp256k1,
            data: expectedPubkey,
          },
          signature: new ExtendedSecp256k1Signature(
            fromHex("26a7e609ce83e01b8e754641fbd9315f5a558c7b279d1bbe186d8be786c6fb18"),
            fromHex("02555167f9584575f9740c0487dcdd2e1462a8374dd9dcc9e66cfa98eb1efd87"),
            1,
          ).toFixedLength() as SignatureBytes,
        },
      ],
    });
  });
it("works for Erc20 atomic swap claim", async () => {
// Retrieved from local instance since we haven't deployed this to a public testnet
// curl - sS - X POST--data '{"jsonrpc":"2.0","method":"eth_getTransactionByHash","params":["0xa7a4ed452ee5861aaa20837a4fdf077b8ab28a2c08a228b6eb64268b8ee35764"],"id":1}' http://localhost:8545 | jq .result
const rawGetTransactionByHashResult: EthereumRpcTransactionResult = {
hash: "0xa7a4ed452ee5861aaa20837a4fdf077b8ab28a2c08a228b6eb64268b8ee35764",
nonce: "0x133",
blockHash: "0x468c58013ef907cd84c40484e35a183f28b48469ff38f6e8c761d42e07c85a0c",
blockNumber: "0x150",
transactionIndex: "0x0",
from: "0x88f3b5659075d0e06bb1004be7b1a7e66f452284",
to: "0x9768ae2339b48643d710b11ddbdb8a7edbea15bc",
value: "0x0",
gas: "0x200b20",
gasPrice: "0x4a817c800",
input:
"0x84cc9dfb94d53ea2d55dc86e65e44fcb473fb58dbbc00eab27f414d1b280af26222a995c2d5620657269304e87faf497880c77f5a4f7bb7b3eff66da70a83408549c219f",
v: "0x2d45",
r: "0x388baff61d88ff954b40e8980c42a50633d08f954a3b281513ffbd1759fa902e",
s: "0x578e59f1cfba37881d4851d0cb96eee287436fbe4a3a9ea4f6407c064f1e4f03",
};
const expectedPubkey = fromHex(
"04965fb72aad79318cd8c8c975cf18fa8bcac0c091605d10e89cd5a9f7cff564b0cb0459a7c22903119f7a42947c32c1cc6a434a86f0e26aad00ca2b2aff6ba381",
) as PubkeyBytes;
const expectedSwapId: SwapId = {
prefix: SwapIdPrefix.Erc20,
data: fromHex("94d53ea2d55dc86e65e44fcb473fb58dbbc00eab27f414d1b280af26222a995c") as SwapIdBytes,
};
const expectedPreimage = fromHex(
"2d5620657269304e87faf497880c77f5a4f7bb7b3eff66da70a83408549c219f",
) as Preimage;
const erc20Tokens = new Map<TokenTicker, Erc20Options>([
[
"ASH" as TokenTicker,
{
contractAddress: "0xCb642A87923580b6F7D07D1471F93361196f2650" as Address,
decimals: 12,
symbol: "ASH" as TokenTicker,
},
],
]);
const postableBytes = toUtf8(JSON.stringify(rawGetTransactionByHashResult)) as PostableBytes;
const codec = new EthereumCodec({
atomicSwapErc20ContractAddress: testConfig.connectionOptions.atomicSwapErc20ContractAddress,
erc20Tokens: erc20Tokens,
});
const parsed = codec.parseBytes(postableBytes, "ethereum-eip155-5777" as ChainId);
expect((parsed as unknown) as SignedTransaction<SwapClaimTransaction>).toEqual({
transaction: {
kind: "bcp/swap_claim",
chainId: "ethereum-eip155-5777" as ChainId,
fee: {
gasLimit: "2100000",
gasPrice: {
quantity: "20000000000",
fractionalDigits: 18,
tokenTicker: "ETH" as TokenTicker,
},
},
swapId: expectedSwapId,
preimage: expectedPreimage,
},
signatures: [
{
nonce: 307 as Nonce,
pubkey: {
algo: Algorithm.Secp256k1,
data: expectedPubkey,
},
signature: new ExtendedSecp256k1Signature(
fromHex("388baff61d88ff954b40e8980c42a50633d08f954a3b281513ffbd1759fa902e"),
fromHex("578e59f1cfba37881d4851d0cb96eee287436fbe4a3a9ea4f6407c064f1e4f03"),
0,
).toFixedLength() as SignatureBytes,
},
],
});
});
it("works for Erc20 atomic swap abort", async () => {
// Retrieved from local instance since we haven't deployed this to a public testnet
// curl - sS - X POST--data '{"jsonrpc":"2.0","method":"eth_getTransactionByHash","params":["0x224c51c8641da40fec0987972af1f6e7bf4635a2eb7366be68da1bfcea144814"],"id":1}' http://localhost:8545 | jq .result
const rawGetTransactionByHashResult: EthereumRpcTransactionResult = {
hash: "0x224c51c8641da40fec0987972af1f6e7bf4635a2eb7366be68da1bfcea144814",
nonce: "0x15b",
blockHash: "0x66b46bae1c7023c2c3c98228fabd317b096ad5a0d542677d4c3cf3f739bdddc5",
blockNumber: "0x178",
transactionIndex: "0x0",
from: "0x88f3b5659075d0e06bb1004be7b1a7e66f452284",
to: "0x9768ae2339b48643d710b11ddbdb8a7edbea15bc",
value: "0x0",
gas: "0x200b20",
gasPrice: "0x4a817c800",
input: "0x09d6ce0e144c07a765cd2435882edbc334218b1678b2c5773284bf715ba766f97ee4f2fd",
v: "0x2d46",
r: "0x367e3b3f8253f2f0f4404754b139d93362973de949ccdd5f0dc5a342f8cd2131",
s: "0x0f80b13350349f083f44dd507a7ae7bc783389a7b4c18853d073e0e8c59c6ce0",
};
const expectedPubkey = fromHex(
"04965fb72aad79318cd8c8c975cf18fa8bcac0c091605d10e89cd5a9f7cff564b0cb0459a7c22903119f7a42947c32c1cc6a434a86f0e26aad00ca2b2aff6ba381",
) as PubkeyBytes;
const expectedSwapId: SwapId = {
prefix: SwapIdPrefix.Erc20,
data: fromHex("144c07a765cd2435882edbc334218b1678b2c5773284bf715ba766f97ee4f2fd") as SwapIdBytes,
};
const postableBytes = toUtf8(JSON.stringify(rawGetTransactionByHashResult)) as PostableBytes;
const parsed = ethereumCodec.parseBytes(postableBytes, "ethereum-eip155-5777" as ChainId);
expect((parsed as unknown) as SignedTransaction<SwapAbortTransaction>).toEqual({
transaction: {
kind: "bcp/swap_abort",
chainId: "ethereum-eip155-5777" as ChainId,
fee: {
gasLimit: "2100000",
gasPrice: {
quantity: "20000000000",
fractionalDigits: 18,
tokenTicker: "ETH" as TokenTicker,
},
},
swapId: expectedSwapId,
},
signatures: [
{
nonce: 347 as Nonce,
pubkey: {
algo: Algorithm.Secp256k1,
data: expectedPubkey,
},
signature: new ExtendedSecp256k1Signature(
fromHex("367e3b3f8253f2f0f4404754b139d93362973de949ccdd5f0dc5a342f8cd2131"),
fromHex("0f80b13350349f083f44dd507a7ae7bc783389a7b4c18853d073e0e8c59c6ce0"),
1,
).toFixedLength() as SignatureBytes,
},
],
});
});
});
}); | the_stack |
import {
triggerEvent, options
} from '@tko/utils';
import {
applyBindings
} from '@tko/bind';
import {
observable as Observable
} from '@tko/observable';
import {
bindings as coreBindings
} from '@tko/binding.core';
import {
MultiProvider
} from '@tko/provider.multi'
import {
DataBindProvider
} from '@tko/provider.databind'
import {
AttributeMustacheProvider
} from '../dist';
import '@tko/utils/helpers/jasmine-13-helper';
// Build a minimal binding-context stub whose `lookup` resolves names from `obj`.
function ctxStub (obj = {}) {
  const lookup = (v) => obj[v]
  return { lookup }
}
// Unit tests for AttributeMustacheProvider: parsing of {{...}} expressions in
// element attributes and the binding objects/accessors generated from them.
describe('Attribute Interpolation Markup Provider', function () {
  var testNode, provider;

  beforeEach(function () {
    provider = new AttributeMustacheProvider()
    options.bindingProviderInstance = provider
    testNode = document.createElement('div');
    provider.bindingHandlers.set(coreBindings);
  });

  // Applies bindings with an empty view model so attribute mustaches are processed.
  function runAttributeInterpolation (testNode) {
    applyBindings({}, testNode)
  }

  it('Should do nothing when there are no expressions', function () {
    testNode.setAttribute('title', 'some text');
    expect(testNode.title).toEqual('some text');
    // FIX: the original asserted on `Object.keys(provider.getBindingAccessors)`,
    // i.e. the keys of the *function object* (always empty), which passes vacuously.
    // Call the provider like the sibling tests do.
    const bindings = provider.getBindingAccessors(testNode)
    expect(Object.keys(bindings).length).toBe(0)
  });

  it('Should do nothing when empty', function () {
    testNode.setAttribute('title', '');
    runAttributeInterpolation(testNode);
    expect(testNode.title).toEqual('');
    const bindings = provider.getBindingAccessors(testNode)
    expect(Object.keys(bindings).length).toBe(0)
  });

  it('Should not parse unclosed binding', function () {
    testNode.setAttribute('title', 'some {{text');
    runAttributeInterpolation(testNode);
    expect(testNode.title).toEqual('some {{text');
    const bindings = provider.getBindingAccessors(testNode)
    expect(Object.keys(bindings).length).toBe(0)
  });

  it('Should not parse unopened binding', function () {
    testNode.setAttribute('title', 'some}} text');
    runAttributeInterpolation(testNode);
    expect(testNode.title).toEqual('some}} text');
    const bindings = provider.getBindingAccessors(testNode)
    expect(Object.keys(bindings).length).toBe(0)
  });

  it('Should create binding from {{...}} expression', function () {
    testNode.setAttribute('title', 'some {{expr}} text');
    const bindings = Array.from(provider.bindingParts(testNode, {}))
    expect(bindings.length).toBe(1)
    const [handler, parts] = bindings[0]
    expect(handler).toEqual('title')
    expect(parts.length).toBe(3)
    expect(parts[0].text).toEqual('some ')
    expect(parts[1].text).toEqual('expr')
    expect(parts[2].text).toEqual(' text')
  });

  it('Should ignore unmatched delimiters', function () {
    testNode.setAttribute('title', 'some {{expr1}}expr2}} text');
    const bindings = Array.from(provider.bindingParts(testNode, {}))
    expect(bindings.length).toBe(1)
    // Greedy match: the inner "}}" is swallowed into the expression text.
    const [, parts] = bindings[0]
    expect(parts.length).toBe(3)
    expect(parts[0].text).toEqual('some ')
    expect(parts[1].text).toEqual('expr1}}expr2')
    expect(parts[2].text).toEqual(' text')
  });

  it('Should support two expressions', function () {
    testNode.setAttribute('title', 'some {{expr1}} middle {{expr2}} text');
    const bindings = Array.from(provider.bindingParts(testNode, {}))
    expect(bindings.length).toBe(1)
    const [, parts] = bindings[0]
    expect(parts.length).toBe(5)
    const expected = ['some ', 'expr1', ' middle ', 'expr2', ' text']
    for (let i = 0; i < expected.length; ++i) {
      expect(parts[i].text).toEqual(expected[i])
    }
  });

  it('Should skip empty text', function () {
    testNode.setAttribute('title', '{{expr1}}{{expr2}}');
    const bindings = Array.from(provider.bindingParts(testNode, {}))
    expect(bindings.length).toBe(1)
    // No zero-length text parts between adjacent expressions.
    const [, parts] = bindings[0]
    expect(parts.length).toBe(2)
    const expected = ['expr1', 'expr2']
    for (let i = 0; i < expected.length; ++i) {
      expect(parts[i].text).toEqual(expected[i])
    }
  });

  it('Should support more than two expressions', function () {
    testNode.setAttribute('title', 'x {{expr1}} y {{expr2}} z {{expr3}}');
    const bindings = Array.from(provider.bindingParts(testNode, {}))
    expect(bindings.length).toBe(1)
    const [, parts] = bindings[0]
    expect(parts.length).toBe(6)
    const expected = ['x ', 'expr1', ' y ', 'expr2', ' z ', 'expr3']
    for (let i = 0; i < expected.length; ++i) {
      expect(parts[i].text).toEqual(expected[i])
    }
  });

  it('Should create simple binding for single expression', function () {
    testNode.setAttribute('title', '{{expr1}}');
    const bindings = Array.from(provider.bindingParts(testNode, {}))
    expect(bindings.length).toBe(1)
    const [, parts] = bindings[0]
    expect(parts.length).toBe(1)
    const expected = ['expr1']
    for (let i = 0; i < expected.length; ++i) {
      expect(parts[i].text).toEqual(expected[i])
    }
  });

  it('Should support expressions in multiple attributes', function () {
    testNode.setAttribute('title', '{{expr1}}');
    testNode.setAttribute('class', 'test'); // skipped b/c not interpolated
    testNode.setAttribute('id', '{{expr2}}');
    testNode.setAttribute('data-test', '{{expr3}}');
    const bindings = Array.from(provider.bindingParts(testNode, {}))
    expect(bindings.length).toBe(3)
    const map = { title: 'expr1', id: 'expr2', 'data-test': 'expr3' }
    bindings.forEach(b => {
      const [handler, [part]] = b
      expect(map[handler]).toEqual(part.text)
    })
    expect(testNode.getAttribute('class')).toEqual('test')
  });

  it('Should convert value and checked attributes to two-way bindings', function () {
    var input = document.createElement('input')
    input.type = 'checkbox'
    input.setAttribute('checked', '{{expr2}}')
    input.setAttribute('value', '{{expr1}}')
    const ctx = { expr1: Observable(), expr2: Observable() }
    // FIX: the original passed `testNode` (which has no attributes) instead of
    // `input`, so `bindings` was empty and the loop below never ran — the test
    // passed vacuously. Also assert the expected binding count to guard against
    // future vacuity.
    const bindings = Array.from(
      provider.bindingObjects(input, ctxStub(ctx))
    )
    expect(bindings.length).toBe(2)
    for (const binding of bindings) {
      if (binding.checked) {
        expect(binding.checked).toEqual(ctx.expr2)
      } else if (binding.value) {
        expect(binding.value).toEqual(ctx.expr1)
      } else {
        throw new Error('Unexpected bindings.')
      }
    }
  });

  it('Should support custom attribute binding using "attributeBinding" overloading', function () {
    // Subclass hook: attributes named "ko-<name>" are rewritten to bind as "<name>".
    class KOAttr extends AttributeMustacheProvider {
      attributeBinding (name, value) {
        const parsedName = name.match(/^ko-(.*)$/)
        if (parsedName) {
          return super.attributeBinding(parsedName[1], value)
        }
        return super.attributeBinding(name, value)
      }
    }
    const provider = new KOAttr()
    // Won't be in data-bind because it doesn't include an expression
    testNode.setAttribute('ko-class', 'test')
    // Should handle normal attributes normally
    testNode.setAttribute('title', '{{expr1}}')
    // This will use the custom handler
    testNode.setAttribute('ko-id', '{{expr2}}')
    const ctx = {expr1: 'x', expr2: 'y'}
    const bindings = provider.getBindingAccessors(testNode, ctxStub(ctx))
    expect(Object.keys(bindings).length).toEqual(2)
    expect(bindings['attr.title']().title).toEqual('x')
    expect(bindings['attr.id']().id).toEqual('y')
    // expect(testNode.getAttribute('data-bind')).toEqual('attr.title:expr1,attr.id:expr2')
  });

  it('should set the style attribute (when there is a `style` binding)', function () {
    var obs = Observable()
    testNode.innerHTML = '<div style="color: {{ obs }}"></div>'
    var div = testNode.childNodes[0]
    applyBindings({obs: obs}, testNode)
    // Undefined observable renders as empty string; updates propagate live.
    expect(div.getAttribute('style')).toEqual('color: ')
    obs('red')
    expect(div.getAttribute('style')).toEqual('color: red')
  })
});
// End-to-end tests: attribute mustaches applied through a MultiProvider
// (AttributeMustacheProvider + DataBindProvider) against real DOM nodes.
describe('Attribute Interpolation Markup bindings', function () {
  beforeEach(jasmine.prepareTestNode);

  var bindingHandlers;

  beforeEach(function () {
    const providers = [
      new AttributeMustacheProvider(),
      new DataBindProvider()
    ]
    const provider = new MultiProvider({providers})
    options.bindingProviderInstance = provider
    bindingHandlers = provider.bindingHandlers
    bindingHandlers.set(coreBindings)
  });

  it('Should replace {{...}} expression in attribute', function () {
    testNode.innerHTML = "<div title='hello {{\"name\"}}!'></div>";
    applyBindings(null, testNode);
    expect(testNode.childNodes[0].title).toEqual('hello name!');
  });

  it('Should replace multiple expressions', function () {
    testNode.innerHTML = "<div title='hello {{\"name\"}}{{\"!\"}}'></div>";
    applyBindings(null, testNode);
    expect(testNode.childNodes[0].title).toEqual('hello name!');
  });

  it('Should support backtick interpolation', function () {
    testNode.innerHTML = "<div title='hello {{ `a${name}b` }}!'></div>";
    applyBindings({ name: 'n' }, testNode);
    expect(testNode.childNodes[0].title).toEqual('hello anb!');
  });

  it('Should properly handle quotes in text sections', function () {
    testNode.innerHTML = "<div title='This is \"great\" {{\"fun\"}} with 'friends''></div>";
    applyBindings(null, testNode);
    expect(testNode.childNodes[0].title).toEqual("This is \"great\" fun with 'friends'");
  });

  it('Should ignore unmatched }} and {{', function () {
    testNode.innerHTML = "<div title='hello }}\"name\"{{\"!\"}}{{'></div>";
    applyBindings(null, testNode);
    expect(testNode.childNodes[0].title).toEqual('hello }}"name"!{{');
  });

  it('Should support expressions in multiple attributes', function () {
    testNode.innerHTML = "<div title='{{title}}' id='{{id}}' class='test class' data-test='hello {{\"name\"}}!' data-bind='text:content'></div>";
    applyBindings({title: 'the title', id: 'test id', content: 'content'}, testNode);
    expect(testNode).toContainText('content');
    expect(testNode.childNodes[0].title).toEqual('the title');
    expect(testNode.childNodes[0].id).toEqual('test id');
    expect(testNode.childNodes[0].className).toEqual('test class');
    expect(testNode.childNodes[0].getAttribute('data-test')).toEqual('hello name!');
  });

  it('Should update when observable changes', function () {
    testNode.innerHTML = "<div title='The best {{what}}.'></div>";
    var observable = Observable('time');
    applyBindings({what: observable}, testNode);
    expect(testNode.childNodes[0].title).toEqual('The best time.');
    observable('fun');
    expect(testNode.childNodes[0].title).toEqual('The best fun.');
  });

  it('Should convert value attribute to two-way binding', function () {
    testNode.innerHTML = "<input value='{{value}}'/>";
    var observable = Observable('default value');
    applyBindings({value: observable}, testNode);
    expect(testNode.childNodes[0].value).toEqual('default value');
    // NOTE(review): the "user-enterd" typo is consistent in the input value and
    // the assertion below, so it does not affect the result — arbitrary fixture text.
    testNode.childNodes[0].value = 'user-enterd value';
    triggerEvent(testNode.childNodes[0], 'change');
    expect(observable()).toEqual('user-enterd value');
  });

  it('Should convert checked attribute to two-way binding', function () {
    testNode.innerHTML = "<input type='checkbox' checked='{{isChecked}}'/>";
    var observable = Observable(true);
    applyBindings({isChecked: observable}, testNode);
    expect(testNode.childNodes[0].checked).toBe(true);
    // Clicking the checkbox must write back into the observable.
    testNode.childNodes[0].click();
    expect(observable()).toBe(false);
  });
});
declare namespace TL {
    /** Constructor/entry point exposed by the TimelineJS library (see ITimeline). */
    export var Timeline: ITimeline;
    /**
     * The Timeline instance/constructor. Construct with the id of a container
     * element plus either a data source string or an ITimelineConfig object,
     * optionally followed by display options.
     */
    export interface ITimeline extends ITimelineEvents {
        new (containerId: string, data: string | ITimelineConfig): ITimeline;
        new (containerId: string, data: string | ITimelineConfig, options: ITimelineOptions): ITimeline;
        /** Navigate to the slide with the given unique id. */
        goToId: (id: string | number) => void;
        /** Navigate to the slide at index n. */
        goTo: (n: number) => void;
        goToStart: () => void;
        goToEnd: () => void;
        goToPrev: () => void;
        goToNext: () => void;
        /** Add a new event slide from the given slide data. */
        add: (event: ITimelineSlideData) => void;
        /** Remove the slide at index n. */
        remove: (n: number) => void;
        /** Remove the slide with the given unique id. */
        removeId: (id: string | number) => void;
        /** Slide data accessors, by index or by unique id. */
        getData: (n: number) => ITimelineSlideData;
        getDataById: (id: string | number) => ITimelineSlideData;
        /** Slide object accessors, by index or by unique id. */
        getSlide: (n: number) => ITimelineSlide;
        getSlideById: (id: string | number) => ITimelineSlide;
        getCurrentSlide: () => ITimelineSlide;
        /** Force a re-render, e.g. after the container was resized. */
        updateDisplay: () => void;
        setConfig: (config: ITimelineConfig) => void;
        /** Display a message in the timeline container. */
        showMessage: (msg: string) => void;
        /** Zoom controls; setZoom jumps to a specific zoom level. */
        zoomIn: () => void;
        zoomOut: () => void;
        setZoom: (level: number) => void;
        /** Unique id of the currently displayed slide. */
        current_id: string;
        // Internal helper (underscore-prefixed upstream); maps a slide id to its index.
        _getSlideIndex(id: string | number): number;
    }
export interface ITimelineEvents {
addEventListener(type: string, fn: ()=>void, context?: any): ITimelineEvents;
hasEventListeners(type: string): boolean;
removeEventListener(type: string, fn:()=>void, context?: any): ITimelineEvents;
fireEvent(type: string, data?: any): ITimelineEvents;
on(type: string, fn: ()=>void, context?: any): ITimelineEvents;
off(type: string, fn:()=>void, context?: any): ITimelineEvents;
fire(type: string, data?: any): ITimelineEvents;
}
export interface ITimelineSlide {
data: ITimelineSlideData;
}
export interface ITimelineConfig {
events: ITimelineSlideData[];
title?: ITimelineSlideData | undefined;
eras?: ITimelineEra[] | undefined;
/*
* Either human or cosmological. If no scale is specified, the default is human. The cosmological scale is
* required to handle dates in the very distant past or future. (Before Tuesday, April 20th, 271,821 BCE
* after Saturday, September 13 275,760 CE) For the cosmological scale, only the year is considered, but it's
* OK to have a cosmological timeline with years between 271,821 BCE and 275,760 CE.
*/
scale?: 'human' | 'cosmological' | undefined;
}
export interface ITimelineSlideData {
/*
* Required for events, but not for `title` slides.
*/
start_date?: ITimelineDate | undefined;
end_date?: ITimelineDate | undefined;
/*
* Not required, but recommended.
*/
text?: ITimelineText | undefined;
media?: ITimelineMedia | undefined;
/*
* If present, Timeline will organize events with the same value for group to be in the same row or adjacent
* rows, separate from events in other groups. The common value for the group will be shown as a label at the
* left edge of the navigation.
*/
group?: string | undefined;
/*
* A string which will be used when Timeline displays the date for this. If used, override's display_date
* values set on the start or end date for this event, which is useful if you want to control how the two
* dates relate to each other.
*/
display_date?: string | undefined;
/*
* A Javascript object. The object can have these properties:
* url: the fully-qualified URL pointing to an image which will be used as the background
* color: a CSS color, in hexadecimal (e.g. #0f9bd1) or a valid CSS color keyword.
*/
background?: { url?: string | undefined, color?: string | undefined } | undefined;
/*
* Defaults to true, which means that Timeline will scan text fields and automatically add <a> tags so that
* links and email addresses are "clickable." If set to false, you may still manually apply the tags in the
* appropriate fields when you want links. Autolinking applies to the text field in a text object and the
* caption and credit fields in a media object.
*/
autolink?: boolean | undefined;
/*
* A string value which is unique among all slides in your timeline. If not specified, TimelineJS will
* construct an ID based on the headline, but if you later edit your headline, the ID will change. Unique IDs
* are used when the hash_bookmark option is used, and can also be used with the timeline.goToId() method to
* programmatically move the timeline to a specific slide.
*/
unique_id?: string | undefined;
}
/*
* Era objects are JSON objects which are used to label a span of time on the timeline navigation component. In
* structure, they are essentially very restricted "slide" objects.
*/
export interface ITimelineEra {
start_date: ITimelineDate;
end_date: ITimelineDate;
/*
* Not required, but recommended.
*/
text?: ITimelineText | undefined;
}
export interface ITimelineDate {
/*
* BCE years should be negative numbers.
*/
year: number;
/*
* 1-12
*/
month?: number | undefined;
day?: number | undefined;
/*
* 0-23
*/
hour?: number | undefined;
/*
* 0-59
*/
minute?: number | undefined;
/*
* 0-59
*/
second?: number | undefined;
millisecond?: number | undefined;
/*
* A string for presenting the date. Useful if Timeline's date formatting doesn't fit your needs.
*/
display_date?: string | undefined;
}
export interface ITimelineText {
/*
* HTML markup is OK. Blank is also OK.
*/
headline?: string | undefined;
/*
* HTML markup is OK. Blank is also OK. Not used for era objects.
*/
text?: string | undefined;
}
export interface ITimelineMedia {
/*
* In most cases, a URL (see https://timeline.knightlab.com/docs/media-types.html for complete details).
*/
url: string;
/*
* HTML markup is OK.
*/
caption?: string | undefined;
/*
* HTML markup is OK.
*/
credit?: string | undefined;
/*
* A URL for an image to use in the timenav marker for this event. If omitted, Timeline will use an icon based
* on the type of media. Not relevant for title slides, because they do not have a marker.
*/
thumbnail?: string | undefined;
}
export interface ITimelineOptions {
/*
* Default: false
* If true, copious console logging will be enabled.
*/
debug?: boolean | undefined;
/*
* Default: this._el.container.offsetHeight
* The height of the timeline.
*/
height?: number | undefined;
/*
* Default: this._el.container.offsetWidth
* The width of the timeline.
*/
width?: number | undefined;
/*
* Default: false
* If true, the class tl-timeline-embed is added to the outer Timeline container. Typically only used to support Timeline iframe embeds.
*/
is_embed?: boolean | undefined;
/*
* Default: false
* If set to true, TimelineJS will update the browser URL each time a slide advances, so that people can link directly to specific slides.
*/
hash_bookmark?: boolean | undefined;
/*
* Default: white
* RGB values to use for slide backgrounds. Specify as hex code, CSS named color, or a Javascript object with r, g, and b properties from 0-255.
*/
default_bg_color?: string | undefined;
/*
* Default: 2
* How many screen widths wide the timeline should be at first presentation.
*/
scale_factor?: number | undefined;
/*
* The position in the zoom_sequence series used to scale the Timeline when it is first created. Takes precedence over scale_factor.
*/
initial_zoom?: number | undefined;
/*
* Default: [0.5, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89]
* Array of values for TimeNav zoom levels. Each value is a scale_factor, which means that at any given level, the full timeline would require that many screens to display all events.
*/
zoom_sequence?: number[] | undefined;
/*
* Default: 'bottom'
* Display the timeline nav on the top or bottom.
*/
timenav_position?: 'bottom' | 'top' | undefined;
/*
* Default: 100
* optimal distance (in pixels) between ticks on axis
*/
optimal_tick_width?: number | undefined;
/*
* Default: 'tl-timeline
* Removing the tl-timeline base class will disable all default stylesheets.
*/
base_class?: string | undefined;
/*
* Default: 150
* The height in pixels of the timeline nav. Takes precedence over timenav_height_percentage.
*/
timenav_height?: number | undefined;
/*
* Default: 25
* Specify the timeline nav height as a percentage of the screen instead of in pixels.
*/
timenav_height_percentage?: number | undefined;
/*
* Default: 40
* Specify the timeline nav height as a percentage of a mobile device screen.
*/
timenav_mobile_height_percentage?: number | undefined;
/*
* Default: 150
* The minimum timeline nav height (in pixels).
*/
timenav_height_min?: number | undefined;
/*
* Default: 30
* The minimum marker height (in pixels).
*/
marker_height_min?: number | undefined;
/*
* Default: 100
* The minimum marker witdh (in pixels).
*/
marker_width_min?: number | undefined;
/*
* Default: 5
* Top and bottom padding (in pixels) for markers.
*/
marker_padding?: number | undefined;
/*
* Default: 0
* The first slide to display when the timeline is loaded.
*/
start_at_slide?: number | undefined;
/*
* Default: false
* If true, loads timeline on last slide.
*/
start_at_end?: boolean | undefined;
/*
* Default: 0
*/
menubar_height?: number | undefined;
/*
* Default: false
* Use declared suffix on dates earlier than 0.
*/
use_bc?: boolean | undefined;
/*
* Default: 1000
* Animation duration (in milliseconds).
*/
duration?: number | undefined;
/*
* Default: TL.Ease.easeInOutQuint
*/
ease?: (() => number) | undefined;
/*
* Default: true
*/
dragging?: boolean | undefined;
/*
* Default: true
*/
trackResize?: boolean | undefined;
/*
* Default: 100
* Padding (in pixels) on the left and right of each slide.
*/
slide_padding_lr?: number | undefined;
/*
* Default: '0%'
*/
slide_default_fade?: string | undefined;
/*
* Default: 'en'
*/
language?: string | undefined;
/*
* Default: null
* Google Analytics ID.
*/
ga_property_id?: any;
/*
* Default: ['back_to_start','nav_next','nav_previous','zoom_in','zoom_out']
*/
track_events?: ('back_to_start' | 'nav_next' | 'nav_previous' | 'zoom_in' | 'zoom_out')[] | undefined;
/*
* Default: ''
* Can be used to help Timeline load related resources such as CSS themes and language files. Rarely needs to be set.
*/
script_path?: string | undefined;
}
} | the_stack |
import TokenStore from '../backend/TokenStore'
import Router from './router'
import AppView from './appview'
import StreamingView from './streamingview'
import xCloudView from './xcloudview'
import appMenu from '../backend/appMenu'
import Plugins from '../frontend/plugins'
// Plugins
import { OpentrackPluginFrontend as OpentrackPlugin } from '../plugins/frontend/opentrack'
/**
 * Callable signature for application-level event listeners; `data` is the
 * event payload.
 * NOTE(review): emitEvent forwards `data: any`, so the string type here is not
 * enforced at the call site — confirm payload types per event.
 */
interface EventCallback {
    (data: string): void;
}
export default class Application {
_eventOnWebToken: EventCallback[] = []
_eventOnStreamingToken: EventCallback[] = []
_eventOnxCloudStreamingToken: EventCallback[] = []
_tokenStore = new TokenStore()
_router = new Router()
_AppView:AppView
_StreamingView:StreamingView
_xCloudView:xCloudView
_menu:appMenu
_plugins:Plugins
_ipc:any
constructor(){
this.listenForTokens()
this._ipc = window.require('electron').ipcRenderer
// this._menu = new appMenu()
// this._plugins = new Plugins(this._menu, this._tokenStore)
this._plugins = new Plugins(this)
this._plugins.load('opentrack', OpentrackPlugin)
// Load plugins here
// this._plugins.load('opentrack', OpentrackPlugin)
// this._plugins.load('webui', WebuiPlugin)
// Load splashscreen for one second to let the application to lookup existing cookies.
setTimeout(() => {
// @TODO: Add check for expires cookies?
if(this._tokenStore._web.uhs === '' && this._tokenStore._streamingToken === ''){
this._router.setView('auth')
}
}, 1000)
const debugStreamingView = (<HTMLInputElement>document.getElementById('actionBarStreamingView'))
debugStreamingView.style.display = (process.env.ISDEV !== undefined) ? 'block': 'none'
const debugPlugins = (<HTMLInputElement>document.getElementById('actionBarPlugins'))
debugPlugins.style.display = (process.env.ISDEV !== undefined) ? 'inline-block': 'none'
this._router.addEventListener('onviewshow', (event:any) => {
// Check if we need the actionbar
if(event.view === 'app' || event.view === 'streaming' || event.view === 'xCloud'){
const actionBar = (<HTMLInputElement>document.getElementById('actionBar'))
actionBar.style.display = 'block'
} else {
const actionBar = (<HTMLInputElement>document.getElementById('actionBar'))
actionBar.style.display = 'none'
}
// Load Appviews
if(event.view === 'auth'){
const backgrounds = [
'linear-gradient(0deg, rgba(26,27,30,1) 0%, rgba(26,27,30,1) 50%, rgba(0,212,255,0) 100%), url(\'assets/images/background_1.jpg\')',
'linear-gradient(0deg, rgba(26,27,30,1) 0%, rgba(26,27,30,1) 50%, rgba(0,212,255,0) 100%), url(\'assets/images/background_2.jpg\')',
'linear-gradient(0deg, rgba(26,27,30,1) 0%, rgba(26,27,30,1) 50%, rgba(0,212,255,0) 100%), url(\'assets/images/background_3.jpg\')',
'linear-gradient(0deg, rgba(26,27,30,1) 0%, rgba(26,27,30,1) 50%, rgba(0,212,255,0) 100%), url(\'assets/images/background_4.jpg\')',
]
const authView = (<HTMLInputElement>document.getElementById('authView'))
// appView.style.backgroundImage = "linear-gradient(0deg, rgba(26,27,30,1) 0%, rgba(26,27,30,1) 50%, rgba(0,212,255,0) 100%), url('assets/images/background_1.jpg')"
// appView.style.backgroundImage = "linear-gradient(0deg, rgba(26,27,30,1) 0%, rgba(26,27,30,1) 50%, rgba(0,212,255,0) 100%), url('assets/images/background_2.jpg')"
const randomSelect = backgrounds[Math.floor(Math.random()*backgrounds.length)];
authView.style.backgroundImage = randomSelect
// open popup?
const url = "https://account.xbox.com/account/signin?returnUrl=https%3A%2F%2Fwww.xbox.com%2Fen-US%2Fplay&ru=https%3A%2F%2Fwww.xbox.com%2Fen-US%2Fplay"
window.open(url)
} else if(event.view === 'app'){
if(this._AppView === undefined){
this._AppView = new AppView(this)
}
this._AppView.load()
} else if(event.previousView === 'app'){
// Unload appview
if(this._AppView !== undefined){
this._AppView.unload()
}
}
if(event.view === 'streaming'){
if(this._StreamingView === undefined){
this._StreamingView = new StreamingView(this)
}
this._StreamingView.load()
} else if(event.previousView === 'streaming'){
// Unload appview
if(this._StreamingView !== undefined){
this._StreamingView.unload()
}
}
if(event.view === 'xCloud'){
if(this._xCloudView === undefined){
this._xCloudView = new xCloudView(this)
}
this._xCloudView.load()
} else if(event.previousView === 'xCloud'){
// Unload appview
if(this._xCloudView !== undefined){
this._xCloudView.unload()
}
}
})
// Build nav
document.getElementById('actionBarMyConsoles').addEventListener('click', (e:Event) => {
this._router.setView('app')
})
document.getElementById('actionBarxCloud').addEventListener('click', (e:Event) => {
this._router.setView('xCloud')
})
document.getElementById('actionBarStreamingView').addEventListener('click', (e:Event) => {
this._router.setView('streaming')
})
document.getElementById('actionBarStreamingViewActive').addEventListener('click', (e:Event) => {
this._router.setView('streaming')
})
document.getElementById('pluginsMenulink').addEventListener('click', (e:Event) => {
// Show debug panel?
if(document.getElementById('pluginsTooltip').style.display === 'none'){
document.getElementById('pluginsTooltip').style.display = 'block'
} else {
document.getElementById('pluginsTooltip').style.display = 'none'
}
})
}
listenForTokens():void {
const inputWebUhs = document.getElementById('token_web_uhs')
const inputWebUserToken = document.getElementById('token_web_usertoken')
const inputStreamingToken = document.getElementById('token_streaming_token')
const inputxCloudStreamingToken = document.getElementById('token_xcloud_streaming_token')
const inputxCloudStreamingHost = document.getElementById('token_xcloud_streaming_host')
const inputxCloudMSALToken = document.getElementById('token_xcloud_msal_token')
const inputWebTokenInterval = setInterval(() => {
const valueUhs = (<HTMLInputElement>inputWebUhs).value
const valueUserToken = (<HTMLInputElement>inputWebUserToken).value
if(valueUhs !== '' && valueUserToken !== ''){
clearInterval(inputWebTokenInterval)
this._tokenStore.setWebTokens(valueUhs, valueUserToken)
inputWebUhs.remove()
inputWebUserToken.remove()
}
}, 100)
const inputStreamingTokenInterval = setInterval(() => {
const value = (<HTMLInputElement>inputStreamingToken).value
if(value !== ''){
clearInterval(inputStreamingTokenInterval)
this._tokenStore.setStreamingToken(value)
inputStreamingToken.remove()
}
}, 100)
const inputxCloudStreamingTokenInterval = setInterval(() => {
const value = (<HTMLInputElement>inputxCloudStreamingToken).value
const host = (<HTMLInputElement>inputxCloudStreamingHost).value
if(value !== '' && host !== ''){
clearInterval(inputxCloudStreamingTokenInterval)
this._tokenStore.setxCloudStreamingToken(value, host)
inputxCloudStreamingToken.remove()
inputxCloudStreamingHost.remove()
}
}, 100)
const inputxCloudMSALTokenInterval = setInterval(() => {
const value = (<HTMLInputElement>inputxCloudMSALToken).value
if(value !== ''){
clearInterval(inputxCloudMSALTokenInterval)
this._tokenStore.setMSALToken(value)
inputxCloudMSALToken.remove()
}
}, 100)
this._tokenStore.addEventListener('onwebtoken', (tokens) => {
if(this._tokenStore._web.uhs !== '' && this._tokenStore._streamingToken !== ''){
this._router.setView('app')
}
})
this._tokenStore.addEventListener('onstreamingtoken', (token) => {
if(this._tokenStore._web.uhs !== '' && this._tokenStore._streamingToken !== ''){
this._router.setView('app')
}
})
this._tokenStore.addEventListener('onxcloudstreamingtoken', (token) => {
const xCloudMenuItem = document.getElementById('actionBarxCloud')
xCloudMenuItem.style.display = 'inline-block'
})
// this._tokenStore.addEventListener('onmsaltoken', (token) => {
// // @TODO: Enable xCloud integration
// })
}
startStream(type: string, serverId:string):void {
this._router.setView('streaming')
if(this._StreamingView !== undefined){
this._StreamingView.startStream(type, serverId)
}
}
addEventListener(name: string, callback: EventCallback):void{
if(name === 'onwebtoken'){
this._eventOnWebToken.push(callback)
} else if(name === 'onstreamingtoken'){
this._eventOnStreamingToken.push(callback)
} else if(name === 'onxcloudstreamingtoken'){
this._eventOnxCloudStreamingToken.push(callback)
}
}
emitEvent(name: string, data: any):void{
if(name === 'onwebtoken'){
for(const eventCallback in this._eventOnWebToken){
this._eventOnWebToken[eventCallback](data)
}
} else if(name === 'onstreamingtoken'){
for(const eventCallback in this._eventOnStreamingToken){
this._eventOnStreamingToken[eventCallback](data)
}
} else if(name === 'onxcloudstreamingtoken'){
for(const eventCallback in this._eventOnxCloudStreamingToken){
this._eventOnxCloudStreamingToken[eventCallback](data)
}
}
}
} | the_stack |
import * as pulumi from "@pulumi/pulumi";
import { input as inputs, output as outputs } from "../types";
import * as utilities from "../utilities";
/**
* UrlMaps are used to route requests to a backend service based on rules
* that you define for the host and path of an incoming URL.
*
* ## Example Usage
* ### Region Url Map Basic
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const _default = new gcp.compute.RegionHealthCheck("default", {
* region: "us-central1",
* checkIntervalSec: 1,
* timeoutSec: 1,
* httpHealthCheck: {
* port: 80,
* requestPath: "/",
* },
* });
* const login = new gcp.compute.RegionBackendService("login", {
* region: "us-central1",
* protocol: "HTTP",
* loadBalancingScheme: "INTERNAL_MANAGED",
* timeoutSec: 10,
* healthChecks: [_default.id],
* });
* const home = new gcp.compute.RegionBackendService("home", {
* region: "us-central1",
* protocol: "HTTP",
* loadBalancingScheme: "INTERNAL_MANAGED",
* timeoutSec: 10,
* healthChecks: [_default.id],
* });
* const regionurlmap = new gcp.compute.RegionUrlMap("regionurlmap", {
* region: "us-central1",
* description: "a description",
* defaultService: home.id,
* hostRules: [{
* hosts: ["mysite.com"],
* pathMatcher: "allpaths",
* }],
* pathMatchers: [{
* name: "allpaths",
* defaultService: home.id,
* pathRules: [
* {
* paths: ["/home"],
* service: home.id,
* },
* {
* paths: ["/login"],
* service: login.id,
* },
* ],
* }],
* tests: [{
* service: home.id,
* host: "hi.com",
* path: "/home",
* }],
* });
* ```
* ### Region Url Map L7 Ilb Path
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const _default = new gcp.compute.RegionHealthCheck("default", {httpHealthCheck: {
* port: 80,
* }});
* const home = new gcp.compute.RegionBackendService("home", {
* protocol: "HTTP",
* timeoutSec: 10,
* healthChecks: [_default.id],
* loadBalancingScheme: "INTERNAL_MANAGED",
* });
* const regionurlmap = new gcp.compute.RegionUrlMap("regionurlmap", {
* description: "a description",
* defaultService: home.id,
* hostRules: [{
* hosts: ["mysite.com"],
* pathMatcher: "allpaths",
* }],
* pathMatchers: [{
* name: "allpaths",
* defaultService: home.id,
* pathRules: [{
* paths: ["/home"],
* routeAction: {
* corsPolicy: {
* allowCredentials: true,
* allowHeaders: ["Allowed content"],
* allowMethods: ["GET"],
* allowOrigins: ["Allowed origin"],
* exposeHeaders: ["Exposed header"],
* maxAge: 30,
* disabled: false,
* },
* faultInjectionPolicy: {
* abort: {
* httpStatus: 234,
* percentage: 5.6,
* },
* delay: {
* fixedDelay: {
* seconds: 0,
* nanos: 50000,
* },
* percentage: 7.8,
* },
* },
* requestMirrorPolicy: {
* backendService: home.id,
* },
* retryPolicy: {
* numRetries: 4,
* perTryTimeout: {
* seconds: 30,
* },
* retryConditions: [
* "5xx",
* "deadline-exceeded",
* ],
* },
* timeout: {
* seconds: 20,
* nanos: 750000000,
* },
* urlRewrite: {
* hostRewrite: "A replacement header",
* pathPrefixRewrite: "A replacement path",
* },
* weightedBackendServices: [{
* backendService: home.id,
* weight: 400,
* headerAction: {
* requestHeadersToRemoves: ["RemoveMe"],
* requestHeadersToAdds: [{
* headerName: "AddMe",
* headerValue: "MyValue",
* replace: true,
* }],
* responseHeadersToRemoves: ["RemoveMe"],
* responseHeadersToAdds: [{
* headerName: "AddMe",
* headerValue: "MyValue",
* replace: false,
* }],
* },
* }],
* },
* }],
* }],
* tests: [{
* service: home.id,
* host: "hi.com",
* path: "/home",
* }],
* });
* ```
* ### Region Url Map L7 Ilb Path Partial
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const _default = new gcp.compute.RegionHealthCheck("default", {httpHealthCheck: {
* port: 80,
* }});
* const home = new gcp.compute.RegionBackendService("home", {
* protocol: "HTTP",
* timeoutSec: 10,
* healthChecks: [_default.id],
* loadBalancingScheme: "INTERNAL_MANAGED",
* });
* const regionurlmap = new gcp.compute.RegionUrlMap("regionurlmap", {
* description: "a description",
* defaultService: home.id,
* hostRules: [{
* hosts: ["mysite.com"],
* pathMatcher: "allpaths",
* }],
* pathMatchers: [{
* name: "allpaths",
* defaultService: home.id,
* pathRules: [{
* paths: ["/home"],
* routeAction: {
* retryPolicy: {
* numRetries: 4,
* perTryTimeout: {
* seconds: 30,
* },
* retryConditions: [
* "5xx",
* "deadline-exceeded",
* ],
* },
* timeout: {
* seconds: 20,
* nanos: 750000000,
* },
* urlRewrite: {
* hostRewrite: "A replacement header",
* pathPrefixRewrite: "A replacement path",
* },
* weightedBackendServices: [{
* backendService: home.id,
* weight: 400,
* headerAction: {
* responseHeadersToAdds: [{
* headerName: "AddMe",
* headerValue: "MyValue",
* replace: false,
* }],
* },
* }],
* },
* }],
* }],
* tests: [{
* service: home.id,
* host: "hi.com",
* path: "/home",
* }],
* });
* ```
* ### Region Url Map L7 Ilb Route
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const _default = new gcp.compute.RegionHealthCheck("default", {httpHealthCheck: {
* port: 80,
* }});
* const home = new gcp.compute.RegionBackendService("home", {
* protocol: "HTTP",
* timeoutSec: 10,
* healthChecks: [_default.id],
* loadBalancingScheme: "INTERNAL_MANAGED",
* });
* const regionurlmap = new gcp.compute.RegionUrlMap("regionurlmap", {
* description: "a description",
* defaultService: home.id,
* hostRules: [{
* hosts: ["mysite.com"],
* pathMatcher: "allpaths",
* }],
* pathMatchers: [{
* name: "allpaths",
* defaultService: home.id,
* routeRules: [{
* priority: 1,
* headerAction: {
* requestHeadersToRemoves: ["RemoveMe2"],
* requestHeadersToAdds: [{
* headerName: "AddSomethingElse",
* headerValue: "MyOtherValue",
* replace: true,
* }],
* responseHeadersToRemoves: ["RemoveMe3"],
* responseHeadersToAdds: [{
* headerName: "AddMe",
* headerValue: "MyValue",
* replace: false,
* }],
* },
* matchRules: [{
* fullPathMatch: "a full path",
* headerMatches: [{
* headerName: "someheader",
* exactMatch: "match this exactly",
* invertMatch: true,
* }],
* ignoreCase: true,
* metadataFilters: [{
* filterMatchCriteria: "MATCH_ANY",
* filterLabels: [{
* name: "PLANET",
* value: "MARS",
* }],
* }],
* queryParameterMatches: [{
* name: "a query parameter",
* presentMatch: true,
* }],
* }],
* urlRedirect: {
* hostRedirect: "A host",
* httpsRedirect: false,
* pathRedirect: "some/path",
* redirectResponseCode: "TEMPORARY_REDIRECT",
* stripQuery: true,
* },
* }],
* }],
* tests: [{
* service: home.id,
* host: "hi.com",
* path: "/home",
* }],
* });
* ```
* ### Region Url Map L7 Ilb Route Partial
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const _default = new gcp.compute.RegionHealthCheck("default", {httpHealthCheck: {
* port: 80,
* }});
* const home = new gcp.compute.RegionBackendService("home", {
* protocol: "HTTP",
* timeoutSec: 10,
* healthChecks: [_default.id],
* loadBalancingScheme: "INTERNAL_MANAGED",
* });
* const regionurlmap = new gcp.compute.RegionUrlMap("regionurlmap", {
* description: "a description",
* defaultService: home.id,
* hostRules: [{
* hosts: ["mysite.com"],
* pathMatcher: "allpaths",
* }],
* pathMatchers: [{
* name: "allpaths",
* defaultService: home.id,
* routeRules: [{
* priority: 1,
* service: home.id,
* headerAction: {
* requestHeadersToRemoves: ["RemoveMe2"],
* },
* matchRules: [{
* fullPathMatch: "a full path",
* headerMatches: [{
* headerName: "someheader",
* exactMatch: "match this exactly",
* invertMatch: true,
* }],
* queryParameterMatches: [{
* name: "a query parameter",
* presentMatch: true,
* }],
* }],
* }],
* }],
* tests: [{
* service: home.id,
* host: "hi.com",
* path: "/home",
* }],
* });
* ```
*
* ## Import
*
* RegionUrlMap can be imported using any of these accepted formats
*
* ```sh
* $ pulumi import gcp:compute/regionUrlMap:RegionUrlMap default projects/{{project}}/regions/{{region}}/urlMaps/{{name}}
* ```
*
* ```sh
* $ pulumi import gcp:compute/regionUrlMap:RegionUrlMap default {{project}}/{{region}}/{{name}}
* ```
*
* ```sh
* $ pulumi import gcp:compute/regionUrlMap:RegionUrlMap default {{region}}/{{name}}
* ```
*
* ```sh
* $ pulumi import gcp:compute/regionUrlMap:RegionUrlMap default {{name}}
* ```
*/
export class RegionUrlMap extends pulumi.CustomResource {
    /**
     * Look up an existing RegionUrlMap by name and provider ID, optionally
     * seeding the lookup with already-known state values.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    public static get(name: string, id: pulumi.Input<pulumi.ID>, state?: RegionUrlMapState, opts?: pulumi.CustomResourceOptions): RegionUrlMap {
        return new RegionUrlMap(name, <any>state, { ...opts, id: id });
    }

    /** @internal */
    public static readonly __pulumiType = 'gcp:compute/regionUrlMap:RegionUrlMap';

    /**
     * Returns true if the given object is an instance of RegionUrlMap. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    public static isInstance(obj: any): obj is RegionUrlMap {
        return obj !== undefined && obj !== null && obj['__pulumiType'] === RegionUrlMap.__pulumiType;
    }

    /**
     * Creation timestamp in RFC3339 text format.
     */
    public /*out*/ readonly creationTimestamp!: pulumi.Output<string>;
    /**
     * A reference to a RegionBackendService resource. This will be used if
     * none of the pathRules defined by this PathMatcher is matched by
     * the URL's path portion.
     */
    public readonly defaultService!: pulumi.Output<string | undefined>;
    /**
     * When none of the specified hostRules match, the request is redirected to a URL specified
     * by defaultUrlRedirect. If defaultUrlRedirect is specified, defaultService or
     * defaultRouteAction must not be set.
     * Structure is documented below.
     */
    public readonly defaultUrlRedirect!: pulumi.Output<outputs.compute.RegionUrlMapDefaultUrlRedirect | undefined>;
    /**
     * An optional description of this resource.
     */
    public readonly description!: pulumi.Output<string | undefined>;
    /**
     * Fingerprint of this resource. This field is used internally during updates of this resource.
     */
    public /*out*/ readonly fingerprint!: pulumi.Output<string>;
    /**
     * The list of HostRules to use against the URL.
     * Structure is documented below.
     */
    public readonly hostRules!: pulumi.Output<outputs.compute.RegionUrlMapHostRule[] | undefined>;
    /**
     * The unique identifier for the resource.
     */
    public /*out*/ readonly mapId!: pulumi.Output<number>;
    /**
     * Name of the resource.
     */
    public readonly name!: pulumi.Output<string>;
    /**
     * The list of named PathMatchers to use against the URL.
     * Structure is documented below.
     */
    public readonly pathMatchers!: pulumi.Output<outputs.compute.RegionUrlMapPathMatcher[] | undefined>;
    /**
     * The ID of the project in which the resource belongs.
     * If it is not provided, the provider project is used.
     */
    public readonly project!: pulumi.Output<string>;
    /**
     * The Region in which the url map should reside.
     * If it is not provided, the provider region is used.
     */
    public readonly region!: pulumi.Output<string>;
    /**
     * The URI of the created resource.
     */
    public /*out*/ readonly selfLink!: pulumi.Output<string>;
    /**
     * The list of expected URL mappings. Requests to update this UrlMap will
     * succeed only if all of the test cases pass.
     * Structure is documented below.
     */
    public readonly tests!: pulumi.Output<outputs.compute.RegionUrlMapTest[] | undefined>;

    /**
     * Create a RegionUrlMap resource with the given unique name, arguments, and options.
     *
     * @param name The _unique_ name of the resource.
     * @param args The arguments to use to populate this resource's properties.
     * @param opts A bag of options that control this resource's behavior.
     */
    constructor(name: string, args?: RegionUrlMapArgs, opts?: pulumi.CustomResourceOptions)
    constructor(name: string, argsOrState?: RegionUrlMapArgs | RegionUrlMapState, opts?: pulumi.CustomResourceOptions) {
        const resourceInputs: pulumi.Inputs = {};
        opts = opts || {};
        if (opts.id) {
            // Rehydrating an existing resource: seed inputs from the provided state.
            const state = argsOrState as RegionUrlMapState | undefined;
            resourceInputs["creationTimestamp"] = state ? state.creationTimestamp : undefined;
            resourceInputs["defaultService"] = state ? state.defaultService : undefined;
            resourceInputs["defaultUrlRedirect"] = state ? state.defaultUrlRedirect : undefined;
            resourceInputs["description"] = state ? state.description : undefined;
            resourceInputs["fingerprint"] = state ? state.fingerprint : undefined;
            resourceInputs["hostRules"] = state ? state.hostRules : undefined;
            resourceInputs["mapId"] = state ? state.mapId : undefined;
            resourceInputs["name"] = state ? state.name : undefined;
            resourceInputs["pathMatchers"] = state ? state.pathMatchers : undefined;
            resourceInputs["project"] = state ? state.project : undefined;
            resourceInputs["region"] = state ? state.region : undefined;
            resourceInputs["selfLink"] = state ? state.selfLink : undefined;
            resourceInputs["tests"] = state ? state.tests : undefined;
        } else {
            // Creating a new resource: copy user arguments; output-only fields start undefined.
            const args = argsOrState as RegionUrlMapArgs | undefined;
            resourceInputs["defaultService"] = args ? args.defaultService : undefined;
            resourceInputs["defaultUrlRedirect"] = args ? args.defaultUrlRedirect : undefined;
            resourceInputs["description"] = args ? args.description : undefined;
            resourceInputs["hostRules"] = args ? args.hostRules : undefined;
            resourceInputs["name"] = args ? args.name : undefined;
            resourceInputs["pathMatchers"] = args ? args.pathMatchers : undefined;
            resourceInputs["project"] = args ? args.project : undefined;
            resourceInputs["region"] = args ? args.region : undefined;
            resourceInputs["tests"] = args ? args.tests : undefined;
            resourceInputs["creationTimestamp"] = undefined /*out*/;
            resourceInputs["fingerprint"] = undefined /*out*/;
            resourceInputs["mapId"] = undefined /*out*/;
            resourceInputs["selfLink"] = undefined /*out*/;
        }
        if (!opts.version) {
            // Pin the provider plugin version when the caller did not specify one.
            opts = pulumi.mergeOptions(opts, { version: utilities.getVersion()});
        }
        super(RegionUrlMap.__pulumiType, name, resourceInputs, opts);
    }
}
/**
 * Input properties used for looking up and filtering RegionUrlMap resources.
 */
export interface RegionUrlMapState {
    /**
     * Creation timestamp in RFC3339 text format.
     */
    creationTimestamp?: pulumi.Input<string>;
    /**
     * A reference to a RegionBackendService resource. This will be used if
     * none of the pathRules defined by this PathMatcher is matched by
     * the URL's path portion.
     */
    defaultService?: pulumi.Input<string>;
    /**
     * When none of the specified hostRules match, the request is redirected to a URL specified
     * by defaultUrlRedirect. If defaultUrlRedirect is specified, defaultService or
     * defaultRouteAction must not be set.
     * Structure is documented below.
     */
    defaultUrlRedirect?: pulumi.Input<inputs.compute.RegionUrlMapDefaultUrlRedirect>;
    /**
     * An optional description of this resource.
     * NOTE(review): upstream doc said "Description of this test case", which belongs to
     * the nested test block, not to the url map itself.
     */
    description?: pulumi.Input<string>;
    /**
     * Fingerprint of this resource. This field is used internally during updates of this resource.
     */
    fingerprint?: pulumi.Input<string>;
    /**
     * The list of HostRules to use against the URL.
     * Structure is documented below.
     */
    hostRules?: pulumi.Input<pulumi.Input<inputs.compute.RegionUrlMapHostRule>[]>;
    /**
     * The unique identifier for the resource.
     */
    mapId?: pulumi.Input<number>;
    /**
     * Name of the resource. Provided by the client when the resource is created.
     * NOTE(review): upstream doc described a query-parameter match field; that text belongs
     * to a nested match rule, not to the url map's own name.
     */
    name?: pulumi.Input<string>;
    /**
     * The list of named PathMatchers to use against the URL.
     * Structure is documented below.
     * NOTE(review): upstream doc described the hostRule's `pathMatcher` reference instead of
     * this list of PathMatcher definitions.
     */
    pathMatchers?: pulumi.Input<pulumi.Input<inputs.compute.RegionUrlMapPathMatcher>[]>;
    /**
     * The ID of the project in which the resource belongs.
     * If it is not provided, the provider project is used.
     */
    project?: pulumi.Input<string>;
    /**
     * The Region in which the url map should reside.
     * If it is not provided, the provider region is used.
     */
    region?: pulumi.Input<string>;
    /**
     * The URI of the created resource.
     */
    selfLink?: pulumi.Input<string>;
    /**
     * The list of expected URL mappings. Requests to update this UrlMap will
     * succeed only if all of the test cases pass.
     * Structure is documented below.
     */
    tests?: pulumi.Input<pulumi.Input<inputs.compute.RegionUrlMapTest>[]>;
}
/**
 * The set of arguments for constructing a RegionUrlMap resource.
 */
export interface RegionUrlMapArgs {
    /**
     * A reference to a RegionBackendService resource. This will be used if
     * none of the pathRules defined by this PathMatcher is matched by
     * the URL's path portion.
     */
    defaultService?: pulumi.Input<string>;
    /**
     * When none of the specified hostRules match, the request is redirected to a URL specified
     * by defaultUrlRedirect. If defaultUrlRedirect is specified, defaultService or
     * defaultRouteAction must not be set.
     * Structure is documented below.
     */
    defaultUrlRedirect?: pulumi.Input<inputs.compute.RegionUrlMapDefaultUrlRedirect>;
    /**
     * An optional description of this resource.
     * NOTE(review): upstream doc said "Description of this test case", which belongs to
     * the nested test block, not to the url map itself.
     */
    description?: pulumi.Input<string>;
    /**
     * The list of HostRules to use against the URL.
     * Structure is documented below.
     */
    hostRules?: pulumi.Input<pulumi.Input<inputs.compute.RegionUrlMapHostRule>[]>;
    /**
     * Name of the resource. Provided by the client when the resource is created.
     * NOTE(review): upstream doc described a query-parameter match field; that text belongs
     * to a nested match rule, not to the url map's own name.
     */
    name?: pulumi.Input<string>;
    /**
     * The list of named PathMatchers to use against the URL.
     * Structure is documented below.
     * NOTE(review): upstream doc described the hostRule's `pathMatcher` reference instead of
     * this list of PathMatcher definitions.
     */
    pathMatchers?: pulumi.Input<pulumi.Input<inputs.compute.RegionUrlMapPathMatcher>[]>;
    /**
     * The ID of the project in which the resource belongs.
     * If it is not provided, the provider project is used.
     */
    project?: pulumi.Input<string>;
    /**
     * The Region in which the url map should reside.
     * If it is not provided, the provider region is used.
     */
    region?: pulumi.Input<string>;
    /**
     * The list of expected URL mappings. Requests to update this UrlMap will
     * succeed only if all of the test cases pass.
     * Structure is documented below.
     */
    tests?: pulumi.Input<pulumi.Input<inputs.compute.RegionUrlMapTest>[]>;
}
import * as timeSpan from 'time-span';
import TransactionUnderProcessingModel, { TransactionProcessingStatus } from './models/TransactionUnderProcessingModel';
import EventCode from './EventCode';
import EventEmitter from '../common/EventEmitter';
import IBlockchain from './interfaces/IBlockchain';
import IOperationStore from './interfaces/IOperationStore';
import ITransactionProcessor from './interfaces/ITransactionProcessor';
import ITransactionStore from './interfaces/ITransactionStore';
import IUnresolvableTransactionStore from './interfaces/IUnresolvableTransactionStore';
import IVersionManager from './interfaces/IVersionManager';
import Logger from '../common/Logger';
import SharedErrorCode from '../common/SharedErrorCode';
import SidetreeError from '../common/SidetreeError';
import ThroughputLimiter from './ThroughputLimiter';
import TransactionModel from '../common/models/TransactionModel';
/**
 * Class that performs periodic processing of batches of Sidetree operations anchored to the blockchain.
 */
export default class Observer {
  /**
   * Denotes if the periodic transaction processing should continue to occur.
   * Used mainly for test purposes.
   */
  private continuePeriodicProcessing = false;
  /**
   * The list of transactions that are being downloaded or processed.
   */
  private transactionsUnderProcessing: TransactionUnderProcessingModel[] = [];
  /**
   * This is the transaction that is used as a cursor/timestamp to fetch newer transaction.
   */
  private cursorTransaction: TransactionModel | undefined;
  /**
   * Selects which fetched transactions qualify for processing; constructed with the version manager.
   */
  private throughputLimiter: ThroughputLimiter;
  /**
   * @param versionManager Resolves the transaction processor to use for a given transaction time.
   * @param blockchain Source of anchored Sidetree transactions.
   * @param maxConcurrentDownloads Max count of transactions allowed to be in `Processing` status before fetching more.
   * @param operationStore Persistent store for processed operations; used to revert on block reorg.
   * @param transactionStore Persistent store for consecutively processed transactions (also the cursor source).
   * @param unresolvableTransactionStore Store of transactions to be retried later.
   * @param observingIntervalInSeconds Delay between processing rounds.
   */
  public constructor (
    private versionManager: IVersionManager,
    private blockchain: IBlockchain,
    private maxConcurrentDownloads: number,
    private operationStore: IOperationStore,
    private transactionStore: ITransactionStore,
    private unresolvableTransactionStore: IUnresolvableTransactionStore,
    private observingIntervalInSeconds: number) {
    this.throughputLimiter = new ThroughputLimiter(versionManager);
  }
  /**
   * The method that starts the periodic polling and processing of Sidetree operations.
   */
  public async startPeriodicProcessing () {
    Logger.info(`Starting periodic transactions processing.`);
    setImmediate(async () => {
      this.continuePeriodicProcessing = true;
      // Intentionally not awaited: processTransactions() re-schedules itself in its `finally` block.
      this.processTransactions();
    });
  }
  /**
   * Stops periodic transaction processing.
   * Mainly used for test purposes.
   */
  public stopPeriodicProcessing () {
    Logger.info(`Stopped periodic transactions processing.`);
    this.continuePeriodicProcessing = false;
  }
  /**
   * Processes new transactions if any, then reprocess a set of unresolvable transactions if any,
   * then schedules the next round of processing unless `stopPeriodicProcessing()` is invoked.
   */
  private async processTransactions () {
    try {
      // Optional update to store the processed transactions that completed in between the polling periods.
      await this.storeThenTrimConsecutiveTransactionsProcessed();
      // Keep fetching new Sidetree transactions from blockchain and processing them
      // until there are no more new transactions or there is a block reorganization.
      let moreTransactions = false;
      do {
        // Re-seed the cursor from the last stored transaction when it has been cleared (startup or after a reorg/error).
        if (this.cursorTransaction === undefined) {
          this.cursorTransaction = await this.transactionStore.getLastTransaction();
        }
        const cursorTransactionNumber = this.cursorTransaction ? this.cursorTransaction.transactionNumber : undefined;
        const cursorTransactionTimeHash = this.cursorTransaction ? this.cursorTransaction.transactionTimeHash : undefined;
        const cursorTransactionTime = this.cursorTransaction ? this.cursorTransaction.transactionTime : 0;
        let invalidTransactionNumberOrTimeHash = false;
        let readResult;
        const endTimer = timeSpan(); // Measure time taken by the blockchain read.
        try {
          Logger.info('Fetching Sidetree transactions from blockchain service...');
          readResult = await this.blockchain.read(cursorTransactionNumber, cursorTransactionTimeHash);
          Logger.info(`Fetched ${readResult.transactions.length} Sidetree transactions from blockchain service in ${endTimer.rounded()} ms.`);
        } catch (error) {
          // An invalid cursor signals a potential block reorganization; anything else is unexpected and rethrown.
          if (error instanceof SidetreeError && error.code === SharedErrorCode.InvalidTransactionNumberOrTimeHash) {
            Logger.info(`Invalid transaction number ${cursorTransactionNumber} or time hash ${cursorTransactionTimeHash} given to blockchain service.`);
            invalidTransactionNumberOrTimeHash = true;
          } else {
            throw error;
          }
        }
        const transactions = readResult ? readResult.transactions : [];
        moreTransactions = readResult ? readResult.moreTransactions : false;
        // Set the cursor for fetching of next transaction batch in the next loop.
        if (transactions.length > 0) {
          this.cursorTransaction = transactions[transactions.length - 1];
        }
        // Queue parallel downloading and processing of chunk files.
        let qualifiedTransactions = await this.throughputLimiter.getQualifiedTransactions(transactions);
        // Keep chronological order (ascending transaction number) for processing and trimming.
        qualifiedTransactions = qualifiedTransactions.sort((a, b) => { return a.transactionNumber - b.transactionNumber; });
        for (const transaction of qualifiedTransactions) {
          const transactionUnderProcessing = {
            transaction: transaction,
            processingStatus: TransactionProcessingStatus.Processing
          };
          this.transactionsUnderProcessing.push(transactionUnderProcessing);
          // Intentionally not awaiting on downloading and processing each operation batch.
          this.processTransaction(transaction, transactionUnderProcessing);
        }
        // NOTE: Blockchain reorg has happened for sure only if `invalidTransactionNumberOrTimeHash` AND
        // latest transaction time is less or equal to blockchain service time.
        // This check will prevent Core from reverting transactions if/when blockchain service is re-initializing its data itself.
        let blockReorganizationDetected = false;
        if (invalidTransactionNumberOrTimeHash) {
          const latestBlockchainTime = await this.blockchain.getLatestTime();
          if (cursorTransactionTime <= latestBlockchainTime.time) {
            blockReorganizationDetected = true;
            moreTransactions = true;
          } else {
            Logger.info(`Blockchain microservice blockchain time is behind last known transaction time, waiting for blockchain microservice to catch up...`);
          }
        }
        // If block reorg is detected, we must wait until no more operation processing is pending,
        // then revert invalid transaction and operations.
        if (blockReorganizationDetected) {
          Logger.info(`Block reorganization detected.`);
          EventEmitter.emit(EventCode.SidetreeObserverBlockReorganization);
          await Observer.waitUntilCountOfTransactionsUnderProcessingIsLessOrEqualTo(this.transactionsUnderProcessing, 0);
          await this.storeThenTrimConsecutiveTransactionsProcessed(); // This is an optional optimization to give the best chance of a minimal revert dataset.
          Logger.info(`Reverting invalid transactions...`);
          await this.revertInvalidTransactions();
          Logger.info(`Completed reverting invalid transactions.`);
          this.cursorTransaction = undefined;
        } else {
          // Else it means all transactions fetched are good for processing.
          // We hold off from fetching more transactions if the list of transactions under processing gets too long.
          // We will wait for count of transaction being processed to fall to the maximum allowed concurrent downloads
          // before attempting further transaction fetches.
          await Observer.waitUntilCountOfTransactionsUnderProcessingIsLessOrEqualTo(this.transactionsUnderProcessing, this.maxConcurrentDownloads);
          await this.storeThenTrimConsecutiveTransactionsProcessed();
          // If there is an error in processing a transaction that PREVENTS processing subsequent Sidetree transactions from the blockchain
          // (e.g. A DB outage/error that prevents us from recording a transaction for retries),
          // erase the entire list transactions under processing since processing MUST not advance beyond the transaction that failed processing.
          const hasErrorInTransactionProcessing = this.hasErrorInTransactionProcessing();
          if (hasErrorInTransactionProcessing) {
            // Step to defend against potential uncontrolled growth in `transactionsUnderProcessing` array size due to looping.
            await Observer.waitUntilCountOfTransactionsUnderProcessingIsLessOrEqualTo(this.transactionsUnderProcessing, 0);
            await this.storeThenTrimConsecutiveTransactionsProcessed();
            // Clear the entire list of transactions under processing since we cannot advance further due to the error.
            this.transactionsUnderProcessing = [];
            this.cursorTransaction = undefined;
          }
        }
      } while (moreTransactions);
      Logger.info('Successfully kicked off downloading/processing of all new Sidetree transactions.');
      // Continue onto processing unresolvable transactions if any.
      await this.processUnresolvableTransactions();
      EventEmitter.emit(EventCode.SidetreeObserverLoopSuccess);
    } catch (error) {
      EventEmitter.emit(EventCode.SidetreeObserverLoopFailure);
      Logger.error(`Encountered unhandled and possibly fatal Observer error, must investigate and fix:`);
      Logger.error(error);
    } finally {
      // Self-schedule the next round; this is how the periodic loop keeps running.
      if (this.continuePeriodicProcessing) {
        Logger.info(`Waiting for ${this.observingIntervalInSeconds} seconds before fetching and processing transactions again.`);
        setTimeout(async () => this.processTransactions(), this.observingIntervalInSeconds * 1000);
      }
    }
  }
  /**
   * Gets the total count of the transactions given that are still under processing.
   */
  private static getCountOfTransactionsUnderProcessing (transactionsUnderProcessing: TransactionUnderProcessingModel[]): number {
    const countOfTransactionsUnderProcessing = transactionsUnderProcessing.filter(
      transaction => transaction.processingStatus === TransactionProcessingStatus.Processing
    ).length;
    return countOfTransactionsUnderProcessing;
  }
  /**
   * Returns true if at least processing of one transaction resulted in an error that prevents advancement of transaction processing.
   */
  private hasErrorInTransactionProcessing (): boolean {
    const firstTransactionProcessingError = this.transactionsUnderProcessing.find(
      transaction => transaction.processingStatus === TransactionProcessingStatus.Error
    );
    return (firstTransactionProcessingError !== undefined);
  }
  /**
   * Polls once per second until the count of transactions still in `Processing` status
   * drops to at most the given count.
   */
  private static async waitUntilCountOfTransactionsUnderProcessingIsLessOrEqualTo (
    transactionsUnderProcessing: TransactionUnderProcessingModel[],
    count: number) {
    let countOfTransactionsUnderProcessing = Observer.getCountOfTransactionsUnderProcessing(transactionsUnderProcessing);
    while (countOfTransactionsUnderProcessing > count) {
      // Wait a little before checking again.
      await new Promise(resolve => setTimeout(resolve, 1000));
      countOfTransactionsUnderProcessing = Observer.getCountOfTransactionsUnderProcessing(transactionsUnderProcessing);
    }
  }
  /**
   * Attempts to fetch and process unresolvable transactions due for retry.
   * Waits until all unresolvable transactions due for retry are processed.
   */
  private async processUnresolvableTransactions () {
    Logger.info(`Processing previously unresolvable transactions if any...`);
    const endTimer = timeSpan();
    const unresolvableTransactions = await this.unresolvableTransactionStore.getUnresolvableTransactionsDueForRetry();
    Logger.info(`Fetched ${unresolvableTransactions.length} unresolvable transactions to retry in ${endTimer.rounded()} ms.`);
    // Download and process each unresolvable transaction.
    const unresolvableTransactionStatus = [];
    for (const transaction of unresolvableTransactions) {
      const awaitingTransaction = {
        transaction: transaction,
        processingStatus: TransactionProcessingStatus.Processing
      };
      unresolvableTransactionStatus.push(awaitingTransaction);
      // Intentionally not awaiting on downloading and processing each operation batch.
      this.processTransaction(transaction, awaitingTransaction);
    }
    await Observer.waitUntilCountOfTransactionsUnderProcessingIsLessOrEqualTo(unresolvableTransactionStatus, 0);
  }
  /**
   * Goes through `transactionsUnderProcessing` in chronological order, records every consecutive processed transaction in the transaction store,
   * then remove them from `transactionsUnderProcessing` and update the in memory `lastConsecutivelyProcessedTransaction`.
   *
   * NOTE: this excludes transaction processing that resulted in `TransactionProcessingStatus.Error`,
   * because such error includes the case when the code fails to store the transaction to the retry table for future retry,
   * adding it to the transaction table means such transaction won't be processed again, resulting in missing operation data.
   * @returns The last transaction consecutively processed.
   */
  private async storeThenTrimConsecutiveTransactionsProcessed () {
    let lastConsecutivelyProcessedTransaction;
    let i = 0;
    // Walk the prefix of the list whose entries have all completed processing.
    while (i < this.transactionsUnderProcessing.length &&
      this.transactionsUnderProcessing[i].processingStatus === TransactionProcessingStatus.Processed) {
      lastConsecutivelyProcessedTransaction = this.transactionsUnderProcessing[i].transaction;
      await this.transactionStore.addTransaction(lastConsecutivelyProcessedTransaction);
      i++;
    }
    // Trim off consecutive transactions that are processed successfully.
    this.transactionsUnderProcessing.splice(0, i);
  }
  /**
   * Processes the given transaction by passing the transaction to the right version of the transaction processor based on the transaction time.
   * The transaction processing generically involves first downloading DID operation data from CAS (Content Addressable Storage),
   * then storing the operations indexed/grouped by DIDs in the persistent operation DB.
   *
   * Updates `transactionUnderProcessing.processingStatus` to `Processed` on completion,
   * or to `Error` if the failed transaction could not be recorded for retry.
   */
  private async processTransaction (transaction: TransactionModel, transactionUnderProcessing: TransactionUnderProcessingModel) {
    let transactionProcessedSuccessfully;
    try {
      const transactionProcessor: ITransactionProcessor = this.versionManager.getTransactionProcessor(transaction.transactionTime);
      transactionProcessedSuccessfully = await transactionProcessor.processTransaction(transaction);
    } catch (error) {
      Logger.error(`Unhandled error encountered processing transaction '${transaction.transactionNumber}'.`);
      Logger.error(error);
      transactionProcessedSuccessfully = false;
    }
    if (transactionProcessedSuccessfully) {
      Logger.info(`Removing transaction '${transaction.transactionNumber}' from unresolvable transactions if exists...`);
      this.unresolvableTransactionStore.removeUnresolvableTransaction(transaction); // Skip await since failure is not a critical and results in a retry.
    } else {
      try {
        Logger.info(`Recording failed processing attempt for transaction '${transaction.transactionNumber}'...`);
        await this.unresolvableTransactionStore.recordUnresolvableTransactionFetchAttempt(transaction);
      } catch (error) {
        // Failure to record the retry attempt is the non-advanceable error state checked by `hasErrorInTransactionProcessing()`.
        transactionUnderProcessing.processingStatus = TransactionProcessingStatus.Error;
        Logger.error(`Error encountered saving unresolvable transaction '${transaction.transactionNumber}' for retry.`);
        Logger.error(error);
        return;
      }
    }
    Logger.info(`Finished processing transaction '${transaction.transactionNumber}'.`);
    transactionUnderProcessing.processingStatus = TransactionProcessingStatus.Processed;
  }
  /**
   * Reverts invalid transactions. Used in the event of a block-reorganization.
   */
  private async revertInvalidTransactions () {
    // Compute a list of exponentially-spaced transactions with their index, starting from the last transaction of the processed transactions.
    const exponentiallySpacedTransactions = await this.transactionStore.getExponentiallySpacedTransactions();
    // Find a known valid Sidetree transaction that is prior to the block reorganization.
    const bestKnownValidRecentTransaction =
      await this.blockchain.getFirstValidTransaction(exponentiallySpacedTransactions);
    const bestKnownValidRecentTransactionNumber = bestKnownValidRecentTransaction === undefined ? undefined : bestKnownValidRecentTransaction.transactionNumber;
    Logger.info(`Best known valid recent transaction: ${bestKnownValidRecentTransactionNumber}`);
    // Revert all processed operations that came after the best known valid recent transaction.
    Logger.info('Reverting operations...');
    await this.operationStore.delete(bestKnownValidRecentTransactionNumber);
    await this.unresolvableTransactionStore.removeUnresolvableTransactionsLaterThan(bestKnownValidRecentTransactionNumber);
    // NOTE: MUST do steps below LAST in this particular order to handle incomplete operation rollback due to unexpected scenarios, such as power outage etc.
    await this.transactionStore.removeTransactionsLaterThan(bestKnownValidRecentTransactionNumber);
  }
}
import { mergeMap, tap, filter } from 'rxjs/operators';
import { IndexedDBDatabase } from '../databases/indexeddb-database';
import { LocalStorageDatabase } from '../databases/localstorage-database';
import { MemoryDatabase } from '../databases/memory-database';
import { DEFAULT_IDB_DB_NAME, DEFAULT_IDB_STORE_NAME, DEFAULT_IDB_DB_VERSION } from '../tokens';
import { clearStorage, closeAndDeleteDatabase } from '../testing/cleaning.spec';
import { StorageMap } from './storage-map.service';
import { VALIDATION_ERROR } from './exceptions';
import { JSONSchema, JSONSchemaNumber } from '../validation/json-schema';
function tests(description: string, localStorageServiceFactory: () => StorageMap): void {
interface Monster {
name: string;
address?: string;
}
const key = 'test';
let storage: StorageMap;
describe(description, () => {
beforeAll(() => {
/* Via a factory as the class should be instancied only now, not before, otherwise tests could overlap */
storage = localStorageServiceFactory();
});
beforeEach((done) => {
/* Clear data to avoid tests overlap */
clearStorage(done, storage);
});
afterAll((done) => {
/* Now that `indexedDB` store name can be customized, it's important:
* - to delete the database after each tests group,
* so the next tests group to will trigger the `indexedDB` `upgradeneeded` event,
* as it's where the store is created
* - to be able to delete the database, all connections to it must be closed */
closeAndDeleteDatabase(done, storage);
});
describe('overloads', () => {
it('no schema / no cast', (done) => {
// @ts-expect-error Failure test
storage.get('test').subscribe((_: number | undefined) => {
expect().nothing();
done();
});
});
it('no schema / cast', (done) => {
// @ts-expect-error Failure test
storage.get<number>('test').subscribe((_: number | undefined) => {
expect().nothing();
done();
});
});
it('schema / cast', (done) => {
storage.get<string>('test', { type: 'string' }).subscribe((_: string | undefined) => {
expect().nothing();
done();
});
});
it('schema with options', (done) => {
storage.get('test', { type: 'number', maximum: 10 }).subscribe((_: number | undefined) => {
expect().nothing();
done();
});
});
it('prepared schema with generic interface', (done) => {
const schema: JSONSchema = { type: 'number' };
storage.get('test', schema).subscribe((_: number | undefined) => {
expect().nothing();
done();
});
});
it('prepared schema with specific interface', (done) => {
const schema: JSONSchemaNumber = { type: 'number' };
storage.get('test', schema).subscribe((_: number | undefined) => {
expect().nothing();
done();
});
});
});
describe(`get()`, () => {
describe(`string`, () => {
it('with value', (done) => {
const value = 'blue';
const schema: JSONSchema = { type: 'string' };
storage.set(key, value, schema).pipe(
mergeMap(() => storage.get(key, schema))
).subscribe((result: string | undefined) => {
expect(result).toBe(value);
done();
});
});
it('empty', (done) => {
const value = '';
const schema: JSONSchema = { type: 'string' };
storage.set(key, value, schema).pipe(
mergeMap(() => storage.get(key, schema))
).subscribe((result: string | undefined) => {
expect(result).toBe(value);
done();
});
});
it('const', (done) => {
const value = 'hello';
const schema: JSONSchema = {
type: 'string',
const: 'hello',
};
storage.set(key, value, schema).pipe(
mergeMap(() => storage.get<'hello'>(key, schema))
).subscribe((result: 'hello' | undefined) => {
expect(result).toBe(value);
done();
});
});
it('enum', (done) => {
const value = 'world';
const schema: JSONSchema = {
type: 'string',
enum: ['hello', 'world'],
};
storage.set(key, value, schema).pipe(
mergeMap(() => storage.get<'hello' | 'world'>(key, schema))
).subscribe((result: 'hello' | 'world' | undefined) => {
expect(result).toBe(value);
done();
});
});
});
describe(`number`, () => {
it('with value', (done) => {
const value = 1.5;
const schema: JSONSchema = { type: 'number' };
storage.set(key, value, schema).pipe(
mergeMap(() => storage.get(key, schema))
).subscribe((result: number | undefined) => {
expect(result).toBe(value);
done();
});
});
it('zero', (done) => {
const value = 0;
const schema: JSONSchema = { type: 'number' };
storage.set(key, value, schema).pipe(
mergeMap(() => storage.get(key, schema))
).subscribe((result: number | undefined) => {
expect(result).toBe(value);
done();
});
});
it('const', (done) => {
const value = 1.5;
const schema: JSONSchema = {
type: 'number',
const: 1.5,
};
storage.set(key, value, schema).pipe(
mergeMap(() => storage.get<1.5>(key, schema))
).subscribe((result: 1.5 | undefined) => {
expect(result).toBe(value);
done();
});
});
it('enum', (done) => {
const value = 2.4;
const schema: JSONSchema = {
type: 'number',
enum: [1.5, 2.4],
};
storage.set(key, value, schema).pipe(
mergeMap(() => storage.get<1.5 | 2.4>(key, schema))
).subscribe((result: 1.5 | 2.4 | undefined) => {
expect(result).toBe(value);
done();
});
});
});
describe(`integer`, () => {
it('with value', (done) => {
const value = 1;
const schema: JSONSchema = { type: 'integer' };
storage.set(key, value, schema).pipe(
mergeMap(() => storage.get(key, schema))
).subscribe((result: number | undefined) => {
expect(result).toBe(value);
done();
});
});
it('zero', (done) => {
const value = 0;
const schema: JSONSchema = { type: 'integer' };
storage.set(key, value, schema).pipe(
mergeMap(() => storage.get(key, schema))
).subscribe((result: number | undefined) => {
expect(result).toBe(value);
done();
});
});
it('const', (done) => {
const value = 1;
const schema: JSONSchema = {
type: 'integer',
const: 1,
};
storage.set(key, value, schema).pipe(
mergeMap(() => storage.get<1>(key, schema))
).subscribe((result: 1 | undefined) => {
expect(result).toBe(value);
done();
});
});
it('enum', (done) => {
const value = 2;
const schema: JSONSchema = {
type: 'integer',
enum: [1, 2],
};
storage.set(key, value, schema).pipe(
mergeMap(() => storage.get<1 | 2>(key, schema))
).subscribe((result: 1 | 2 | undefined) => {
expect(result).toBe(value);
done();
});
});
});
describe(`boolean`, () => {
it('true', (done) => {
const value = true;
const schema: JSONSchema = { type: 'boolean' };
storage.set(key, value, schema).pipe(
mergeMap(() => storage.get(key, schema))
).subscribe((result: boolean | undefined) => {
expect(result).toBe(value);
done();
});
});
it('false', (done) => {
const value = false;
const schema: JSONSchema = { type: 'boolean' };
storage.set(key, value, schema).pipe(
mergeMap(() => storage.get(key, schema))
).subscribe((result: boolean | undefined) => {
expect(result).toBe(value);
done();
});
});
it('const', (done) => {
const value = true;
const schema: JSONSchema = {
type: 'boolean',
const: true,
};
storage.set(key, value, schema).pipe(
mergeMap(() => storage.get<true>(key, schema))
).subscribe((result: true | undefined) => {
expect(result).toBe(value);
done();
});
});
});
describe('array', () => {
it('of strings', (done) => {
const value = ['hello', 'world', '!'];
const schema = {
type: 'array',
items: { type: 'string' },
} as const;
storage.set(key, value, schema).pipe(
mergeMap(() => storage.get(key, schema))
).subscribe((result: string[] | undefined) => {
expect(result).toEqual(value);
done();
});
});
it('of integers', (done) => {
const value = [1, 2, 3];
const schema = {
type: 'array',
items: { type: 'integer' },
} as const;
storage.set(key, value, schema).pipe(
mergeMap(() => storage.get(key, schema))
).subscribe((result: number[] | undefined) => {
expect(result).toEqual(value);
done();
});
});
it('of numbers', (done) => {
const value = [1.5, 2.4, 3.67];
const schema = {
type: 'array',
items: { type: 'number' },
} as const;
storage.set(key, value, schema).pipe(
mergeMap(() => storage.get(key, schema))
).subscribe((result: number[] | undefined) => {
expect(result).toEqual(value);
done();
});
});
it('of booleans', (done) => {
const value = [true, false, true];
const schema = {
type: 'array',
items: { type: 'boolean' },
} as const;
storage.set(key, value, schema).pipe(
mergeMap(() => storage.get(key, schema))
).subscribe((result: boolean[] | undefined) => {
expect(result).toEqual(value);
done();
});
});
it('of arrays', (done) => {
const value = [['hello', 'world'], ['my', 'name'], ['is', 'Elmo']];
const schema: JSONSchema = {
type: 'array',
items: {
type: 'array',
items: { type: 'string' },
},
};
storage.set(key, value, schema).pipe(
mergeMap(() => storage.get<string[][]>(key, schema))
).subscribe((result: string[][] | undefined) => {
expect(result).toEqual(value);
done();
});
});
it('of objects', (done) => {
const value = [{
name: 'Elmo',
address: 'Sesame street',
}, {
name: 'Cookie',
}, {
name: 'Chester',
}];
const schema: JSONSchema = {
type: 'array',
items: {
type: 'object',
properties: {
name: { type: 'string' },
address: { type: 'string' },
},
required: ['name'],
},
};
storage.set(key, value, schema).pipe(
mergeMap(() => storage.get<Monster[]>(key, schema))
).subscribe((result: Monster[] | undefined) => {
expect(result).toEqual(value);
done();
});
});
it('Set', (done) => {
const array = ['hello', 'world'];
const value = new Set<string>(['hello', 'world']);
const schema = {
type: 'array',
items: { type: 'string' },
uniqueItems: true,
} as const;
storage.set(key, Array.from(value), schema).pipe(
mergeMap(() => storage.get(key, schema)),
).subscribe((result: string[] | undefined) => {
expect(result).toEqual(array);
done();
});
});
it('tuple', (done) => {
const value: [string, Monster] = ['hello', {
name: 'Elmo',
address: 'Sesame street',
}];
const schema: JSONSchema = {
type: 'array',
items: [{
type: 'string'
}, {
type: 'object',
properties: {
name: { type: 'string' },
address: { type: 'string' },
},
required: ['name'],
}],
};
storage.set(key, value, schema).pipe(
mergeMap(() => storage.get<[string, Monster]>(key, schema))
).subscribe((result: [string, Monster] | undefined) => {
expect(result).toEqual(value);
done();
});
});
it('Map', (done) => {
const array: [string, Monster][] = [
['Elmo', {
name: 'Elmo',
address: 'Sesame street',
}],
['Cookie', {
name: 'Cookie',
}],
];
const value = new Map<string, Monster>(array);
const schema: JSONSchema = {
type: 'array',
items: {
type: 'array',
items: [{
type: 'string'
}, {
type: 'object',
properties: {
name: { type: 'string' },
address: { type: 'string' },
},
required: ['name'],
}],
},
};
storage.set(key, Array.from(value), schema).pipe(
mergeMap(() => storage.get<[string, Monster][]>(key, schema)),
).subscribe((result: [string, Monster][] | undefined) => {
expect(result).toEqual(array);
done();
});
});
});
/* Storing and retrieving plain objects, validated against a JSON schema. */
describe('object', () => {
  it('with all subtypes', (done) => {
    interface User {
      name: string;
      age: number;
      philosopher: boolean;
      books: string[];
      family: {
        brothers: number;
        sisters: number;
      };
      creditCard?: number;
    }
    const value: User = {
      name: 'Henri Bergson',
      age: 81,
      philosopher: true,
      books: [`Essai sur les données immédiates de la conscience`, `Matière et mémoire`],
      family: {
        brothers: 5,
        sisters: 3,
      },
    };
    const schema: JSONSchema = {
      type: 'object',
      properties: {
        name: { type: 'string' },
        age: { type: 'number' },
        philosopher: { type: 'boolean' },
        books: {
          type: 'array',
          items: { type: 'string' },
        },
        family: {
          type: 'object',
          properties: {
            brothers: { type: 'integer' },
            sisters: { type: 'integer' },
          },
          required: ['brothers', 'sisters']
        },
        creditCard: { type: 'number' },
      },
      required: ['name', 'age', 'philosopher', 'books', 'family'],
    };
    // Round-trip: set then get with the same schema; deep equality expected.
    storage.set(key, value, schema).pipe(
      mergeMap(() => storage.get<User>(key, schema))
    ).subscribe((result: User | undefined) => {
      expect(result).toEqual(value);
      done();
    });
  });
  it('without required properties', (done) => {
    interface User {
      name?: string;
      age?: number;
    }
    const value: User = {
      name: 'Henri Bergson',
    };
    // Schema with no `required` list: missing optional properties must pass validation.
    const schema: JSONSchema = {
      type: 'object',
      properties: {
        name: { type: 'string' },
        age: { type: 'number' },
      },
    };
    storage.set(key, value, schema).pipe(
      mergeMap(() => storage.get<User>(key, schema))
    ).subscribe((result: User | undefined) => {
      expect(result).toEqual(value);
      done();
    });
  });
  it('objects / cast / no schema', (done) => {
    interface Test {
      test: string;
    }
    // A type cast without a schema must be a compile-time error.
    // @ts-expect-error Failure test
    storage.get<Test>('test').subscribe((_: Test | undefined) => {
      expect().nothing();
      done();
    });
  });
  it('objects / no cast / schema', (done) => {
    // With a schema but no cast, the result must not be assignable to an arbitrary type.
    storage.get('test', {
      type: 'object',
      properties: {
        test: { type: 'string' }
      }
    // @ts-expect-error Failure test
    }).subscribe((_: Test | undefined) => {
      expect().nothing();
      done();
    });
  });
});
/* Edge cases: unknown keys, null/undefined writes, Blob storage, deep schemas. */
describe('specials', () => {
  it('unexisting key', (done) => {
    const schema: JSONSchema = { type: 'string' };
    // Unknown keys resolve to `undefined`, never error.
    storage.get(`unknown${Date.now()}`, schema).subscribe((data: string | undefined) => {
      expect(data).toBeUndefined();
      done();
    });
  });
  it('null', (done) => {
    const schema: JSONSchema = { type: 'string' };
    // Setting `null` behaves like a deletion: subsequent get yields `undefined`.
    storage.set(key, 'test', schema).pipe(
      mergeMap(() => storage.set(key, null, schema)),
      mergeMap(() => storage.get(key, schema)),
    ).subscribe((result: string | undefined) => {
      expect(result).toBeUndefined();
      done();
    });
  });
  it('undefined', (done) => {
    const schema: JSONSchema = { type: 'string' };
    // Same as `null`: setting `undefined` clears the entry.
    storage.set(key, 'test', schema).pipe(
      mergeMap(() => storage.set(key, undefined, schema)),
      mergeMap(() => storage.get(key, schema)),
    ).subscribe((result: string | undefined) => {
      expect(result).toBeUndefined();
      done();
    });
  });
  it('blob (will be pending in Safari private)', (done) => {
    const value = new Blob();
    // `localStorage` cannot serialize a Blob, so an error is the expected path there;
    // on `indexedDB` the Blob round-trips (except Safari private mode, hence pending()).
    storage.set(key, value).pipe(
      mergeMap(() => storage.get(key))
    ).subscribe((storage.backingEngine === 'localStorage') ? {
      next: () => {
        // Nothing to do
      },
      error: () => {
        expect().nothing();
        done();
      }
    } : {
      next: (result: unknown | undefined) => {
        expect(result).toEqual(value);
        done();
      },
      error: () => {
        /* Safari in private mode doesn't allow to store `Blob` in `indexedDB` */
        pending();
        done();
      }
    });
  });
  it('heavy schema', (done) => {
    interface City {
      country: string;
      population: number;
      coordinates: [number, number];
      monuments?: {
        name: string;
        constructionYear?: number;
      }[];
    }
    const value: [string, City][] = [
      ['Paris', {
        country: 'France',
        population: 2187526,
        coordinates: [48.866667, 2.333333],
        monuments: [{
          name: `Tour Eiffel`,
          constructionYear: 1889,
        }, {
          name: `Notre-Dame de Paris`,
          constructionYear: 1345,
        }],
      }],
      ['Kyōto', {
        country: 'Japan',
        population: 1467702,
        coordinates: [35.011665, 135.768326],
        monuments: [{
          name: `Sanjūsangen-dō`,
          constructionYear: 1164,
        }],
      }],
    ];
    // Deeply nested schema: array of [string, object] tuples with nested arrays/objects.
    const schema: JSONSchema = {
      type: 'array',
      items: {
        type: 'array',
        items: [{
          type: 'string'
        }, {
          type: 'object',
          properties: {
            country: { type: 'string' },
            population: { type: 'integer' },
            coordinates: {
              type: 'array',
              items: [
                { type: 'number'},
                { type: 'number'},
              ],
            },
            monuments: {
              type: 'array',
              items: {
                type: 'object',
                properties: {
                  name: { type: 'string' },
                  constructionYear: { type: 'integer' },
                },
                required: ['name'],
              },
            },
          },
          required: ['country', 'population', 'coordinates'],
        }]
      },
    };
    storage.set(key, value, schema).pipe(
      mergeMap(() => storage.get<[string, City][]>(key, schema)),
    ).subscribe((result: [string, City][] | undefined) => {
      expect(result).toEqual(value);
      done();
    });
  });
});
});
/* set(): overwriting and concurrent writes. */
describe('set()', () => {
  it('update', (done) => {
    const schema: JSONSchema = { type: 'string' };
    // Writing twice to the same key must succeed (second write overwrites).
    storage.set(key, 'value', schema).pipe(
      mergeMap(() => storage.set(key, 'updated', schema))
    ).subscribe(() => {
      expect().nothing();
      done();
    });
  });
  it('concurrency', (done) => {
    const value1 = 'test1';
    const value2 = 'test2';
    const schema: JSONSchema = { type: 'string' };
    // Two overlapping writes must not throw, and the last write wins.
    expect(() => {
      storage.set(key, value1, schema).subscribe();
      storage.set(key, value2, schema).pipe(
        mergeMap(() => storage.get(key, schema))
      ).subscribe((result) => {
        expect(result).toBe(value2);
        done();
      });
    }).not.toThrow();
  });
});
/* delete() and clear() semantics. */
describe('deletion', () => {
  it('delete() with existing key', (done) => {
    storage.set(key, 'test').pipe(
      mergeMap(() => storage.delete(key)),
      mergeMap(() => storage.get(key))
    ).subscribe((result) => {
      expect(result).toBeUndefined();
      done();
    });
  });
  it('delete() with unexisting key', (done) => {
    // Deleting a missing key is a silent no-op, not an error.
    storage.delete(`unexisting${Date.now()}`).subscribe(() => {
      expect().nothing();
      done();
    });
  });
  it('clear()', (done) => {
    storage.set(key, 'test').pipe(
      mergeMap(() => storage.clear()),
      mergeMap(() => storage.get(key))
    ).subscribe((result) => {
      expect(result).toBeUndefined();
      done();
    });
  });
});
/* Map-like surface: size, keys(), has(), and combined workflows. */
describe('Map-like API', () => {
  it('size', (done) => {
    // Walk size through 0 -> 1 -> 2 -> 1 -> 0 as items are added/removed/cleared.
    storage.size.pipe(
      tap((length) => { expect(length).toBe(0); }),
      mergeMap(() => storage.set(key, 'test')),
      mergeMap(() => storage.size),
      tap((length) => { expect(length).toBe(1); }),
      mergeMap(() => storage.set('', 'test')),
      mergeMap(() => storage.size),
      tap((length) => { expect(length).toBe(2); }),
      mergeMap(() => storage.delete(key)),
      mergeMap(() => storage.size),
      tap((length) => { expect(length).toBe(1); }),
      mergeMap(() => storage.clear()),
      mergeMap(() => storage.size),
      tap((length) => { expect(length).toBe(0); }),
    ).subscribe(() => {
      done();
    });
  });
  it('keys()', (done) => {
    const key1 = 'index1';
    const key2 = 'index2';
    const keys = [key1, key2];
    // keys() emits one value per key, in no guaranteed order:
    // remove each emission from the expected list as it arrives.
    storage.set(key1, 'test').pipe(
      mergeMap(() => storage.set(key2, 'test')),
      mergeMap(() => storage.keys()),
    ).subscribe({
      next: (value) => {
        expect(keys).toContain(value);
        keys.splice(keys.indexOf(value), 1);
      },
      complete: () => {
        done();
      },
    });
  });
  it('keys() when no items', (done) => {
    // An empty store must complete without emitting.
    storage.keys().subscribe({
      next: () => {
        fail();
      },
      complete: () => {
        expect().nothing();
        done();
      },
    });
  });
  it('has() on existing', (done) => {
    storage.set(key, 'test').pipe(
      mergeMap(() => storage.has(key))
    ).subscribe((result) => {
      expect(result).toBe(true);
      done();
    });
  });
  it('has() on unexisting', (done) => {
    storage.has(`nokey${Date.now()}`).subscribe((result) => {
      expect(result).toBe(false);
      done();
    });
  });
  it('advanced case: remove only some items', (done) => {
    // Filter keys() emissions by prefix and delete only the matching entries.
    storage.set('user_firstname', 'test').pipe(
      mergeMap(() => storage.set('user_lastname', 'test')),
      mergeMap(() => storage.set('app_data1', 'test')),
      mergeMap(() => storage.set('app_data2', 'test')),
      mergeMap(() => storage.keys()),
      filter((currentKey) => currentKey.startsWith('app_')),
      mergeMap((currentKey) => storage.delete(currentKey)),
    ).subscribe({
      /* So we need to wait for completion of all actions to check */
      complete: () => {
        storage.size.subscribe((size) => {
          expect(size).toBe(2);
          done();
        });
      }
    });
  });
});
/* watch(): a live subscription re-emitting on every mutation of the key. */
describe('watch()', () => {
  it('valid', (done) => {
    const watchedKey = 'watched1';
    // Expected emission sequence: initial undefined, then each set/delete/set/clear.
    const values = [undefined, 'test1', undefined, 'test2', undefined];
    const schema: JSONSchema = { type: 'string' };
    let i = 0;
    storage.watch(watchedKey, schema).subscribe((result: string | undefined) => {
      expect(result).toBe(values[i]);
      i += 1;
      // After the initial emission, trigger the rest of the mutation sequence.
      if (i === 1) {
        storage.set(watchedKey, values[1], schema).pipe(
          mergeMap(() => storage.delete(watchedKey)),
          mergeMap(() => storage.set(watchedKey, values[3], schema)),
          mergeMap(() => storage.clear()),
        ).subscribe();
      }
      if (i === values.length) {
        done();
      }
    });
  });
});
/* JSON-schema validation errors on get()/set()/watch(). */
describe('validation', () => {
  interface Test {
    expected: string;
  }
  const schema: JSONSchema = {
    type: 'object',
    properties: {
      expected: {
        type: 'string'
      }
    },
    required: ['expected']
  };
  it('valid schema with options', (done) => {
    const value = 5;
    const schemaWithOptions: JSONSchema = { type: 'number', maximum: 10 };
    storage.set(key, value, schemaWithOptions).pipe(
      mergeMap(() => storage.get(key, schemaWithOptions)),
    ).subscribe((result: number | undefined) => {
      expect(result).toBe(value);
      done();
    });
  });
  it('invalid schema with options', (done) => {
    // Stored value (15) violates `maximum: 10` when re-validated on get().
    const value = 15;
    const schemaWithOptions: JSONSchema = { type: 'number', maximum: 10 };
    storage.set(key, value, { type: 'number' }).pipe(
      mergeMap(() => storage.get(key, schemaWithOptions)),
    ).subscribe({
      error: (error) => {
        // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
        expect(error.message).toBe(VALIDATION_ERROR);
        done();
      }
    });
  });
  it('invalid in get()', (done) => {
    // Stored a string, but the get() schema expects an object: must error.
    storage.set(key, 'test', { type: 'string' }).pipe(
      mergeMap(() => storage.get<Test>(key, schema))
    ).subscribe({ error: (error) => {
      // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
      expect(error.message).toBe(VALIDATION_ERROR);
      done();
    } });
  });
  it('invalid in set()', (done) => {
    // Value doesn't match the schema at write time: must error.
    storage.set(key, 'test', schema).subscribe({
      error: (error) => {
        // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
        expect(error.message).toBe(VALIDATION_ERROR);
        done();
      },
    });
  });
  it('invalid in watch()', (done) => {
    const watchedKey = 'watched2';
    storage.set(watchedKey, 'test', { type: 'string' }).subscribe(() => {
      storage.watch(watchedKey, { type: 'number' }).subscribe({
        error: () => {
          expect().nothing();
          done();
        }
      });
    });
  });
  it('null: no validation', (done) => {
    // A missing key emits `undefined` without running schema validation.
    storage.get<string>(`noassociateddata${Date.now()}`, schema).subscribe(() => {
      expect().nothing();
      done();
    });
  });
});
/* Avoid https://github.com/cyrilletuzi/angular-async-local-storage/issues/25
 * Avoid https://github.com/cyrilletuzi/angular-async-local-storage/issues/5 */
/* Every API observable must complete (not just emit), so chaining works. */
describe('complete', () => {
  const schema: JSONSchema = { type: 'string' };
  it('get()', (done) => {
    storage.get(key, schema).subscribe({
      complete: () => {
        expect().nothing();
        done();
      }
    });
  });
  it('set()', (done) => {
    storage.set('index', 'value', schema).subscribe({
      complete: () => {
        expect().nothing();
        done();
      }
    });
  });
  it('delete()', (done) => {
    storage.delete(key).subscribe({
      complete: () => {
        expect().nothing();
        done();
      }
    });
  });
  it('clear()', (done) => {
    storage.clear().subscribe({
      complete: () => {
        expect().nothing();
        done();
      }
    });
  });
  it('size', (done) => {
    storage.size.subscribe({
      complete: () => {
        expect().nothing();
        done();
      }
    });
  });
  it('keys()', (done) => {
    storage.keys().subscribe({
      complete: () => {
        expect().nothing();
        done();
      }
    });
  });
  it('has()', (done) => {
    storage.has(key).subscribe({
      complete: () => {
        expect().nothing();
        done();
      }
    });
  });
});
/* API observables are convertible to Promises and usable with async/await.
 * NOTE(review): `.toPromise()` is deprecated in recent RxJS (replaced by
 * `firstValueFrom`/`lastValueFrom`) — presumably kept here for the RxJS
 * version this project targets; confirm before upgrading. */
describe('compatibility with Promise', () => {
  const schema: JSONSchema = { type: 'string' };
  it('Promise', (done) => {
    const value = 'test';
    storage.set(key, value, schema).toPromise()
      .then(() => storage.get(key, schema).toPromise())
      .then((result: string | undefined) => {
        expect(result).toBe(value);
        done();
      })
      .catch(() => {
        fail();
      });
  });
  it('async / await', async () => {
    const value = 'test';
    await storage.set(key, value, schema).toPromise();
    const result: string | undefined = await storage.get(key, schema).toPromise();
    expect(result).toBe(value);
  });
});
});
}
/* Runs the shared `tests` suite against every backing database implementation,
 * then checks browser-API specifics (raw indexedDB access, prefixes). */
describe('StorageMap', () => {
  tests('memory', () => new StorageMap(new MemoryDatabase()));
  tests('localStorage', () => new StorageMap(new LocalStorageDatabase()));
  tests('localStorage with prefix', () => new StorageMap(new LocalStorageDatabase(`ls`)));
  tests('indexedDB', () => new StorageMap(new IndexedDBDatabase()));
  tests('indexedDB with custom options', () => new StorageMap(new IndexedDBDatabase('customDbTest', 'storeTest', 2, false)));
  describe('browser APIs', () => {
    /* Avoid https://github.com/cyrilletuzi/angular-async-local-storage/issues/57 */
    it('IndexedDb is used (will be pending in Firefox/IE private mode)', (done) => {
      const index = `test${Date.now()}`;
      const value = 'test';
      const localStorageService = new StorageMap(new IndexedDBDatabase());
      localStorageService.set(index, value).subscribe(() => {
        try {
          // Bypass the library and read back through the raw indexedDB API
          // to prove the data really landed in indexedDB.
          const dbOpen = indexedDB.open(DEFAULT_IDB_DB_NAME, DEFAULT_IDB_DB_VERSION);
          dbOpen.addEventListener('success', () => {
            const store = dbOpen.result.transaction([DEFAULT_IDB_STORE_NAME], 'readonly').objectStore(DEFAULT_IDB_STORE_NAME);
            const request = store.get(index);
            request.addEventListener('success', () => {
              // Default mode stores the raw value (no wrapper object).
              expect(request.result).toBe(value);
              dbOpen.result.close();
              closeAndDeleteDatabase(done, localStorageService);
            });
            request.addEventListener('error', () => {
              dbOpen.result.close();
              /* This case is not supposed to happen */
              fail();
            });
          });
          dbOpen.addEventListener('error', () => {
            /* Cases : Firefox private mode where `indexedDb` exists but fails */
            pending();
          });
        } catch {
          /* Cases : IE private mode where `indexedDb` will exist but not its `open()` method */
          pending();
        }
      });
    });
    it('indexedDb with default options (will be pending in Firefox private mode)', (done) => {
      const localStorageService = new StorageMap(new IndexedDBDatabase());
      /* Do a request first as a first transaction is needed to set the store name */
      localStorageService.get('test').subscribe(() => {
        if (localStorageService.backingEngine === 'indexedDB') {
          const { database, store, version } = localStorageService.backingStore;
          expect(database).toBe(DEFAULT_IDB_DB_NAME);
          expect(store).toBe(DEFAULT_IDB_STORE_NAME);
          expect(version).toBe(DEFAULT_IDB_DB_VERSION);
          closeAndDeleteDatabase(done, localStorageService);
        } else {
          /* Cases: Firefox private mode */
          pending();
        }
      });
    });
    /* Avoid https://github.com/cyrilletuzi/angular-async-local-storage/issues/57 */
    it('indexedDb with noWrap to false (will be pending in Firefox/IE private mode)', (done) => {
      const index = `wrap${Date.now()}`;
      const value = 'test';
      const localStorageService = new StorageMap(new IndexedDBDatabase(undefined, undefined, undefined, false));
      localStorageService.set(index, value).subscribe(() => {
        try {
          const dbOpen = indexedDB.open(DEFAULT_IDB_DB_NAME, DEFAULT_IDB_DB_VERSION);
          dbOpen.addEventListener('success', () => {
            const store = dbOpen.result.transaction([DEFAULT_IDB_STORE_NAME], 'readonly').objectStore(DEFAULT_IDB_STORE_NAME);
            const request = store.get(index);
            request.addEventListener('success', () => {
              // With noWrap=false the value is stored wrapped as `{ value }`.
              expect(request.result).toEqual({ value });
              dbOpen.result.close();
              closeAndDeleteDatabase(done, localStorageService);
            });
            request.addEventListener('error', () => {
              dbOpen.result.close();
              /* This case is not supposed to happen */
              fail();
            });
          });
          dbOpen.addEventListener('error', () => {
            /* Cases : Firefox private mode where `indexedDb` exists but fails */
            pending();
          });
        } catch {
          /* Cases : IE private mode where `indexedDb` will exist but not its `open()` method */
          pending();
        }
      });
    });
    it('indexedDb with custom options (will be pending in Firefox private mode)', (done) => {
      /* Unique names to be sure `indexedDB` `upgradeneeded` event is triggered */
      const dbName = `dbCustom${Date.now()}`;
      const storeName = `storeCustom${Date.now()}`;
      const dbVersion = 2;
      const noWrap = false;
      const localStorageService = new StorageMap(new IndexedDBDatabase(dbName, storeName, dbVersion, noWrap));
      /* Do a request first as a first transaction is needed to set the store name */
      localStorageService.get('test').subscribe(() => {
        if (localStorageService.backingEngine === 'indexedDB') {
          const { database, store, version } = localStorageService.backingStore;
          expect(database).toBe(dbName);
          expect(store).toBe(storeName);
          expect(version).toBe(dbVersion);
          closeAndDeleteDatabase(done, localStorageService);
        } else {
          /* Cases: Firefox private mode */
          pending();
        }
      });
    });
    it('localStorage with prefix', () => {
      const prefix = `ls_`;
      const localStorageService = new StorageMap(new LocalStorageDatabase(prefix));
      expect(localStorageService.fallbackBackingStore.prefix).toBe(prefix);
    });
  });
});
import * as tf from '@tensorflow/tfjs';
import {backend_util, BackendTimingInfo, DataId, DataType, KernelBackend, ModelTensorInfo, Rank, Scalar, scalar, ScalarLike, Tensor, Tensor1D, Tensor2D, Tensor3D, Tensor4D, TensorInfo, tidy, util} from '@tensorflow/tfjs';
import {isArray, isNullOrUndefined} from 'util';
import {encodeInt32ArrayAsInt64, Int64Scalar} from './int64_tensors';
import {TensorMetadata, TFEOpAttr, TFJSBinding} from './tfjs_binding';
// tslint:disable-next-line:no-require-imports
const messages = require('./proto/api_pb');
// Internal bookkeeping record for one tensor tracked by the Node.js backend.
type TensorData = {
  shape: number[],
  // Native TF dtype enum value (e.g. binding.TF_FLOAT), not the TFJS string.
  dtype: number,
  // Values waiting to be uploaded to a native handle; null once materialized.
  values: backend_util.BackendValues,
  // Id of the native TensorHandle in the binding; -1 while values are pending.
  id: number,
  refCount: number;
};
/** TFJS backend that delegates execution to the native TensorFlow C binding. */
export class NodeJSKernelBackend extends KernelBackend {
  binding: TFJSBinding;
  isGPUPackage: boolean;
  isUsingGpuDevice: boolean;
  // Maps TFJS DataIds to native tensor metadata / pending values.
  private tensorMap: tf.DataStorage<TensorData>;
  constructor(binding: TFJSBinding, packageName: string) {
    super();
    this.binding = binding;
    // The GPU build is detected purely by the npm package name.
    this.isGPUPackage = packageName === '@tensorflow/tfjs-node-gpu';
    this.isUsingGpuDevice = this.binding.isUsingGpuDevice();
    this.tensorMap = new tf.DataStorage<TensorData>(this, tf.engine());
  }
getDTypeInteger(dtype: DataType): number {
switch (dtype) {
case 'float32':
return this.binding.TF_FLOAT;
case 'int32':
return this.binding.TF_INT32;
case 'bool':
return this.binding.TF_BOOL;
case 'complex64':
return this.binding.TF_COMPLEX64;
case 'string':
return this.binding.TF_STRING;
default:
throw new Error(`Unsupported DType: ${dtype}`);
}
}
  // Convenience wrapper: native dtype enum for a tensor's TFJS dtype.
  private typeAttributeFromTensor(value: Tensor): number {
    return this.getDTypeInteger(value.dtype);
  }
  // Creates a new Tensor and maps the dataId to the passed in ID.
  /**
   * Wraps native op-output metadata in a TFJS Tensor: registers the native
   * tensor id under a fresh dataId and translates the native dtype enum back
   * to a TFJS dtype string (with lossy casts for INT64/UINT8, see below).
   */
  private createOutputTensor(metadata: TensorMetadata): Tensor {
    const newId = {};
    this.tensorMap.set(newId, {
      shape: metadata.shape,
      dtype: metadata.dtype,
      id: metadata.id,
      values: null,
      refCount: 1
    });
    let dtype: DataType;
    switch (metadata.dtype) {
      case this.binding.TF_FLOAT:
        dtype = 'float32';
        break;
      case this.binding.TF_INT32:
        dtype = 'int32';
        break;
      case this.binding.TF_INT64:
        console.warn('INT64 output tensor will be stored as BigInt64Array.');
        // INT64 is not supported in TFJS yet, cast it to int32.
        // NOTE(review): the warning text mentions BigInt64Array while the
        // dtype is mapped to int32 — confirm which behavior is intended.
        dtype = 'int32';
        break;
      case this.binding.TF_BOOL:
        dtype = 'bool';
        break;
      case this.binding.TF_COMPLEX64:
        dtype = 'complex64';
        break;
      case this.binding.TF_STRING:
        dtype = 'string';
        break;
      case this.binding.TF_RESOURCE:
        // NOTE(cais): We currently represent resource-type Tensors
        // as string of ubytes.
        dtype = 'string';
        break;
      case this.binding.TF_UINT8:
        // TensorFlow uses UINT8 as dtype for image tensor. UINT8 is not
        // supported in TFJS yet, cast it to int32.
        dtype = 'int32';
        break;
      default:
        throw new Error(`Unknown dtype enum ${metadata.dtype}`);
    }
    // TODO(yassogba) Enable this once all the kernels are removed from backend.
    // We can then change the return type from Tensor to TensorInfo.
    // return {dataId: newId, shape: metadata.shape, dtype};
    return tf.engine().makeTensorFromDataId(newId, metadata.shape, dtype);
  }
// Prepares Tensor instances for Op execution.
private getInputTensorIds(tensors: Array<TensorInfo|Int64Scalar>): number[] {
const ids: number[] = [];
for (let i = 0; i < tensors.length; i++) {
if (tensors[i] instanceof Int64Scalar) {
// Then `tensors[i]` is a Int64Scalar, which we currently represent
// using an `Int32Array`.
const value = (tensors[i] as Int64Scalar).valueArray;
const id = this.binding.createTensor([], this.binding.TF_INT64, value);
ids.push(id);
} else {
const info = this.tensorMap.get((tensors[i] as TensorInfo).dataId);
// TODO - what about ID in this case? Handle in write()??
if (info.values != null) {
// Values were delayed to write into the TensorHandle. Do that before
// Op execution and clear stored values.
info.id =
this.binding.createTensor(info.shape, info.dtype, info.values);
info.values = null;
}
ids.push(info.id);
}
}
return ids;
}
  // Standard attribute set for reduction ops: keep_dims flag plus the input
  // dtype (`T`) and an int32 axis dtype (`Tidx`).
  createReductionOpAttrs(tensor: TensorInfo, keepDims = false): TFEOpAttr[] {
    return [
      {name: 'keep_dims', type: this.binding.TF_ATTR_BOOL, value: keepDims},
      createTensorsTypeOpAttr('T', tensor.dtype),
      createTensorsTypeOpAttr('Tidx', 'int32')
    ];
  }
  // Native TF computes in full 32-bit float precision.
  floatPrecision(): 16|32 {
    return 32;
  }
  // Uses the engine's default epsilon for 32-bit precision.
  epsilon(): number {
    return super.epsilon();
  }
/**
* Executes an op that has a single input and output.
*
* Helper function to wrap executeSingleOutput in a particular case.
* @param name The name of the Op to execute.
* @param input The input Tensor for the Op.
*/
executeSingleInput(name: string, input: TensorInfo): Tensor {
const opAttrs = [createTensorsTypeOpAttr('T', input.dtype)];
return this.executeSingleOutput(name, opAttrs, [input]);
}
  /**
   * Executes a TensorFlow Eager Op that provides one output Tensor.
   * @param name The name of the Op to execute.
   * @param opAttrs The list of Op attributes required to execute.
   * @param inputs The list of input Tensors for the Op.
   * @return A resulting Tensor from Op execution.
   */
  executeSingleOutput(name: string, opAttrs: TFEOpAttr[], inputs: TensorInfo[]):
      Tensor {
    const outputMetadata = this.binding.executeOp(
        name, opAttrs, this.getInputTensorIds(inputs), 1);
    return this.createOutputTensor(outputMetadata[0]);
  }
  /**
   * Executes a TensorFlow Eager Op that provides multiple output Tensors.
   * @param name The name of the Op to execute.
   * @param opAttrs The list of Op attributes required to execute.
   * @param inputs The list of input Tensors for the Op.
   * @param numOutputs The number of output Tensors for Op execution.
   * @return A resulting Tensor array from Op execution.
   */
  executeMultipleOutputs(
      name: string, opAttrs: TFEOpAttr[], inputs: TensorInfo[],
      numOutputs: number): Tensor[] {
    const outputMetadata = this.binding.executeOp(
        name, opAttrs, this.getInputTensorIds(inputs), numOutputs);
    return outputMetadata.map(m => this.createOutputTensor(m));
  }
  numDataIds(): number {
    return this.tensorMap.numDataIds();
  }
  // Nothing backend-wide to release; native handles are freed per-tensor in
  // disposeData().
  dispose(): void {}
  // Reads are synchronous in the Node.js backend, so read() simply wraps
  // readSync() in a resolved Promise.
  async read(dataId: DataId): Promise<backend_util.BackendValues> {
    return this.readSync(dataId);
  }
  readSync(dataId: DataId): backend_util.BackendValues {
    if (!this.tensorMap.has(dataId)) {
      throw new Error(`Tensor ${dataId} was not registered!`);
    }
    const info = this.tensorMap.get(dataId);
    if (info.values != null) {
      // Values were never uploaded to a native handle; return them directly.
      return info.values;
    } else {
      return this.binding.tensorDataSync(info.id);
    }
  }
/**
* Dispose the memory if the dataId has 0 refCount. Return true if the memory
* is released, false otherwise.
* @param dataId
* @oaram force Optional, remove the data regardless of refCount
*/
disposeData(dataId: DataId, force = false): boolean {
// No-op if already disposed.
if (this.tensorMap.has(dataId)) {
const id = this.tensorMap.get(dataId).id;
this.tensorMap.get(dataId).refCount--;
if (!force && this.tensorMap.get(dataId).refCount > 0) {
return false;
}
if (id != null && id >= 0) {
this.binding.deleteTensor(id);
}
this.tensorMap.delete(dataId);
}
return true;
}
  /** Return refCount of a `TensorData`. */
  refCount(dataId: DataId): number {
    if (this.tensorMap.has(dataId)) {
      const tensorData = this.tensorMap.get(dataId);
      return tensorData.refCount;
    }
    // Unregistered dataIds report a refCount of 0 rather than throwing.
    return 0;
  }
  // Increment the refCount of a registered dataId.
  incRef(dataId: DataId) {
    this.tensorMap.get(dataId).refCount++;
  }
  // Registers values under a dataId without uploading them to a native handle
  // yet (id: -1); the upload is deferred until the tensor is first used as an
  // op input (see getInputTensorIds).
  move(
      dataId: DataId, values: backend_util.BackendValues, shape: number[],
      dtype: DataType, refCount: number): void {
    this.tensorMap.set(
        dataId, {shape, dtype: getTFDType(dtype), values, id: -1, refCount});
  }
  // Allocates a fresh dataId for the given values with an initial refCount of 1.
  write(values: backend_util.BackendValues, shape: number[], dtype: DataType):
      DataId {
    const dataId = {};
    this.move(dataId, values, shape, dtype, 1);
    return dataId;
  }
applyActivation<T extends Tensor>(
input: T, activation: string, preluActivationWeights?: Tensor,
leakyreluAlpha?: number): T {
let result = input;
if (activation != null) {
if (activation === 'linear') {
// No-op
} else if (activation === 'relu') {
result = tf.relu(result);
} else if (activation === 'prelu') {
result = tf.prelu(result, preluActivationWeights) as T;
} else if (activation === 'leakyrelu') {
result = tf.leakyRelu(result, leakyreluAlpha);
} else if (activation === 'elu') {
result = tf.elu(result);
} else if (activation === 'relu6') {
result = tf.relu6(result);
} else if (activation === 'sigmoid') {
result = tf.sigmoid(result);
} else {
throw new Error(`Activation: ${
activation} has not been implemented for the Node.js backend`);
}
}
return result;
}
  // Element-wise division; `T` is upcast from both operand dtypes.
  divide(a: Tensor, b: Tensor): Tensor {
    const opAttrs = [createTensorsTypeOpAttr(
        'T', backend_util.upcastType(a.dtype, b.dtype))];
    return this.executeSingleOutput('Div', opAttrs, [a, b]);
  }
  // Like divide(), but the native DivNoNan op returns 0 where b is 0.
  divNoNan(a: Tensor, b: Tensor): Tensor {
    const opAttrs = [createTensorsTypeOpAttr(
        'T', backend_util.upcastType(a.dtype, b.dtype))];
    return this.executeSingleOutput('DivNoNan', opAttrs, [a, b]);
  }
  // Returns the coordinates of true elements of `condition` as a 2D tensor.
  where(condition: Tensor): Tensor2D {
    return this.executeSingleOutput('Where', [], [condition]) as Tensor2D;
  }
  // Legacy KernelBackend methods intentionally left unimplemented here —
  // presumably handled by modular kernels instead; confirm before relying on them.
  topKValues<T extends Tensor>(x: T, k: number): Tensor1D {
    throw new Error('Method not implemented.');
  }
  topKIndices(x: Tensor, k: number): Tensor1D {
    throw new Error('Method not implemented.');
  }
  int<T extends Tensor>(x: T): T {
    throw new Error('Method not implemented.');
  }
  /**
   * Decodes JPEG bytes into a 3D image tensor via the native DecodeJpeg op.
   * Parameters mirror the op's attributes (channels, ratio, fancy_upscaling,
   * try_recover_truncated, acceptable_fraction, dct_method).
   */
  decodeJpeg(
      contents: Uint8Array, channels: number, ratio: number,
      fancyUpscaling: boolean, tryRecoverTruncated: boolean,
      acceptableFraction: number, dctMethod: string): Tensor3D {
    const opAttrs = [
      {name: 'channels', type: this.binding.TF_ATTR_INT, value: channels},
      {name: 'ratio', type: this.binding.TF_ATTR_INT, value: ratio}, {
        name: 'fancy_upscaling',
        type: this.binding.TF_ATTR_BOOL,
        value: fancyUpscaling
      },
      {
        name: 'try_recover_truncated',
        type: this.binding.TF_ATTR_BOOL,
        value: tryRecoverTruncated
      },
      {
        name: 'acceptable_fraction',
        type: this.binding.TF_ATTR_FLOAT,
        value: acceptableFraction
      },
      {name: 'dct_method', type: this.binding.TF_ATTR_STRING, value: dctMethod}
    ];
    // The encoded bytes are passed as a string scalar input.
    const inputArgs = [scalar(contents, 'string')];
    return this.executeSingleOutput('DecodeJpeg', opAttrs, inputArgs) as
        Tensor<Rank.R3>;
  }
decodePng(contents: Uint8Array, channels: number): Tensor3D {
const opAttrs =
[{name: 'channels', type: this.binding.TF_ATTR_INT, value: channels}];
const inputArgs = [scalar(contents, 'string')];
return this.executeSingleOutput('DecodePng', opAttrs, inputArgs) as
Tensor<Rank.R3>;
}
decodeBmp(contents: Uint8Array, channels: number): Tensor3D {
const opAttrs =
[{name: 'channels', type: this.binding.TF_ATTR_INT, value: channels}];
const inputArgs = [scalar(contents, 'string')];
return this.executeSingleOutput('DecodeBmp', opAttrs, inputArgs) as
Tensor<Rank.R3>;
}
decodeGif(contents: Uint8Array): Tensor4D {
const inputArgs = [scalar(contents, 'string')];
return this.executeSingleOutput('DecodeGif', [], inputArgs) as
Tensor<Rank.R4>;
}
  /**
   * Shared helper for EncodeJpeg/EncodePng: uploads the raw image bytes as a
   * TF_UINT8 tensor, runs the encode op, and forces the output dtype to
   * TF_UINT8 so the encoded bytes are not decoded as a UTF8 string.
   */
  executeEncodeImageOp(
      name: string, opAttrs: TFEOpAttr[], imageData: Uint8Array,
      imageShape: number[]): Tensor {
    const inputTensorId =
        this.binding.createTensor(imageShape, this.binding.TF_UINT8, imageData);
    const outputMetadata =
        this.binding.executeOp(name, opAttrs, [inputTensorId], 1);
    const outputTensorInfo = outputMetadata[0];
    // prevent the tensor data from being converted to a UTF8 string, since
    // the encoded data is not valid UTF8
    outputTensorInfo.dtype = this.binding.TF_UINT8;
    return this.createOutputTensor(outputTensorInfo);
  }
  /**
   * Encodes an image tensor's bytes as JPEG via the native EncodeJpeg op.
   * Parameters mirror the op's attributes one-to-one.
   */
  encodeJpeg(
      imageData: Uint8Array, imageShape: number[], format: ''|'grayscale'|'rgb',
      quality: number, progressive: boolean, optimizeSize: boolean,
      chromaDownsampling: boolean, densityUnit: 'in'|'cm', xDensity: number,
      yDensity: number, xmpMetadata: string): Tensor {
    const opAttrs = [
      {name: 'format', type: this.binding.TF_ATTR_STRING, value: format},
      {name: 'quality', type: this.binding.TF_ATTR_INT, value: quality}, {
        name: 'progressive',
        type: this.binding.TF_ATTR_BOOL,
        value: progressive
      },
      {
        name: 'optimize_size',
        type: this.binding.TF_ATTR_BOOL,
        value: optimizeSize
      },
      {
        name: 'chroma_downsampling',
        type: this.binding.TF_ATTR_BOOL,
        value: chromaDownsampling
      },
      {
        name: 'density_unit',
        type: this.binding.TF_ATTR_STRING,
        value: densityUnit
      },
      {name: 'x_density', type: this.binding.TF_ATTR_INT, value: xDensity},
      {name: 'y_density', type: this.binding.TF_ATTR_INT, value: yDensity}, {
        name: 'xmp_metadata',
        type: this.binding.TF_ATTR_STRING,
        value: xmpMetadata
      }
    ];
    return this.executeEncodeImageOp(
        'EncodeJpeg', opAttrs, imageData, imageShape);
  }
encodePng(imageData: Uint8Array, imageShape: number[], compression: number):
Tensor {
const opAttrs = [
{name: 'compression', type: this.binding.TF_ATTR_INT, value: compression}
];
return this.executeEncodeImageOp(
'EncodePng', opAttrs, imageData, imageShape);
}
  // Releases the native resources of a loaded SavedModel session.
  deleteSavedModel(id: number): void {
    this.binding.deleteSavedModel(id);
  }
  // Loads a SavedModel MetaGraph from disk; returns the native session id.
  loadSavedModelMetaGraph(path: string, tags: string): number {
    return this.binding.loadSavedModel(path, tags);
  }
  /**
   * Resolves input tensor ids for SavedModel execution, re-creating native
   * tensors for inputs whose signature dtype (DT_UINT8 / DT_INT64) has no
   * direct TFJS equivalent.
   * NOTE(review): the replaced ids at tensorIds[i] appear to be created per
   * call without an explicit delete here — confirm their lifetime is handled
   * by the binding.
   */
  private getMappedInputTensorIds(
      inputs: Tensor[], inputTensorInfos: ModelTensorInfo[]) {
    const tensorIds = this.getInputTensorIds(inputs);
    for (let i = 0; i < inputs.length; i++) {
      if (inputTensorInfos[i] != null) {
        if (inputTensorInfos[i].tfDtype === 'DT_UINT8') {
          // Signature expects uint8: copy the (int32-backed) values into a
          // Uint8Array and upload as TF_UINT8.
          const data = Uint8Array.from(inputs[i].dataSync());
          const inputTensorId = this.binding.createTensor(
              inputs[i].shape, this.binding.TF_UINT8, data);
          tensorIds[i] = inputTensorId;
        } else if (inputTensorInfos[i].tfDtype === 'DT_INT64') {
          // Signature expects int64: widen each int32 to a [low, high] pair.
          const data =
              encodeInt32ArrayAsInt64(inputs[i].dataSync() as Int32Array);
          const inputTensorId = this.binding.createTensor(
              inputs[i].shape, this.binding.TF_INT64, data);
          tensorIds[i] = inputTensorId;
        }
      }
    }
    return tensorIds;
  }
  /**
   * Runs a loaded SavedModel session. Input/output op names are passed to the
   * binding as comma-joined strings.
   */
  runSavedModel(
      id: number, inputs: Tensor[], inputTensorInfos: ModelTensorInfo[],
      outputOpNames: string[]): Tensor[] {
    const outputMetadata = this.binding.runSavedModel(
        id, this.getMappedInputTensorIds(inputs, inputTensorInfos),
        inputTensorInfos.map(info => info.name).join(','),
        outputOpNames.join(','));
    return outputMetadata.map(m => this.createOutputTensor(m));
  }
  // ------------------------------------------------------------
  // TensorBoard-related (tfjs-node-specific) backend kernels.
  /**
   * Creates a SummaryWriter resource handle for the given log directory.
   * The logdir is embedded in shared_name so writers are deduplicated per dir.
   */
  summaryWriter(logdir: string): Tensor1D {
    const opAttrs = [
      {
        name: 'shared_name',
        type: this.binding.TF_ATTR_STRING,
        value: `logdir:${logdir}`
      },
      {name: 'container', type: this.binding.TF_ATTR_STRING, value: ''}
    ];
    const writerResource =
        this.executeSingleOutput('SummaryWriter', opAttrs, []);
    return writerResource as Tensor1D;
  }
createSummaryFileWriter(
resourceHandle: Tensor, logdir: string, maxQueue?: number,
flushMillis?: number, filenameSuffix?: string): void {
const inputArgs = [
resourceHandle, scalar(logdir),
scalar(maxQueue == null ? 10 : maxQueue, 'int32'),
scalar(flushMillis == null ? 2 * 60 * 1000 : flushMillis, 'int32'),
scalar(filenameSuffix == null ? '.v2' : filenameSuffix)
];
this.executeMultipleOutputs('CreateSummaryFileWriter', [], inputArgs, 0);
}
  /**
   * Writes a scalar summary event (for TensorBoard) at an integer step.
   * Accepts either a JS number (written as float32) or a rank-0 Tensor.
   */
  writeScalarSummary(
      resourceHandle: Tensor, step: number, name: string,
      value: Scalar|number): void {
    tidy(() => {
      util.assert(
          Number.isInteger(step),
          () => `step is expected to be an integer, but is instead ${step}`);
      // The step is written as an int64 scalar.
      const inputArgs: Array<Tensor|Int64Scalar> =
          [resourceHandle, new Int64Scalar(step), scalar(name, 'string')];
      let typeAttr: number;
      if (typeof value === 'number') {
        inputArgs.push(scalar(value));
        typeAttr = this.binding.TF_FLOAT;
      } else {
        // `value` is a Scalar.
        util.assert(
            value.rank === 0,
            () => `A non-scalar tensor (rank ${value.rank}) is passed to ` +
                `writeScalarSummary()`);
        inputArgs.push(value);
        typeAttr = this.typeAttributeFromTensor(value);
      }
      const opAttrs: TFEOpAttr[] =
          [{name: 'T', type: this.binding.TF_ATTR_TYPE, value: typeAttr}];
      this.binding.executeOp(
          'WriteScalarSummary', opAttrs, this.getInputTensorIds(inputArgs), 0);
    });
  }
  /**
   * Writes a histogram summary event: bucketizes `data` into `[left, right,
   * count]` triples and serializes the plugin metadata as a protobuf message.
   */
  writeHistogramSummary(
      resourceHandle: Tensor, step: number, name: string, data: Tensor,
      bucketCount: number|undefined, description: string|undefined): void {
    tidy(() => {
      util.assert(
          Number.isInteger(step),
          () => `step is expected to be an integer, but is instead ${step}`);
      // We use the WriteSummary op, and not WriteHistogramSummary. The
      // difference is that WriteHistogramSummary takes a tensor of any shape,
      // and places the values in 30 buckets, while WriteSummary expects a
      // tensor which already describes the bucket widths and counts.
      //
      // If we were to use WriteHistogramSummary, we wouldn't have to implement
      // the "bucketization" of the input tensor, but we also wouldn't have
      // control over the number of buckets, or the description of the graph.
      //
      // Therefore, we instead use WriteSummary, which makes it possible to
      // support these features. However, the trade-off is that we have to
      // implement our own "bucketization", and have to write the summary as a
      // protobuf message.
      const content = new messages.HistogramPluginData().setVersion(0);
      const pluginData = new messages.SummaryMetadata.PluginData()
                             .setPluginName('histograms')
                             .setContent(content.serializeBinary());
      const summary = new messages.SummaryMetadata()
                          .setPluginData(pluginData)
                          .setDisplayName(null)
                          .setSummaryDescription(description);
      // The serialized metadata is shipped to the op as a string scalar.
      const summaryTensor = scalar(summary.serializeBinary(), 'string');
      const nameTensor = scalar(name, 'string');
      const stepScalar = new Int64Scalar(step);
      const buckets = this.buckets(data, bucketCount);
      // Sanity checks on the bucketization result: shape [k, 3], float32.
      util.assert(
          buckets.rank === 2 && buckets.shape[1] === 3,
          () => `Expected buckets to have shape [k, 3], but they had shape ${
              buckets.shape}`);
      util.assert(
          buckets.dtype === 'float32',
          () => `Expected buckets to have dtype float32, but they had dtype ${
              buckets.dtype}`);
      const inputArgs: Array<Tensor|Int64Scalar> =
          [resourceHandle, stepScalar, buckets, nameTensor, summaryTensor];
      const typeAttr = this.typeAttributeFromTensor(buckets);
      const opAttrs: TFEOpAttr[] =
          [{name: 'T', type: this.binding.TF_ATTR_TYPE, value: typeAttr}];
      this.binding.executeOp(
          'WriteSummary', opAttrs, this.getInputTensorIds(inputArgs), 0);
    });
  }
flushSummaryWriter(resourceHandle: Tensor): void {
const inputArgs: Tensor[] = [resourceHandle];
this.executeMultipleOutputs('FlushSummaryWriter', [], inputArgs, 0);
}
  /**
   * Group data into histogram buckets.
   *
   * @param data A `Tensor` of any shape. Must be castable to `float32`
   * @param bucketCount Optional positive `number`
   * @returns A `Tensor` of shape `[k, 3]` and type `float32`. The `i`th row is
   * a triple `[leftEdge, rightEdge, count]` for a single bucket. The value of
   * `k` is either `bucketCount`, `1` or `0`.
   *
   * NOTE(review): intermediate tensors created here are not disposed locally;
   * the call site in writeHistogramSummary runs inside tidy() — confirm any
   * new caller does the same to avoid leaking tensors.
   */
  private buckets(data: Tensor, bucketCount?: number): Tensor<tf.Rank> {
    // Empty input => zero buckets ([0, 3]).
    if (data.size === 0) {
      return tf.tensor([], [0, 3], 'float32');
    }
    // 30 is the default number of buckets in the TensorFlow Python
    // implementation. See
    // https://github.com/tensorflow/tensorboard/blob/master/tensorboard/plugins/histogram/summary_v2.py
    bucketCount = bucketCount !== undefined ? bucketCount : 30;
    util.assert(
        Number.isInteger(bucketCount) && bucketCount > 0,
        () =>
            `Expected bucket count to be a strictly positive integer, but it was ` +
            `${bucketCount}`);
    data = data.flatten();
    data = data.cast('float32');
    const min: Scalar = data.min();
    const max: Scalar = data.max();
    const range: Scalar = max.sub(min);
    // All values equal: return one unit-wide bucket centered on that value.
    const isSingular = range.equal(0).arraySync() !== 0;
    if (isSingular) {
      const center = min;
      const bucketStart: Scalar = center.sub(0.5);
      const bucketEnd: Scalar = center.add(0.5);
      const bucketCounts = tf.scalar(data.size, 'float32');
      return tf.concat([bucketStart, bucketEnd, bucketCounts]).reshape([1, 3]);
    }
    // Map each value to a bucket index, clamping the max value into the
    // last bucket instead of an out-of-range index.
    const bucketWidth = range.div(bucketCount);
    const offsets = data.sub(min);
    const bucketIndices = offsets.floorDiv(bucketWidth).cast('int32');
    const clampedIndices =
        tf.minimum(bucketIndices, bucketCount - 1).cast('int32');
    // One-hot + sum over the batch axis yields per-bucket counts.
    const oneHots = tf.oneHot(clampedIndices, bucketCount);
    const bucketCounts = oneHots.sum(0).cast('int32');
    let edges = tf.linspace(min.arraySync(), max.arraySync(), bucketCount + 1);
    // Ensure last value in edges is max (TF's linspace op doesn't do this)
    edges = tf.concat([edges.slice(0, bucketCount), max.reshape([1])], 0) as
        tf.Tensor1D;
    const leftEdges = edges.slice(0, bucketCount);
    const rightEdges = edges.slice(1, bucketCount);
    // Stack as rows then transpose into [bucketCount, 3].
    return tf.stack([leftEdges, rightEdges, bucketCounts.cast('float32')])
        .transpose();
  }
  // ~ End of TensorBoard-related (tfjs-node-specific) backend kernels.
  // ------------------------------------------------------------
memory() {
// Due to automatic garbage collection, the numbers are unreliable.
// TODO(kreeger): Since there is finalization in C, count the true
// number of undisposed tensors.
return {unreliable: true};
}
async time(f: () => void): Promise<BackendTimingInfo> {
const start = process.hrtime();
f();
// hrtime() returns tuple of [seconds, nanoseconds], and we need to return
// milliseconds.
const elapsed = process.hrtime(start);
return {kernelMs: elapsed[0] * 1000 + elapsed[1] / 1000000};
}
  /** Returns the number of SavedModel bundles currently loaded natively. */
  getNumOfSavedModels() {
    return this.binding.getNumOfSavedModels();
  }
}
/** Returns an instance of the Node.js backend. */
export function nodeBackend(): NodeJSKernelBackend {
  const backend = tf.findBackend('tensorflow');
  return backend as NodeJSKernelBackend;
}
/** Returns the TF dtype for a given DataType. */
export function getTFDType(dataType: tf.DataType): number {
  const binding = nodeBackend().binding;
  // Lookup table from tfjs-core dtype names to native TF_* dtype constants.
  // 'int64' is not a generally supported dtype in TensorFlow.js (tfjs-core),
  // but it is included here for the purpose of writing the `step` value to
  // TensorBoard via WriteScalarSummary and other op kernels.
  const dtypeLookup: {[dtype: string]: number} = {
    'float32': binding.TF_FLOAT,
    'int32': binding.TF_INT32,
    'bool': binding.TF_BOOL,
    'complex64': binding.TF_COMPLEX64,
    'string': binding.TF_STRING,
    'int64': binding.TF_INT64,
  };
  const tfDType = dtypeLookup[dataType as string];
  if (tfDType === undefined) {
    const errorMessage = `Unknown dtype: ${dataType}`;
    throw new Error(errorMessage);
  }
  return tfDType;
}
/**
 * Creates a TFEOpAttr for a 'type' OpDef attribute from a Tensor or list of
 * Tensors.
 */
export function createTensorsTypeOpAttr(
    attrName: string,
    tensorsOrDtype: tf.Tensor|tf.Tensor[]|tf.DataType): TFEOpAttr {
  if (isNullOrUndefined(tensorsOrDtype)) {
    throw new Error('Invalid input tensors value.');
  }
  // A tensor (or tensor list) carries its own dtype; otherwise the argument
  // is already a DataType name.
  const isTensorInput =
      tensorsOrDtype instanceof tf.Tensor || Array.isArray(tensorsOrDtype);
  const dtypeValue = isTensorInput ?
      getTFDTypeForInputs(tensorsOrDtype as tf.Tensor | tf.Tensor[]) :
      getTFDType(tensorsOrDtype as tf.DataType);
  return {
    name: attrName,
    type: nodeBackend().binding.TF_ATTR_TYPE,
    value: dtypeValue
  };
}
// TODO(yassogba) remove? who uses this?
/**
 * Creates a TFEOpAttr carrying `value` under `attrName`.
 *
 * NOTE(review): despite accepting `tensorsOrDtype`, this only null-checks it —
 * the attr is always emitted with type TF_BOOL and the raw `value`. Looks
 * intended only for boolean op attrs; confirm before reusing elsewhere.
 */
export function createOpAttr(
    attrName: string, tensorsOrDtype: tf.Tensor|tf.Tensor[]|tf.DataType,
    value: ScalarLike): TFEOpAttr {
  if (isNullOrUndefined(tensorsOrDtype)) {
    throw new Error('Invalid input tensors value.');
  }
  return {name: attrName, type: nodeBackend().binding.TF_BOOL, value};
}
/**
 * Returns the dtype number for a single or list of input Tensors.
 *
 * For a list, only the first tensor's dtype is inspected (all tensors are
 * assumed to share a dtype); an empty list yields the -1 sentinel. Both
 * behaviors match the original loop-based implementation.
 *
 * @throws Error when `tensors` is null or undefined.
 */
function getTFDTypeForInputs(tensors: tf.Tensor|tf.Tensor[]): number {
  if (isNullOrUndefined(tensors)) {
    throw new Error('Invalid input tensors value.');
  }
  if (isArray(tensors)) {
    // Original code "looped" but returned on the first iteration; make that
    // first-element intent explicit and keep -1 for an empty list.
    return tensors.length > 0 ? getTFDType(tensors[0].dtype) : -1;
  }
  return getTFDType(tensors.dtype);
}
export function ensureTensorflowBackend() {
tf.util.assert(
tf.getBackend() === 'tensorflow',
() => `Expect the current backend to be "tensorflow", but got "${
tf.getBackend()}"`);
} | the_stack |
import { clientAction, dispatch } from "../handler";
import { Api, Client } from "@core/types";
import { statusProducers, updateSettingsProducers } from "../lib/status";
import open from "open";
import { wait } from "@core/lib/utils/wait";
import { log } from "@core/lib/utils/logger";
// Creates a SAML external-auth provider on the org graph. In-flight flag and
// error state are handled by the shared statusProducers helper; the previous
// empty successStateProducer was a no-op and has been removed.
clientAction<Api.Action.RequestActions["CreateOrgSamlProvider"]>({
  type: "apiRequestAction",
  actionType: Api.ActionType.CREATE_ORG_SAML_PROVIDER,
  loggableType: "orgAction",
  authenticated: true,
  graphAction: true,
  serialAction: true,
  ...statusProducers("isCreatingSamlProvider", "createSamlError"),
});
// Updates SAML settings for an existing provider on the org graph.
clientAction<Api.Action.RequestActions["UpdateOrgSamlSettings"]>({
  type: "apiRequestAction",
  actionType: Api.ActionType.UPDATE_ORG_SAML_SETTINGS,
  loggableType: "orgAction",
  authenticated: true,
  graphAction: true,
  serialAction: true,
  ...statusProducers("isUpdatingSamlSettings", "updatingSamlSettingsError"),
});
// Deletes an external auth provider from the org graph.
clientAction<Api.Action.RequestActions["DeleteExternalAuthProvider"]>({
  type: "apiRequestAction",
  actionType: Api.ActionType.DELETE_EXTERNAL_AUTH_PROVIDER,
  loggableType: "orgAction",
  authenticated: true,
  graphAction: true,
  serialAction: true,
  ...statusProducers("isDeletingAuthProvider", "deleteAuthProviderError"),
});
// Fetches the org's external auth providers plus per-provider SAML settings.
// Uses hand-written producers (rather than statusProducers) because the
// success case stores fetched data, not just a completion flag.
clientAction<Api.Action.RequestActions["GetExternalAuthProviders"]>({
  type: "apiRequestAction",
  actionType: Api.ActionType.GET_EXTERNAL_AUTH_PROVIDERS,
  loggableType: "authAction",
  authenticated: true,
  stateProducer: (draft) => {
    draft.isFetchingAuthProviders = true;
    delete draft.fetchAuthProvidersError;
  },
  failureStateProducer: (draft, { payload }) => {
    delete draft.isFetchingAuthProviders;
    draft.fetchAuthProvidersError = payload;
  },
  successStateProducer: (draft, { payload }) => {
    delete draft.isFetchingAuthProviders;
    draft.externalAuthProviders = payload.providers;
    // Default to an empty map when the server omits SAML settings.
    draft.samlSettingsByProviderId = payload.samlSettingsByProviderId ?? {};
  },
});
// Loads a single external auth session by id (polled while waiting for a
// browser-based login to complete).
clientAction<Api.Action.RequestActions["GetExternalAuthSession"]>({
  type: "apiRequestAction",
  actionType: Api.ActionType.GET_EXTERNAL_AUTH_SESSION,
  loggableType: "hostAction",
  ...statusProducers("isFetchingSession", "fetchSessionError"),
});
// Creates a pending external auth session on the host. On success the session
// id and browser auth URL are stored so the UI/background flow can open it.
clientAction<Api.Action.RequestActions["CreateExternalAuthSession"]>({
  type: "apiRequestAction",
  actionType: Api.ActionType.CREATE_EXTERNAL_AUTH_SESSION,
  loggableType: "hostAction",
  stateProducer: (draft) => {
    // Reset any stale session state/errors from a previous attempt.
    draft.creatingExternalAuthSession = true;
    delete draft.pendingExternalAuthSession;
    delete draft.startingExternalAuthSessionError;
    delete draft.externalAuthSessionCreationError;
    delete draft.authorizingExternallyErrorMessage;
  },
  failureStateProducer: (draft, { payload }) => {
    draft.externalAuthSessionCreationError = payload;
  },
  endStateProducer: (draft) => {
    delete draft.creatingExternalAuthSession;
  },
  successStateProducer: (draft, { payload }) => {
    const { id, authUrl } = payload;
    draft.pendingExternalAuthSession = { id, authUrl };
  },
});
// Drops any in-progress external auth session from local state.
clientAction<Client.Action.ClientActions["ClearPendingExternalAuthSession"]>({
  type: "clientAction",
  actionType: Client.ActionType.CLEAR_PENDING_EXTERNAL_AUTH_SESSION,
  stateProducer: (draft) => {
    delete draft.isAuthorizingExternallyForSessionId;
    delete draft.pendingExternalAuthSession;
  },
});
// Records the outcome of a sign-in external auth flow: either an error
// message, or the completed session's identifying fields.
clientAction<Client.Action.ClientActions["SetExternalAuthSessionResult"]>({
  type: "clientAction",
  actionType: Client.ActionType.SET_EXTERNAL_AUTH_SESSION_RESULT,
  stateProducer: (draft, { payload }) => {
    delete draft.isAuthorizingExternallyForSessionId;
    delete draft.pendingExternalAuthSession;
    // The payload is a union: error-message shape vs. completed-auth shape,
    // discriminated by presence of `authorizingExternallyErrorMessage`.
    if ("authorizingExternallyErrorMessage" in payload) {
      draft.authorizingExternallyErrorMessage =
        payload.authorizingExternallyErrorMessage;
    } else {
      const {
        externalAuthSessionId,
        externalAuthProviderId,
        orgId,
        userId,
        authType,
      } = payload;
      draft.completedExternalAuth = {
        externalAuthSessionId,
        externalAuthProviderId,
        orgId,
        userId,
        authType,
      };
    }
  },
});
// Invite/device-grant variant of SetExternalAuthSessionResult; additionally
// records who sent the invite (`sentById`).
clientAction<Client.Action.ClientActions["SetInviteExternalAuthSessionResult"]>(
  {
    type: "clientAction",
    actionType: Client.ActionType.SET_INVITE_EXTERNAL_AUTH_SESSION_RESULT,
    stateProducer: (draft, { payload }) => {
      delete draft.isAuthorizingExternallyForSessionId;
      delete draft.pendingExternalAuthSession;
      if ("authorizingExternallyErrorMessage" in payload) {
        draft.authorizingExternallyErrorMessage =
          payload.authorizingExternallyErrorMessage;
      } else {
        const {
          externalAuthSessionId,
          externalAuthProviderId,
          orgId,
          userId,
          sentById,
          authType,
        } = payload;
        draft.completedInviteExternalAuth = {
          externalAuthSessionId,
          externalAuthProviderId,
          orgId,
          userId,
          sentById,
          authType,
        };
      }
    },
  }
);
// Polls GET_EXTERNAL_AUTH_SESSION about once per second (max 60 iterations,
// so roughly a one-minute timeout) until the user completes auth in the
// browser, another session takes over, the session disappears, or the wait
// times out. On success, records the result via
// SET_EXTERNAL_AUTH_SESSION_RESULT using a context scoped to the
// authenticated user's account.
clientAction<Client.Action.ClientActions["WaitForExternalAuth"]>({
  type: "clientAction",
  actionType: Client.ActionType.WAIT_FOR_EXTERNAL_AUTH,
  stateProducer: (draft, { payload }) => {
    draft.isAuthorizingExternallyForSessionId = payload.externalAuthSessionId;
    draft.completedExternalAuth = undefined;
    delete draft.startingExternalAuthSessionError;
    delete draft.externalAuthSessionCreationError;
    delete draft.authorizingExternallyErrorMessage;
  },
  handler: async (state, { payload }, context) => {
    const { externalAuthSessionId, externalAuthProviderId, authType } = payload;
    let successPayload: typeof state.completedExternalAuth | undefined;
    let loadResSuccessContext: Client.Context | undefined;
    let awaitingLogin = true;
    let iterationsLeft = 60;
    let orgId: string | undefined;
    let userId: string | undefined;
    while (awaitingLogin) {
      iterationsLeft--;
      if (iterationsLeft <= 0) {
        // Timed out: surface an error, then clear pending state if no newer
        // session has replaced this one in the meantime.
        log("External login timed out", { payload });
        const res = await dispatch(
          {
            type: Client.ActionType.SET_EXTERNAL_AUTH_SESSION_RESULT,
            payload: {
              authorizingExternallyErrorMessage: `External login timed out for session ${externalAuthSessionId}`,
            },
          },
          context
        );
        if (
          externalAuthSessionId ===
          res.state.isAuthorizingExternallyForSessionId
        ) {
          // still the same session
          await dispatch(
            {
              type: Client.ActionType.CLEAR_PENDING_EXTERNAL_AUTH_SESSION,
            },
            context
          );
        }
        return;
      }
      const loadRes = await dispatch(
        {
          type: Api.ActionType.GET_EXTERNAL_AUTH_SESSION,
          payload: {
            id: externalAuthSessionId,
          },
        },
        context
      );
      // 404 means the session was deleted server-side.
      const wasDeleted =
        "payload" in loadRes.resultAction &&
        "errorStatus" in loadRes.resultAction?.payload &&
        loadRes.resultAction?.payload?.errorStatus === 404;
      // A different session id in state means another login flow took over.
      const newSessionSpawned =
        externalAuthSessionId !==
        loadRes.state.isAuthorizingExternallyForSessionId;
      if (wasDeleted || newSessionSpawned) {
        log("Another external login took over; aborting", {
          thisLoop: payload,
          otherSession: loadRes.state.isAuthorizingExternallyForSessionId,
        });
        return;
      }
      const resultActionPayload = (loadRes as any).resultAction
        .payload as Api.Net.ApiResultTypes["GetExternalAuthSession"];
      // NOTE(review): unconditional cast — only meaningful (and only read)
      // in the error branch below.
      const failure = (loadRes as any)
        .resultAction as Client.Action.FailureAction;
      // "requiresExternalAuthError" means the user hasn't finished in the
      // browser yet; keep polling.
      const stillWaitingExternally =
        resultActionPayload.type === "requiresExternalAuthError";
      if (stillWaitingExternally) {
        await wait(1000);
        continue;
      }
      if (resultActionPayload.type !== "externalAuthSession") {
        await dispatch(
          {
            type: Client.ActionType.SET_EXTERNAL_AUTH_SESSION_RESULT,
            payload: {
              authorizingExternallyErrorMessage:
                resultActionPayload.errorStatus?.toString() ?? failure.type!,
            },
          },
          context
        );
        return;
      }
      // user successfully auth'd elsewhere
      ({ userId, orgId } = resultActionPayload.session);
      log("External auth for session completed", {
        externalAuthSessionId,
        userId,
        orgId,
      });
      // Scope the follow-up dispatch to the now-known user account.
      loadResSuccessContext = {
        ...context,
        accountIdOrCliKey: userId!,
      };
      awaitingLogin = false; // success
    }
    successPayload = {
      authType,
      externalAuthProviderId,
      externalAuthSessionId,
      orgId: orgId!,
      userId: userId!,
    };
    await dispatch(
      {
        type: Client.ActionType.SET_EXTERNAL_AUTH_SESSION_RESULT,
        payload: successPayload,
      },
      loadResSuccessContext!
    );
  },
});
// Invite/device-grant counterpart of WAIT_FOR_EXTERNAL_AUTH: polls the
// invite/device-grant load action (about once per second, max 60 iterations)
// until the invitee completes external auth in the browser, then records the
// result via SET_INVITE_EXTERNAL_AUTH_SESSION_RESULT.
clientAction<Client.Action.ClientActions["WaitForInviteExternalAuth"]>({
  type: "clientAction",
  actionType: Client.ActionType.WAIT_FOR_INVITE_EXTERNAL_AUTH,
  stateProducer: (draft, { payload }) => {
    draft.isAuthorizingExternallyForSessionId = payload.externalAuthSessionId;
    draft.completedInviteExternalAuth = undefined;
    draft.authorizingExternallyErrorMessage = undefined;
  },
  handler: async (state, { payload }, context) => {
    const {
      externalAuthSessionId,
      authType,
      orgId,
      externalAuthProviderId,
      emailToken,
      encryptionToken,
      loadActionType,
    } = payload;
    // The load action (LOAD_INVITE or the device-grant equivalent) is
    // parameterized by the caller.
    const loadActionPayload = {
      emailToken,
      encryptionToken,
    };
    let successPayload: typeof state.completedInviteExternalAuth | undefined;
    let loadResSuccessContext: Client.Context | undefined;
    let userId: string | undefined;
    let sentById: string | undefined;
    let awaitingLogin = true;
    let iterationsLeft = 60;
    while (awaitingLogin) {
      iterationsLeft--;
      if (iterationsLeft <= 0) {
        // Timed out: surface an error, then clear pending state if no newer
        // session replaced this one.
        log("External login timed out", { payload });
        const res = await dispatch(
          {
            type: Client.ActionType.SET_INVITE_EXTERNAL_AUTH_SESSION_RESULT,
            payload: {
              authorizingExternallyErrorMessage: `External login timed out for session ${externalAuthSessionId}`,
            },
          },
          context
        );
        if (
          externalAuthSessionId ===
          res.state.isAuthorizingExternallyForSessionId
        ) {
          // still the same session
          await dispatch(
            {
              type: Client.ActionType.CLEAR_PENDING_EXTERNAL_AUTH_SESSION,
            },
            context
          );
        }
        return;
      }
      const loadRes = await dispatch(
        {
          type: loadActionType,
          payload: loadActionPayload,
        },
        context
      );
      // A different session id in state means another login flow took over.
      if (
        externalAuthSessionId !==
        loadRes.state.isAuthorizingExternallyForSessionId
      ) {
        log("Another external login took over; aborting", {
          thisLoop: payload,
          otherSession: loadRes.state.isAuthorizingExternallyForSessionId,
        });
        return;
      }
      if (!loadRes.success) {
        const resultAction =
          loadRes.resultAction as Client.Action.FailureAction;
        // "requiresExternalAuthError" means the user hasn't finished in the
        // browser yet; keep polling.
        const stillWaitingExternally =
          resultAction.payload.type === "requiresExternalAuthError";
        if (stillWaitingExternally) {
          await wait(1000);
          continue;
        }
        await dispatch(
          {
            type: Client.ActionType.SET_INVITE_EXTERNAL_AUTH_SESSION_RESULT,
            payload: {
              authorizingExternallyErrorMessage: ("errorReason" in
              resultAction.payload
                ? resultAction.payload.errorReason
                : resultAction.payload.type)!,
            },
          },
          context
        );
        return;
      }
      // Success: pull the invitee/grantee and sender ids from whichever
      // object the load action populated.
      const loadedInviteOrDeviceGrant = (
        payload.loadActionType === Client.ActionType.LOAD_INVITE
          ? loadRes.state.loadedInvite
          : loadRes.state.loadedDeviceGrant
      )!;
      userId =
        "inviteeId" in loadedInviteOrDeviceGrant
          ? loadedInviteOrDeviceGrant.inviteeId
          : loadedInviteOrDeviceGrant.granteeId;
      sentById =
        "invitedByUserId" in loadedInviteOrDeviceGrant
          ? loadedInviteOrDeviceGrant.invitedByUserId
          : loadedInviteOrDeviceGrant.grantedByUserId;
      log("External auth for session completed", {
        externalAuthSessionId,
        userId,
      });
      // Scope the follow-up dispatch to the now-known user account.
      loadResSuccessContext = {
        ...context,
        accountIdOrCliKey: userId,
      };
      awaitingLogin = false; // success
    }
    successPayload = {
      authType,
      orgId,
      externalAuthSessionId,
      externalAuthProviderId,
      userId: userId!,
      sentById: sentById!,
    };
    await dispatch(
      {
        type: Client.ActionType.SET_INVITE_EXTERNAL_AUTH_SESSION_RESULT,
        payload: successPayload,
      },
      loadResSuccessContext!
    );
  },
});
// Returns quickly but triggers WaitForExternalAuth in the background.
// (The original comment named WaitForInviteExternalAuth — that is the invite
// variant further below; this action dispatches WAIT_FOR_EXTERNAL_AUTH.)
clientAction<Client.Action.ClientActions["CreateExternalAuthSessionForLogin"]>({
  type: "asyncClientAction",
  actionType: Client.ActionType.CREATE_EXTERNAL_AUTH_SESSION_FOR_LOGIN,
  stateProducer: (draft) => {
    draft.startingExternalAuthSession = true;
    delete draft.startingExternalAuthSessionError;
    delete draft.externalAuthSessionCreationError;
    delete draft.authorizingExternallyErrorMessage;
  },
  failureStateProducer: (draft, { meta, payload }) => {
    // On specific failures, patch the stored account so the next sign-in
    // attempt uses the correct auth method/provider.
    if (payload.type === "requiresEmailAuthError") {
      draft.orgUserAccounts[meta.rootAction.payload.userId] = {
        ...draft.orgUserAccounts[meta.rootAction.payload.userId]!,
        provider: "email",
        externalAuthProviderId: undefined,
      };
    } else if (payload.type === "signInWrongProviderError") {
      draft.orgUserAccounts[meta.rootAction.payload.userId] = {
        ...draft.orgUserAccounts[meta.rootAction.payload.userId]!,
        provider: payload.providers[0].provider,
        externalAuthProviderId: payload.providers[0].externalAuthProviderId,
      };
    }
    draft.startingExternalAuthSessionError = payload;
  },
  endStateProducer: (draft) => {
    delete draft.startingExternalAuthSession;
  },
  handler: async (
    state,
    { payload },
    { context, dispatchSuccess, dispatchFailure }
  ) => {
    const {
      waitBeforeOpenMillis,
      authMethod,
      externalAuthProviderId,
      orgId,
      userId,
      provider,
    } = payload;
    let externalAuthSessionId: string | undefined;
    const sessionPayload: Api.Net.CreateExternalAuthSession = {
      authType: "sign_in",
      authMethod,
      provider,
      orgId,
      userId,
      externalAuthProviderId,
    };
    const res = await dispatch(
      {
        type: Api.ActionType.CREATE_EXTERNAL_AUTH_SESSION,
        payload: sessionPayload,
      },
      context
    );
    if (!res.success) {
      return dispatchFailure(
        (res.resultAction as Client.Action.FailureAction)
          .payload as Api.Net.ErrorResult,
        context
      );
    }
    const authResPayload = (res.resultAction as any)?.payload as
      | Api.Net.ApiResultTypes["CreateExternalAuthSession"]
      | Api.Net.RequiresEmailAuthResult;
    if (authResPayload.type !== "pendingExternalAuthSession") {
      // most likely, saml provider was deleted and user fell back to email auth
      return dispatchFailure(authResPayload, context);
    }
    log("Successfully created a pending external auth session", authResPayload);
    externalAuthSessionId = authResPayload.id;
    const backgroundWork = () => {
      // User's web browser will open and ask them to log in.
      open(authResPayload.authUrl);
      // BACKGROUND
      // Check for successful external auth, or time out.
      dispatch(
        {
          type: Client.ActionType.WAIT_FOR_EXTERNAL_AUTH,
          payload: {
            authMethod,
            provider,
            authType: "sign_in",
            externalAuthProviderId,
            externalAuthSessionId: externalAuthSessionId!,
          },
        },
        context
      );
    };
    // Defer opening the browser so the caller can settle UI state first.
    setTimeout(backgroundWork, waitBeforeOpenMillis);
    return dispatchSuccess(null, context);
  },
});
// Clears all completed/error external-auth state (used when restarting an
// auth flow from scratch).
clientAction<Client.Action.ClientActions["ResetExternalAuth"]>({
  type: "clientAction",
  actionType: Client.ActionType.RESET_EXTERNAL_AUTH,
  stateProducer: (draft) => {
    delete draft.completedExternalAuth;
    delete draft.completedInviteExternalAuth;
    delete draft.startingExternalAuthSessionError;
    delete draft.startingExternalAuthSessionInviteError;
    delete draft.externalAuthSessionCreationError;
    delete draft.authorizingExternallyErrorMessage;
  },
});
// returns quickly but triggers WaitForInviteExternalAuth in background
clientAction<Client.Action.ClientActions["CreateExternalAuthSessionForInvite"]>(
  {
    type: "asyncClientAction",
    actionType: Client.ActionType.CREATE_EXTERNAL_AUTH_SESSION_FOR_INVITE,
    stateProducer: (draft) => {
      draft.startingExternalAuthSessionInvite = true;
      delete draft.startingExternalAuthSessionInviteError;
      delete draft.completedInviteExternalAuth;
    },
    failureStateProducer: (draft, { payload }) => {
      draft.startingExternalAuthSessionInviteError = payload;
    },
    endStateProducer: (draft) => {
      delete draft.startingExternalAuthSessionInvite;
    },
    handler: async (
      state,
      { payload },
      { context, dispatchSuccess, dispatchFailure }
    ) => {
      const {
        authMethod,
        authObjectId,
        authType,
        emailToken,
        encryptionToken,
        externalAuthProviderId,
        loadActionType,
        orgId,
        provider,
      } = payload;
      let externalAuthSessionId: string | undefined;
      // The third "_"-delimited segment of the email token carries the
      // base64-encoded host url for the org's API host.
      const hostUrlEncoded = emailToken.split("_")[2];
      const hostUrl = hostUrlEncoded
        ? Buffer.from(hostUrlEncoded, "base64").toString("utf8")
        : undefined;
      const reqContext = { ...context, hostUrl };
      const sessionPayload: Api.Net.CreateExternalAuthSession = {
        authType,
        authMethod,
        provider,
        orgId,
        authObjectId,
        externalAuthProviderId,
      };
      const res = await dispatch(
        {
          type: Api.ActionType.CREATE_EXTERNAL_AUTH_SESSION,
          payload: sessionPayload,
        },
        reqContext
      );
      if (!res.success) {
        return dispatchFailure(
          (res.resultAction as Client.Action.FailureAction)
            .payload as Api.Net.ErrorResult,
          reqContext
        );
      }
      const authResPayload = (res.resultAction as any)?.payload as
        | Api.Net.ApiResultTypes["CreateExternalAuthSession"]
        | Api.Net.RequiresEmailAuthResult;
      if (authResPayload.type !== "pendingExternalAuthSession") {
        // somehow the saml provider was deleted before this user accepted their invitation
        return dispatchFailure(authResPayload, reqContext);
      }
      log(
        "Successfully created a pending external auth session",
        authResPayload
      );
      externalAuthSessionId = authResPayload.id;
      // User's web browser will open and ask them to log in.
      await open(authResPayload.authUrl!);
      // BACKGROUND
      // Check for successful external auth, or time out.
      dispatch(
        {
          type: Client.ActionType.WAIT_FOR_INVITE_EXTERNAL_AUTH,
          payload: {
            authType,
            emailToken,
            encryptionToken,
            externalAuthProviderId,
            externalAuthSessionId: externalAuthSessionId!,
            loadActionType,
            orgId,
          },
        },
        reqContext
      );
      return dispatchSuccess(null, reqContext);
    },
  }
);
import {Component, OnDestroy, OnInit} from '@angular/core';
import {ActivatedRoute} from '@angular/router';
import {PokemonService} from '../shared/pokemon.service';
import {Pokemon} from '../shared/pokemon.model';
import {Subject} from 'rxjs';
@Component({
selector: 'app-pokemon-detail',
templateUrl: './pokemon-detail.component.html',
styleUrls: ['./pokemon-detail.component.scss']
})
export class PokemonDetailComponent implements OnInit, OnDestroy {
pokemonId;
pokemon;
pokemonImageUrl;
pokemonDefaultColor;
heightInMetres;
heightInFeetInches;
weightInKgs;
weightInPounds;
pokemonStats;
maxPokemonStats = [];
minPokemonStats = [];
statsToShow = [];
maxStat;
maxMaxStat;
maxMinStat;
selectedStat = 'base';
stats: string[] = ['0%', '0%', '0%', '0%', '0%', '0%'];
imageLoading = true;
pokemonGenera;
abilities = [];
abilitySelected = 0;
allAbilitiesReceived = false;
selectedAbilityFlavorText;
selectedAbilityEffect;
selectedAbilityShortEffect;
unavailableAbilityText;
pokemonForms = [];
formattedFormNames = [];
selectedFormNo = 0;
// varietiesReversed = false; // For Magearna
formColors = {
'charizard-mega-x': 'black',
'latias-mega': 'purple',
'latios-mega': 'purple',
'castform-sunny': 'red',
'castform-rainy': 'blue',
'castform-snowy': 'purple',
'burmy-sandy': 'brown',
'burmy-trash': 'pink',
'rotom-heat': 'red',
'rotom-wash': 'blue',
'rotom-frost': 'purple',
'rotom-fan': 'yellow',
'rotom-mow': 'green',
'wormadam-sandy': 'brown',
'wormadam-trash': 'pink',
'darmanitan-zen': 'gray',
'kyurem-black': 'black',
'kyurem-white': 'white',
'oricorio-pom-pom': 'yellow',
'oricorio-pau': 'pink',
'oricorio-sensu': 'purple',
'lycanroc-midnight': 'red',
'lycanroc-dusk': 'brown',
    'minior-orange-meteor': 'red', // Second in the list; it's actually minior-red
'necrozma-ultra': 'yellow',
'magearna-original': 'red',
'raticate-alola': 'black',
'raichu-alola': 'brown',
'sandshrew-alola': 'blue',
'sandslash-alola': 'blue',
'vulpix-alola': 'white',
'ninetales-alola': 'white',
'meowth-alola': 'gray',
'persian-alola': 'gray',
'grimer-alola': 'green',
'muk-alola': 'green',
'marowak-alola': 'black'
};
visible = true;
imageVisible = true;
// Mega Evolution Animation
megaEvolving = false;
megaEvolveAnimationEnabled = true;
SphereVisible = false;
SigilVisible = false;
sigilEnd = false;
BubblesVisible = false;
imageLoadedForMegaEvolution = false;
imageLoadedForMegaEvolutionSubject = new Subject<boolean>();
evoChainsFetched = false;
selectedEvolutionId;
evolutionChain = [];
evolutionDesc = [];
exceptionalChainType;
evolutionChainExceptions_112 = [
'oddish',
'poliwag',
'ralts',
'cosmog'];
evolutionChainExceptions_12 = [
'slowpoke',
'nincada',
'snorunt',
'clamperl',
'burmy'];
evolutionChainExceptions_122 = [
'wurmple',
];
evolutionChainExceptions_13 = [
'tyrogue'
];
evolutionChainExceptions_18 = [
'eevee'];
typeDefences = {'4x': [], '2x': [], '1x': [], '0.5x': [], '0.25x': [], '0x': []};
typeChart = [{'name': 'normal', 'immunes': ['ghost'], 'weaknesses': ['rock', 'steel'], 'strengths': []},
{'name': 'fire', 'immunes': [], 'weaknesses': ['fire', 'water', 'rock', 'dragon'], 'strengths': ['grass', 'ice', 'bug', 'steel']},
{'name': 'water', 'immunes': [], 'weaknesses': ['water', 'grass', 'dragon'], 'strengths': ['fire', 'ground', 'rock']},
{'name': 'electric', 'immunes': ['ground'], 'weaknesses': ['electric', 'grass', 'dragon'], 'strengths': ['water', 'flying']},
{
'name': 'grass',
'immunes': [],
'weaknesses': ['fire', 'grass', 'poison', 'flying', 'bug', 'dragon', 'steel'],
'strengths': ['water', 'ground', 'rock']
},
{
'name': 'ice',
'immunes': [],
'weaknesses': ['fire', 'water', 'ice', 'steel'],
'strengths': ['grass', 'ground', 'flying', 'dragon']
},
{
'name': 'fighting',
'immunes': ['ghost'],
'weaknesses': ['poison', 'flying', 'psychic', 'bug', 'fairy'],
'strengths': ['normal', 'ice', 'rock', 'dark', 'steel']
},
{'name': 'poison', 'immunes': ['steel'], 'weaknesses': ['poison', 'ground', 'rock', 'ghost'], 'strengths': ['grass', 'fairy']},
{
'name': 'ground',
'immunes': ['flying'],
'weaknesses': ['grass', 'bug'],
'strengths': ['fire', 'electric', 'poison', 'rock', 'steel']
},
{'name': 'flying', 'immunes': [], 'weaknesses': ['electric', 'rock', 'steel'], 'strengths': ['grass', 'fighting', 'bug']},
{'name': 'psychic', 'immunes': ['dark'], 'weaknesses': ['psychic', 'steel'], 'strengths': ['fighting', 'poison']},
{
'name': 'bug',
'immunes': [],
'weaknesses': ['fire', 'fighting', 'poison', 'flying', 'ghost', 'steel', 'fairy'],
'strengths': ['grass', 'psychic', 'dark']
},
{'name': 'rock', 'immunes': [], 'weaknesses': ['fighting', 'ground', 'steel'], 'strengths': ['fire', 'ice', 'flying', 'bug']},
{'name': 'ghost', 'immunes': ['normal'], 'weaknesses': ['dark'], 'strengths': ['psychic', 'ghost']},
{'name': 'dragon', 'immunes': ['fairy'], 'weaknesses': ['steel'], 'strengths': ['dragon']},
{'name': 'dark', 'immunes': [], 'weaknesses': ['fighting', 'dark', 'fairy'], 'strengths': ['psychic', 'ghost']},
{'name': 'steel', 'immunes': [], 'weaknesses': ['fire', 'water', 'electric', 'steel'], 'strengths': ['ice', 'rock', 'fairy']},
{'name': 'fairy', 'immunes': [], 'weaknesses': ['fire', 'poison', 'steel'], 'strengths': ['fighting', 'dragon', 'dark']}];
// types = {
// 'normal': 1,
// 'fighting': 2,
// 'flying': 3,
// 'poison': 4,
// 'ground': 5,
// 'rock': 6,
// 'bug': 7,
// 'ghost': 8,
// 'steel': 9,
// 'fire': 10,
// 'water': 11,
// 'grass': 12,
// 'electric': 13,
// 'psychic': 14,
// 'ice': 15,
// 'dragon': 16,
// 'dark': 17,
// 'fairy': 18,
// 'unknown': 19,
// 'shadow': 20
// };
typeFromID = [
'normal',
'fighting',
'flying',
'poison',
'ground',
'rock',
'bug',
'ghost',
'steel',
'fire',
'water',
'grass',
'electric',
'psychic',
'ice',
'dragon',
'dark',
'fairy',
'unknown',
'shadow'];
genderDifferences = {
'003': 'Female\'s flower has a seed in it',
'012': 'Female has black (purple in Generation V) spots on her lower wings',
'019': 'Female has shorter whiskers',
'020': 'Female has shorter whiskers',
'025': 'Female\'s tail ends in the upper half of a heart',
'026': 'Female\'s tail lacks a point',
'041': 'Female has smaller fangs',
'042': 'Female has smaller fangs',
'044': 'Female has one large spot per bud',
'045': 'Female\'s petals have larger spots',
'064': 'Female has smaller whiskers',
'065': 'Female has smaller whiskers',
'084': 'Male has black necks and female has beige necks',
'085': 'Male has black necks and female has beige necks',
'097': 'Female has more collar fur',
'111': 'Male\'s horn is larger',
'112': 'Male\'s horn is larger',
'118': 'Male\'s horn is larger',
'119': 'Male\'s horn is larger',
'123': 'Female\'s abdomen is larger',
'129': 'Male has yellow whiskers and Female has white whiskers',
'130': 'Male has blue whiskers and female has white whiskers',
'133': 'Unlike other Eevee, in Pokémon: Let\'s Go, Pikachu! and Let\'s Go, Eevee!, the partner Eevee has gender differences. Male partner Eevee look the same as all other Eevee, but female partner Eevee have a unique heart-shaped tail pattern. In Generation VIII, this gender difference was applied to all female Eevee.',
'154': 'Female has smaller antennae',
'165': 'Female has smaller antennae',
'166': 'Female has smaller antennae',
'178': 'Male has three body stripes',
'185': 'Female\'s head "branch" is smaller',
'186': 'Female has smaller cheeks',
'190': 'Male has shorter head fur',
'194': 'Female has one set of gill branches',
'195': 'Female has smaller dorsal fins',
'198': 'Male\'s "hat" is larger',
'202': 'Female\'s mouth has lipstick-like marking',
'203': 'Female\'s body is more yellow',
'207': 'Female has smaller stinger',
'208': 'Female lacks an outer tooth on each side',
'212': 'Female\'s abdomen is larger',
'214': 'Female\'s horn is heart-shaped',
'215': 'Female\'s feather is shorter',
'217': 'Female has longer shoulder fur',
'221': 'Female has shorter tusks',
'224': 'Female has smaller suction cups',
'229': 'Female has shorter horns',
'232': 'Female has shorter tusks',
'255': 'Male has a black speck on his rear',
'256': 'Female has smaller head feathers',
'257': 'Female\'s head crest is smaller',
'267': 'Male\'s red spots are larger',
'269': 'Female has smaller antennae',
'272': 'Male has thicker stripes',
'274': 'Female has smaller leaf',
'275': 'Female has smaller leaves',
'307': 'Male\'s ears are higher than the female\'s',
'308': 'Male has a larger bulb on his head',
'315': 'Female\'s body leaf is longer',
'316': 'Female\'s feather is shorter',
'317': 'Female has shorter whiskers',
'322': 'Female has larger hump',
'323': 'Female has larger humps',
'332': 'Female has a large spike on her chest where male has two normal ones',
'350': 'Male\'s hair-like fins are shorter',
'369': 'Female has smaller jaw guard',
'396': 'Female\'s head is less white',
'397': 'Female\'s forehead spot is smaller',
'398': 'Female\'s forehead spot is smaller',
'399': 'Male has more tail curls',
'400': 'Male\'s face have two additional curls on beige mask',
'401': 'Female has larger collar',
'402': 'Female has smaller mustache',
'403': 'Female has blue hind feet and a shorter mane',
'404': 'Female has exposed ankles and a shorter mane',
'405': 'Female\'s mane is smaller',
'407': 'Female has longer cape',
'415': 'Male\'s lower face has no red mark',
'417': 'Female\'s head stripe is shorter',
'418': 'Male has two white spots on his back while Female has one',
'419': 'Male has two white bumps on his back while female has one',
'424': 'Male has shorter hair on his head',
'443': 'Male has grooved fin',
'444': 'Male has grooved fin',
'445': 'Male has grooved fin',
'449': 'Male and female\'s color patterns are inverted',
'450': 'Male\'s body is light brown while female\'s is dark gray',
'453': 'Female has higher "bandages"',
'454': 'Female\'s throat sac is smaller',
'456': 'Female has larger tail fins',
'457': 'Female has larger fins',
'459': 'Female\'s midsection is white',
'460': 'Female has longer chest fur',
'461': 'Female has shorter ear "feathers"',
'464': 'Female has smaller upper horn',
'465': 'Female\'s fingers are more magenta than blue',
'473': 'Female has smaller tusks',
'521': 'Male has a pink mask with long extensions while the female has a curved feather on the back of her head. Male has a green underside and female has a brown underside.',
'592': 'Male is blue, frowns, and has a ruffled collar, smooth, diamond-patterned tentacles, and one upper eyelash per eye. Female is pink, smiles, and has a bulbous collar, frilled tentacles, and one lower eyelash per eye. Males have a star-shaped head pattern and a stiff crown while females have a flower-shaped pattern and a limp crown.',
'593': 'Body color, eyes, head pattern, and tentacle differences are much the same as with Frillish, but the Female\'s eyes are now larger and have two eyelashes each. Males have a facial covering resembling a large moustache while Females have one resembling a fluffy collar. Female has a heart-shaped mouth and the male\'s is hidden inside the "moustache"',
'668': 'Male has a large mane shaped like the kanji character 大 ō (big or great), a stockier body, and half-brown front legs. Female has long, flowing hair similar to a ponytail and mostly-brown legs. Male\'s tail has a split in it.',
'678': 'Males are mostly blue with white highlights, the inverse of Females. Male\'s eyes are green with light blue sclerae, while Female\'s are red and yellow. Unlike most Pokémon, the moves Meowstic can learn vary by gender, with males learning more status moves and Females learning more attack moves. Male Meowstic have the Hidden Ability Prankster, while Female Meowstic have the Hidden Ability Competitive.',
'876': 'Males have lowered eyes, a triangular mouth pointing upwards like a frown, and more black on its torso, made to resemble a suit. Females have wider eyes, a triangular mouth pointing downwards like a smile, and more white on its torso, made to resemble an apron. Male Indeedee have more Attack, Special Attack, and Speed, while female Indeedee have more Defense, Special Defense, and HP.The two genders also have different moves.'
};
movesList = [];
delayMovesListLoad = true;
movesListLoaded = false;
levelUpMovesList = [];
machineMovesList = [];
eggMovesList = [];
tutorMovesList = [];
selectedMove = 'level-up';
selectedMoveFirstColHeader = {'level-up': 'Level', 'machine': '#', 'egg': '-', 'tutor': '-'};
selectedGameVersion = 'ultra-sun-ultra-moon';
versions = {
'red-blue': 1, 'yellow': 2, 'gold-silver': 3, 'crystal': 4, 'ruby-sapphire': 5, 'emerald': 6,
'firered-leafgreen': 7, 'diamond-pearl': 8, 'platinum': 9, 'heartgold-soulsilver': 10, 'black-white': 11, 'colosseum': 12,
'xd': 13, 'black-2-white-2': 14, 'x-y': 15, 'omega-ruby-alpha-sapphire': 16, 'sun-moon': 17, 'ultra-sun-ultra-moon': 18
};
currentMoveData;
currentMoveID = null;
moveDetails = [];
moveLevelDetails = [];
moveMachineDetails = [];
moveEggDetails = [];
moveTutorDetails = [];
moveMachineNos = [];
moveFlavorTextEntry;
moveShortEffect;
moveEffect;
moveContestType = ['cool', 'beauty', 'cute', 'smart', 'tough'];
isOnline;
  constructor(private activatedRoute: ActivatedRoute,
              private pokemonService: PokemonService) {
    // The mega-evolution animation is disabled on mobile devices.
    this.megaEvolveAnimationEnabled = !this.pokemonService.isMobile;
  }
// Reacts to route changes (so /pokemon/25 -> /pokemon/26 reuses this component)
// and initializes the page once the service's Pokémon cache is available.
ngOnInit(): void {
// Initialization Logic after Pokemon Fetching in Both If and Else Conditions
this.activatedRoute.params.subscribe(
(params) => {
this.pokemonId = params['id'];
this.pokemonForms = [];
this.formattedFormNames = [];
// From List
if (this.pokemonService.pokemons[this.pokemonId - 1]) {
this.initializePokemonAndForms();
// Directly From Link
} else {
// Deep link: wait for the service to finish loading all base data first.
// NOTE(review): this inner subscription is never unsubscribed, and a new one is
// created on every param change that hits this branch — verify for leaks.
this.pokemonService.EverythingLoaded.subscribe(res => {
this.initializePokemonAndForms();
});
}
}
);
// Subscribe to online check
this.pokemonService.createOnline$().subscribe(isOnline => {
this.isOnline = isOnline;
});
}
// Builds this.pokemon from the service cache, publishes it as the active Pokémon,
// loads its alternate forms, and fills every derived field for the template.
initializePokemonAndForms() {
const pokemonFromList = this.pokemonService.pokemons[this.pokemonId - 1];
this.pokemon = new Pokemon(
pokemonFromList.name,
pokemonFromList.id,
pokemonFromList.types,
pokemonFromList.abilities,
pokemonFromList.height,
pokemonFromList.weight,
pokemonFromList.baseExperience,
pokemonFromList.heldItems,
pokemonFromList.is_default,
pokemonFromList.moves,
pokemonFromList.stats,
pokemonFromList.species,
pokemonFromList.speciesDetails,
pokemonFromList.color,
pokemonFromList.genera,
pokemonFromList.varieties,
pokemonFromList.evolutionChainID
);
// Remember the species colour so form-specific colours can be reverted later.
this.pokemonDefaultColor = this.pokemon.color;
this.pokemonService.activePokemon.next(this.pokemon);
this.requestForms();
this.formatFormNames();
// Store as first form in array
// A copy (not a reference) — selectForm() mutates this.pokemon in place, so the
// default form's original values must be preserved separately.
this.pokemonForms[0] = new Pokemon(
this.pokemon.name,
this.pokemon.id,
this.pokemon.types,
this.pokemon.abilities,
this.pokemon.height,
this.pokemon.weight,
this.pokemon.baseExperience,
this.pokemon.heldItems,
this.pokemon.is_default,
this.pokemon.moves,
this.pokemon.stats,
this.pokemon.species,
this.pokemon.speciesDetails,
this.pokemon.color,
this.pokemon.genera,
this.pokemon.varieties,
this.pokemon.evolutionChainID
);
this.initializePokemonFields();
}
// Recomputes every derived display field (colour, image URL, unit conversions,
// stats, type matchups, moves) for the currently selected form of this.pokemon.
// Called on initial load and again whenever the user switches forms.
initializePokemonFields() {
    // Reset per-form selections (evolution highlight, stat view, ability tab).
    this.selectedEvolutionId = this.pokemon.id;
    this.selectedStat = 'base';
    this.abilitySelected = 0;
    // Use the per-form accent colour when one is configured, otherwise the species default.
    if (this.pokemon.varieties !== undefined &&
        this.formColors[this.pokemon.varieties[this.selectedFormNo]['n']] !== undefined) {
        this.pokemon.color = this.formColors[this.pokemon.varieties[this.selectedFormNo]['n']];
        this.pokemonService.activePokemon.next(this.pokemon);
    } else {
        this.pokemon.color = this.pokemonDefaultColor;
        this.pokemonService.activePokemon.next(this.pokemon);
    }
    this.requestAbilityDetails();
    // Evolution chains only need to be fetched once per Pokémon, not once per form.
    if (!this.evoChainsFetched) {
        this.getEvolutionChain();
        this.evoChainsFetched = true;
    }
    // Non-default forms set their own image URL in selectForm().
    if (this.pokemon.is_default) {
        this.pokemonImageUrl = 'https://raw.githubusercontent.com/HybridShivam/Pokemon/master/assets/images/' +
            this.pad(this.pokemon.id, 3) + '.png';
    }
    this.getGenera();
    // API height/weight are in decimetres/hectograms; convert to metric, then imperial.
    this.heightInMetres = (this.pokemon.height * 0.1).toFixed(1);
    const totalFeet = this.heightInMetres * 3.2808;
    // BUGFIX: the feet/inch symbols were swapped — feet take a prime ('), inches a
    // double prime ("). Previously rendered e.g. 2" 4' instead of 2' 4".
    this.heightInFeetInches = Math.floor(totalFeet) + '\'' + Math.round((totalFeet % 1) * 12) + '"';
    this.weightInKgs = (this.pokemon.weight * 0.1).toFixed(1);
    this.weightInPounds = (this.weightInKgs * 2.205).toFixed(1);
    // Base stats in fixed order: HP, Atk, Def, SpA, SpD, Spe.
    this.pokemonStats = [
        this.pokemon.stats[0]['bs'],
        this.pokemon.stats[1]['bs'],
        this.pokemon.stats[2]['bs'],
        this.pokemon.stats[3]['bs'],
        this.pokemon.stats[4]['bs'],
        this.pokemon.stats[5]['bs']
    ];
    this.maxStat = Math.max(...this.pokemonStats);
    // Defer the bar-width calculation so the CSS transition animates after render.
    setTimeout(() => {
        this.calculateStats();
    }, 500);
    this.calculateTypeEffectiveness();
    this.getMoves();
}
// Left-pads a number with zeros to the given width, e.g. pad(7, 3) -> '007'.
// Numbers already wider than `length` are returned unchanged.
pad(number, length) {
    return String(number).padStart(length, '0');
}
// (load) handler for the artwork <img>. Either reveals the image immediately, or —
// while the mega-evolution animation is running — signals that the new artwork is
// ready so the animation can reveal it at the right moment.
imagePreload() {
    this.imageLoading = false;
    if (this.megaEvolving) {
        this.imageLoadedForMegaEvolutionSubject.next(true);
        this.imageLoadedForMegaEvolution = true;
    } else {
        this.imageVisible = true;
    }
}
// Converts the six base stats into CSS bar widths (percent of the largest stat),
// clamped to a 10% minimum so small stats stay visible, then derives min/max stats.
calculateStats() {
    for (let i = 0; i < 6; i++) {
        const raw = this.pokemonStats[i] / this.maxStat * 100;
        this.stats[i] = (raw > 10 ? raw : 10) + '%';
    }
    this.statsToShow = this.pokemonStats;
    this.calculateMinStats();
    this.calculateMaxStats();
}
// Best-case level-100 stats: 31 IVs, 252 EVs (63 = 252/4) and, for non-HP stats,
// a beneficial nature (x1.1). HP uses the +100+10 HP formula instead.
calculateMaxStats() {
    if (this.pokemon.id === 292) {
        // Shedinja's HP is always 1 regardless of IVs/EVs.
        this.maxPokemonStats[0] = 1;
    } else {
        this.maxPokemonStats[0] = Math.floor((2 * this.pokemonStats[0] + 31 + 63) * 100 / 100 + 100 + 10);
    }
    for (let stat = 1; stat < 6; stat++) {
        this.maxPokemonStats[stat] = Math.floor(Math.floor((2 * this.pokemonStats[stat] + 31 + 63) * 100 / 100 + 5) * 1.1);
    }
    this.maxMaxStat = Math.max(...this.maxPokemonStats);
}
// Worst-case level-100 stats: 0 IVs, 0 EVs and, for non-HP stats, a hindering
// nature (x0.9). HP uses the +100+10 HP formula instead.
calculateMinStats() {
    if (this.pokemon.id === 292) {
        // Shedinja's HP is always 1.
        this.minPokemonStats[0] = 1;
    } else {
        this.minPokemonStats[0] = Math.floor((2 * this.pokemonStats[0]) * 100 / 100 + 100 + 10);
    }
    for (let stat = 1; stat < 6; stat++) {
        this.minPokemonStats[stat] = Math.floor(Math.floor((2 * this.pokemonStats[stat]) * 100 / 100 + 5) * 0.9);
    }
    this.maxMinStat = Math.max(...this.minPokemonStats);
}
// Switches the stats panel between the 'base', 'max' and 'min' views and
// recomputes the bar widths relative to that view's largest value.
showStats(type: string) {
    let stats;
    let maxStat;
    if (type === 'base') {
        stats = this.pokemonStats;
        maxStat = this.maxStat;
        this.statsToShow = this.pokemonStats;
        this.selectedStat = 'base';
    } else if (type === 'max') {
        stats = this.maxPokemonStats;
        maxStat = this.maxMaxStat;
        this.statsToShow = this.maxPokemonStats;
        this.selectedStat = 'max';
    } else if (type === 'min') {
        stats = this.minPokemonStats;
        maxStat = this.maxMinStat;
        this.statsToShow = this.minPokemonStats;
        this.selectedStat = 'min';
    }
    // 15% floor keeps the smallest bars visible in this view.
    for (let i = 0; i < 6; i++) {
        const raw = stats[i] / maxStat * 100;
        this.stats[i] = (raw > 15 ? raw : 15) + '%';
    }
}
// Mirrors the genera of the active Pokémon onto the field read by the template.
getGenera() {
this.pokemonGenera = this.pokemon.genera;
}
// Resolves each of the Pokémon's abilities from the preloaded ability JSON
// (ability IDs are 1-based, the array is 0-based). No network requests are made.
requestAbilityDetails() {
    this.abilities = this.pokemon.abilities.map(
        (ability) => this.pokemonService.abilityJSON[ability['id'] - 1]);
    this.allAbilitiesReceived = true;
}
// Expands ability tab `no` and fills its flavor text / effect, or shows a notice
// when the ability does not exist in the currently selected game version.
abilitySelect(no: number) {
this.abilitySelected = no;
this.unavailableAbilityText = '';
if (['red-blue', 'yellow', 'gold-silver', 'crystal'].indexOf(this.selectedGameVersion) !== -1) {
this.unavailableAbilityText = 'Abilities were introduced in Generation III Games!';
} else if (!this.availableInSelectedGen(this.abilities[no]['generation']['name'])) {
this.unavailableAbilityText = 'This ability didn\'t exist in the selected Games!';
} else {
// for (const entry of this.abilities[no]['flavor_text_entries']) {
// if (entry['language']['name'] === 'en' && entry['version_group']['name'] === this.selectedGameVersion) {
// this.selectedAbilityFlavorText = (entry['flavor_text']);
// break;
// }
// }
// Flavor texts are pre-keyed by version-group id in the bundled JSON
// (replaces the commented-out linear scan above).
this.selectedAbilityFlavorText = this.abilities[no]['flavor_text_entries'][this.versions[this.selectedGameVersion]];
// NOTE(review): effect_entries is indexed as an object here — presumably the
// bundled JSON flattens it to {effect, short_effect}; verify against the data file.
this.selectedAbilityEffect = this.abilities[no]['effect_entries']['effect'];
this.selectedAbilityShortEffect = this.abilities[no]['effect_entries']['short_effect'];
}
}
// Sum of the six base stats (HP, Atk, Def, SpA, SpD, Spe).
totalBaseStats() {
    let total = 0;
    for (let i = 0; i < 6; i++) {
        total += this.pokemonStats[i];
    }
    return total;
}
// Turns API form slugs (e.g. 'charizard-mega-x') into display names ('Mega Charizard X'-style),
// skipping totem/battle-bond forms and applying several species-specific special cases.
formatFormNames() {
for (let i = 0; i < this.pokemon.varieties.length; i++) {
var formattedName;
var name = this.pokemon.varieties[i]['n'];
if (this.pokemon.id !== 774) { // excluding Minior
if (name.indexOf('-totem') !== -1 || name.indexOf('-battle-bond') !== -1) {
// Totem and Ash-Greninja battle-bond forms are not shown at all.
continue;
} else if (name.indexOf('-mega') !== -1 || name.indexOf('-primal') !== -1 || name === 'greninja-ash'
|| this.pokemon.id === 800 // Necrozma
) {
if (name === 'necrozma-dusk') {
formattedName = 'Dusk Mane Necrozma';
} else if (name === 'necrozma-dawn') {
formattedName = 'Dawn Wings Necrozma';
} else {
// '<species>-<suffix>' -> '<suffix> <species>' (e.g. 'mega charizard x').
const re = '(' + this.pokemon.species['n'] + ')[-]([a-z]*)';
const regExp = new RegExp(re, 'g');
formattedName = name.replace(regExp, '$2 $1');
formattedName = formattedName.replace(/-/g, ' ');
}
} else if (name.indexOf('-alola') !== -1 && this.pokemon.id !== 25) { // Excluding Alola-Cap Pikachu
formattedName = 'Alolan ' + this.pokemon.species['n'];
} else {
// Default: drop the species prefix, keep only the suffix (e.g. 'attack' for Deoxys).
const re = '(' + this.pokemon.species['n'] + ')[-]([a-z]*)';
const regExp = new RegExp(re, 'g');
formattedName = name.replace(regExp, '$2');
if (this.pokemon.id !== 250) { // excluding Ho-Oh
formattedName = formattedName.replace(/-/g, ' ');
}
}
} else { // If minior
// Minior has many colour variants; only show one core and one meteor form.
if (name === 'minior-red') {
formattedName = 'core';
} else if (name === 'minior-red-meteor') {
formattedName = 'meteor';
} else {
continue;
}
}
this.formattedFormNames.push(formattedName);
}
}
// Builds Pokemon objects for each alternate form (indices 1+ of pokemonForms;
// index 0 is the default form set by initializePokemonAndForms) from bundled data.
requestForms() {
// if (this.pokemon.id === 801 && !this.varietiesReversed) { // For magearna Reverse the varieties
// this.pokemon.varieties.reverse();
// this.varietiesReversed = true;
// }
const formIDs = [];
if (this.pokemon.id !== 774) { // Skipping Minior
// slice(1): the first variety is the default form, already stored at index 0.
for (const variety of this.pokemon.varieties.slice(1)) {
if (variety['n'].indexOf('-totem') !== -1 || variety['n'] === 'greninja-battle-bond') {
continue;
// Skipping these forms
}
formIDs.push(variety['id']);
// formRequests.push(this.pokemonService.getPokemonByURL(variety['id']));
}
} else {
// Minior: keep only the red core and red meteor variants.
for (const variety of this.pokemon.varieties.slice(1)) {
if (variety['n'] === 'minior-red' || variety['n'] === 'minior-red-meteor') {
formIDs.push(variety['id']);
}
}
}
let i = 1;
for (const id of formIDs) {
// Form data comes from the bundled abbreviated-key JSON (N=name, T=types, ...).
const form = this.pokemonService.pokemonJSON[id.toString()];
this.pokemonForms[i] = new Pokemon(
form['N'],
form['id'],
form['T'],
form['Ab'],
form['H'],
form['W'],
form['BE'],
form['HI'],
form['isD'],
this.pokemonService.pokemonMovesCSV[id.toString()],
form['St'],
form['Sp'],
this.pokemon.speciesDetails,
this.pokemon.color,
this.pokemon.genera,
this.pokemon.varieties,
this.pokemon.evolutionChainID,
);
i = i + 1;
}
}
// Switches the page to form index `i`: optionally plays the mega-evolution
// animation, then (after a 400ms fade) copies the form's data into this.pokemon.
selectForm(i) {
if (this.selectedFormNo === i || this.pokemonForms[i] === undefined) {
return;
}
this.currentMoveID = null;
this.visible = false;
// Play the mega animation only when going non-mega -> mega, it is enabled,
// we are online, and no image load is already in flight.
if ((this.pokemonForms[i].name.indexOf('-mega') !== -1) && (this.pokemonForms[this.selectedFormNo].name.indexOf('-mega') === -1)
&& (this.megaEvolveAnimationEnabled) && (this.isOnline) && (this.pokemonService.megaEvolutionMainSwitch) && (!this.imageLoading)) {
this.megaEvolve();
} else {
this.imageVisible = false;
// this.megaEvolveAnimationEnabled = false;
}
// The 400ms delay lets the fade-out transition finish before data swaps.
setTimeout(() => {
this.selectedFormNo = i;
if (this.pokemonForms[i].name === this.pokemon.species['n']) {
this.pokemon.name = this.pokemon.species['n'];
} else if ((this.pokemon.id !== 25) // excluding Pikachu
&& ((this.pokemonForms[i].name.indexOf('-mega') !== -1)
|| (this.pokemonForms[i].name.indexOf('-primal') !== -1)
|| (this.pokemonForms[i].name.indexOf('-alola') !== -1)
|| (this.pokemonForms[i].name === 'greninja-ash')
|| (this.pokemon.id === 800)) // Necrozma
) {
this.pokemon.name = this.formattedFormNames[i];
} else {
this.pokemon.name = this.pokemon.species['n'] + ' [' + this.formattedFormNames[i] + ']';
}
// Overwrite the mutable active Pokémon with the selected form's data.
this.pokemon.types = this.pokemonForms[i].types;
this.pokemon.abilities = this.pokemonForms[i].abilities;
this.pokemon.height = this.pokemonForms[i].height;
this.pokemon.weight = this.pokemonForms[i].weight;
this.pokemon.baseExperience = this.pokemonForms[i].baseExperience;
this.pokemon.heldItems = this.pokemonForms[i].heldItems;
this.pokemon.is_default = this.pokemonForms[i].is_default;
this.pokemon.moves = this.pokemonService.pokemonMovesCSV[this.pokemonForms[i].id.toString()];
this.pokemon.stats = this.pokemonForms[i].stats;
this.pokemon.species = this.pokemonForms[i].species;
// For Non Default Forms Only
if (!this.pokemon.is_default) {
// Build the image file name from the form suffix, e.g. '006-Mega-X.png'.
const re = '(' + this.pokemon.species['n'] + ')[-]([a-z]*)';
const regExp = new RegExp(re, 'g');
const str = this.pokemonForms[i].name.replace(regExp, '$2');
this.pokemonImageUrl = 'https://raw.githubusercontent.com/HybridShivam/Pokemon/master/assets/images/' +
this.pad(this.pokemon.id, 3) + '-' + this.capitalizeSplitJoin(str, '-', '-') + '.png';
}
// For Default Forms and Initializing Fields
this.initializePokemonFields();
this.visible = true;
}, 400);
}
// Plays the mega-evolution animation: shows the sigil/sphere/bubble effects after
// 250ms, then after 5s reveals the new artwork — immediately if it has already
// loaded, otherwise as soon as imagePreload() signals it via the subject.
megaEvolve() {
    this.megaEvolving = true;
    this.imageLoadedForMegaEvolution = false;
    this.imageVisible = false;
    setTimeout(() => {
        this.SigilVisible = true;
        this.SphereVisible = true;
        this.BubblesVisible = true;
    }, 250);
    setTimeout(() => {
        if (this.imageLoadedForMegaEvolution) {
            this.finishMegaEvolve();
        } else {
            // Artwork still loading: finish as soon as it arrives, then clean up.
            const imageLoadedForMegaEvolutionSubscription = this.imageLoadedForMegaEvolutionSubject.subscribe((response) => {
                if (response) {
                    this.finishMegaEvolve();
                    imageLoadedForMegaEvolutionSubscription.unsubscribe();
                }
            });
        }
    }, 5000);
    // One animation per page visit.
    this.megaEvolveAnimationEnabled = false;
}
// Shared tail of the mega-evolution animation: hides the effects, reveals the
// artwork, and clears the animation flags after the sigil fade (2.1s).
finishMegaEvolve() {
    this.SphereVisible = false;
    this.BubblesVisible = false;
    this.imageVisible = true;
    this.imageLoadedForMegaEvolution = false;
    this.sigilEnd = true;
    setTimeout(() => {
        this.SigilVisible = false;
    }, 2100);
    setTimeout(() => {
        this.megaEvolving = false;
    }, 2100);
}
// Flattens the (bundled) PokeAPI evolution-chain tree into this.evolutionChain.
// Most chains are linear; a few species fork, and those layouts are classified
// into exceptional types ('112', '12', '13', '18', '122') handled separately.
getEvolutionChain() {
    this.evolutionDesc = [];
    this.exceptionalChainType = '';
    const evoID = this.pokemon.evolutionChainID;
    const response = this.pokemonService.evolutionChains[evoID - 1];
    this.evolutionChain = [];
    let chain = response['chain'];
    const baseSpecies = chain['species']['name'];
    // Classify irregular layouts, e.g. '112' = two single stages then a fork.
    if (this.evolutionChainExceptions_112.indexOf(baseSpecies) > -1) {
        this.exceptionalChainType = '112';
    } else if (this.evolutionChainExceptions_12.indexOf(baseSpecies) > -1) {
        this.exceptionalChainType = '12';
    } else if (this.evolutionChainExceptions_13.indexOf(baseSpecies) > -1) {
        this.exceptionalChainType = '13';
    } else if (this.evolutionChainExceptions_18.indexOf(baseSpecies) > -1) {
        this.exceptionalChainType = '18';
    } else if (this.evolutionChainExceptions_122.indexOf(baseSpecies) > -1) {
        this.exceptionalChainType = '122';
    }
    switch (this.exceptionalChainType) {
        case '': // Normal case: walk evolves_to[0] to the end of the line.
            do {
                this.evolutionChain.push(this.evoEntry(chain));
                chain = chain['evolves_to'][0];
            } while (chain !== undefined);
            break;
        case '112': // Two fixed stages, then a fork at the final stage.
            this.evolutionChain.push(this.evoEntry(chain));
            this.evolutionChain.push(this.evoEntry(chain['evolves_to'][0]));
            this.evolutionChain[2] = [];
            for (let i = 0; chain['evolves_to'][0]['evolves_to'][i] !== undefined; i++) {
                this.evolutionChain[2].push(this.evoEntry(chain['evolves_to'][0]['evolves_to'][i]));
            }
            break;
        case '12':
        case '13':
        case '18': // Base stage that forks immediately into 2, 3 or 8 branches.
            this.evolutionChain.push(this.evoEntry(chain));
            this.evolutionChain[1] = [];
            for (let i = 0; chain['evolves_to'][i] !== undefined; i++) {
                this.evolutionChain[1].push(this.evoEntry(chain['evolves_to'][i]));
            }
            break;
        case '122': // Wurmple family: one base, two cocoons, two final forms.
            this.evolutionChain.push(this.evoEntry(chain));
            this.evolutionChain[1] = [
                this.evoEntry(chain['evolves_to'][0]), // silcoon
                this.evoEntry(chain['evolves_to'][1])  // cascoon
            ];
            this.evolutionChain[2] = [
                this.evoEntry(chain['evolves_to'][0]['evolves_to'][0]), // Beautifly
                this.evoEntry(chain['evolves_to'][1]['evolves_to'][0])  // Dustox
            ];
    }
    this.generateEvolutionMethods();
}
// Normalizes one evolution-chain node into the tuple used everywhere else:
// [species name, species id, is_baby, evolution_details].
evoEntry(node) {
    return [
        node['species']['name'],
        this.getIdfromURL(node['species']['url']),
        node['is_baby'],
        node['evolution_details']
    ];
}
// Builds human-readable evolution method strings (this.evolutionDesc) parallel
// to this.evolutionChain. The '112' and '12'/'13'/'14'/'18' branches previously
// had byte-identical bodies; they are merged into one fallthrough group here.
generateEvolutionMethods() {
    switch (this.exceptionalChainType) {
        case '': // Normal case: one description per evolved stage.
            for (const link of this.evolutionChain) {
                const stage = link[3][0];
                if (stage !== undefined) {
                    this.evolutionDesc.push(this.generateEvolutionMethodsLogic(stage));
                }
            }
            break;
        case '112':
        case '12':
        case '13':
        case '14': // Note: '14' is never assigned by getEvolutionChain; kept for parity.
        case '18': {
            // Forked chains: the last element of evolutionChain is an array of
            // branch tuples; it yields an array of descriptions.
            let i = 0;
            for (const link of this.evolutionChain) {
                if (i === this.evolutionChain.length - 1) {
                    // Last Stage
                    this.evolutionDesc.push([]);
                    for (let sideStage of link) {
                        sideStage = sideStage[3][0];
                        this.evolutionDesc[i - 1].push(this.generateEvolutionMethodsLogic(sideStage));
                    }
                } else {
                    // Initial Stages
                    const stage = link[3][0];
                    if (stage !== undefined) {
                        this.evolutionDesc.push(this.generateEvolutionMethodsLogic(stage));
                    }
                }
                i++;
            }
            break;
        }
        case '122':
            // Wurmple: branch is decided by personality value, not a normal trigger.
            this.evolutionDesc = [['Level 7 based on PV', 'Level 7 based on PV'],
                ['Level 10+', 'Level 10+']];
    }
}
// Renders one evolution_details entry as a human-readable string, e.g.
// "Level 16+", "Trade holding Metal Coat", "Use Thunder Stone (Female)".
// Returns '' for unrecognized triggers.
generateEvolutionMethodsLogic(stage) {
    let desc = '';
    switch (stage['trigger']['name']) {
        case 'level-up':
            if (stage['min_level'] !== null) {
                desc = 'Level ' + stage['min_level'] + '+';
            } else {
                desc = 'Level up';
            }
            if (stage['gender'] !== null) {
                desc = desc + this.genderSuffix(stage);
            }
            if (stage['held_item'] !== null) {
                const held_item = this.capitalizeSplitJoin(stage['held_item']['name'], '-', ' ');
                desc = desc + ' holding ' + held_item;
            }
            if (stage['known_move'] !== null) {
                const known_move = this.capitalizeSplitJoin(stage['known_move']['name'], '-', ' ');
                desc = desc + ' knowing ' + known_move;
            }
            if (stage['known_move_type'] !== null) {
                const known_move_type = this.capitalizeSplitJoin(stage['known_move_type']['name'], '-', ' ');
                desc = desc + ' knowing a ' + known_move_type + ' move';
            }
            if (stage['min_affection'] !== null) {
                desc = desc + ' with ' + stage['min_affection'] + '+ Affection';
            }
            if (stage['min_beauty'] !== null) {
                desc = desc + ' with ' + stage['min_beauty'] + '+ Beauty';
            }
            if (stage['min_happiness'] !== null) {
                desc = desc + ' with ' + stage['min_happiness'] + '+ Happiness';
            }
            if (stage['relative_physical_stats'] !== null) {
                // 1 = Attack > Defence, -1 = Attack < Defence, 0 = equal (Tyrogue).
                let sign;
                if (stage['relative_physical_stats'] === 1) {
                    sign = '>';
                } else if (stage['relative_physical_stats'] === -1) {
                    sign = '<';
                } else {
                    sign = '=';
                }
                desc = desc + ' with Attack ' + sign + ' Defence';
            }
            if (stage['party_species'] !== null) {
                const party_species = this.capitalizeSplitJoin(stage['party_species']['name'], '-', ' ');
                desc = desc + ' with ' + party_species + ' in party';
            }
            if (stage['party_type'] !== null) {
                const party_type = this.capitalizeSplitJoin(stage['party_type']['name'], '-', ' ');
                desc = desc + ' with a ' + party_type + ' type in party';
            }
            if (stage['location'] !== null) {
                const location = this.capitalizeSplitJoin(stage['location']['name'], '-', ' ');
                desc = desc + ' at ' + location;
            }
            if (stage['needs_overworld_rain'] !== false) {
                desc = desc + ' during Rain';
            }
            if (stage['time_of_day'] !== '') {
                const time_of_day = this.capitalizeSplitJoin(stage['time_of_day'], '-', ' ');
                desc = desc + ' at ' + time_of_day + 'time';
            }
            if (stage['turn_upside_down'] !== false) {
                desc = desc + ' holding 3DS upside-down';
            }
            // item:null;
            // trade_species:null;
            break;
        case 'trade':
            desc = 'Trade';
            if (stage['held_item'] !== null) {
                const held_item = this.capitalizeSplitJoin(stage['held_item']['name'], '-', ' ');
                desc = desc + ' holding ' + held_item;
            }
            if (stage['trade_species'] !== null) {
                const trade_species = this.capitalizeSplitJoin(stage['trade_species']['name'], '-', ' ');
                desc = desc + ' with ' + trade_species;
            }
            if (stage['gender'] !== null) {
                desc = desc + this.genderSuffix(stage);
            }
            break;
        case 'use-item':
            desc = 'Use';
            if (stage['item'] !== null) {
                const item = this.capitalizeSplitJoin(stage['item']['name'], '-', ' ');
                desc = desc + ' ' + item;
            }
            if (stage['gender'] !== null) {
                desc = desc + this.genderSuffix(stage);
            }
            break;
        case 'shed':
            desc = 'Level 20, with empty PokéBall and an open slot in party';
            break;
    }
    return desc;
}
// Gender requirement suffix; PokeAPI encodes female = 1, male = 2.
// (The check was previously duplicated in three trigger branches.)
genderSuffix(stage) {
    if (stage['gender'] === 2) {
        return ' (Male)';
    }
    return ' (Female)';
}
// Highlights evolution-chain member `id` and resets form/move selection;
// clicking the already-selected member is a no-op.
selectEvolution(id) {
    if (this.selectedEvolutionId === id) {
        return;
    }
    this.selectedFormNo = 0;
    this.imageVisible = false;
    this.selectedEvolutionId = id;
    this.currentMoveID = null;
}
// Extracts the numeric species id from a PokeAPI species URL,
// e.g. 'https://pokeapi.co/api/v2/pokemon-species/25/' -> 25.
getIdfromURL(url): number {
    const speciesUrlPattern = /https:\/\/pokeapi.co\/api\/v2\/pokemon-species\/(.+)\//g;
    const groups = speciesUrlPattern.exec(url);
    return +groups[1];
}
// Renders the gender ratio as an HTML snippet. speciesDetails['GeR'] is the
// PokeAPI gender_rate: -1 = genderless, otherwise the female share in eighths
// (0..8). The previous 9-case switch duplicated the markup; the percentages are
// computed arithmetically here (rate * 12.5 reproduces every original value).
generateGenderRates() {
    const rate = this.pokemon.speciesDetails['GeR'];
    if (rate === -1) {
        return '<span class="' + this.pokemon.color + '-text"' + '>Genderless <i class="fa fa-genderless"></i></span>';
    }
    const femalePercent = rate * 12.5;
    const malePercent = 100 - femalePercent;
    return '<span class="gender-male">' + malePercent + '% <i class="fa fa-mars"></i></span>,' +
        '<span class="gender-female"> ' + femalePercent + '% <i class="fa fa-venus"></i></span>';
}
// Total experience required to reach level 100 for each growth-rate curve,
// returned as a string for display. Unknown rates yield undefined.
getExperiencePoints(growth_rate: string) {
    const expToLevel100 = {
        'slow': '1250000',
        'medium': '1000000',
        'fast': '800000',
        'medium-slow': '1059860',
        'slow-then-very-fast': '600000',
        'fast-then-very-slow': '1640000'
    };
    return expToLevel100[growth_rate];
}
// Buckets every attacking type into a defensive multiplier (0x .. 4x) for this
// Pokémon's one or two types, using the static typeChart (each entry lists the
// attacker's 'immunes', 'weaknesses' = resists it, 'strengths' = super effective).
calculateTypeEffectiveness() {
this.typeDefences = {'4x': [], '2x': [], '1x': [], '0.5x': [], '0.25x': [], '0x': []};
let type1 = this.pokemon.types[0]['n'];
let type2;
if (this.pokemon.types[1] !== undefined) {
type2 = this.pokemon.types[1]['n'];
} else {
// Mono-typed: '' never matches any chart entry, so only type1 contributes.
type2 = '';
}
for (const type of this.typeChart) {
if ((type['immunes'].indexOf(type1) !== -1) || (type['immunes'].indexOf(type2) !== -1)) { // 0x
this.typeDefences['0x'].push(type['name']);
} else if ((type['weaknesses'].indexOf(type1) !== -1) && (type['weaknesses'].indexOf(type2) !== -1)) { // 0.25x
this.typeDefences['0.25x'].push(type['name']);
} else if (((type['strengths'].indexOf(type1) !== -1) && (type['strengths'].indexOf(type2) !== -1))) { // 4x
this.typeDefences['4x'].push(type['name']);
} else if (((type['strengths'].indexOf(type1) !== -1) && (type['weaknesses'].indexOf(type2) !== -1))
|| (((type['strengths'].indexOf(type2) !== -1) && (type['weaknesses'].indexOf(type1) !== -1)))) { // 1x
// 2x against one type and 0.5x against the other cancel out.
this.typeDefences['1x'].push(type['name']);
} else if (((type['strengths'].indexOf(type1) === -1) && (type['weaknesses'].indexOf(type2) !== -1))
|| (((type['strengths'].indexOf(type2) === -1) && (type['weaknesses'].indexOf(type1) !== -1)))) { // 0.5x
this.typeDefences['0.5x'].push(type['name']);
} else if (((type['strengths'].indexOf(type1) !== -1) && (type['weaknesses'].indexOf(type2) === -1))
|| (((type['strengths'].indexOf(type2) !== -1) && (type['weaknesses'].indexOf(type1) === -1)))) { // 2x
this.typeDefences['2x'].push(type['name']);
} else {
this.typeDefences['1x'].push(type['name']); // 1x
}
}
}
// Catch probability with a plain PokéBall at full HP, as a display string.
// NOTE(review): the parameter is actually the species catch rate (0-255),
// despite its name — confirm at the call site before renaming.
calculateCatchRate(genderRate) {
    const percent = (genderRate / (3 * 255) * 100).toFixed(1);
    return percent + '% PokéBall & Full HP';
}
// Maps a base-friendship value to a qualitative label (70 is the neutral value).
getFriendShip(friendship) {
    if (friendship > 100) {
        return 'High';
    }
    if (friendship > 70) {
        return 'Higher than Normal';
    }
    if (friendship === 70) {
        return 'Normal';
    }
    if (friendship >= 35) {
        return 'Lower than Normal';
    }
    if (friendship > 0) {
        return 'Low';
    }
    return 'Minimum';
}
// Clears and rebuilds the per-learn-method move lists for the selected game.
// On initial page load (delayMovesListLoad) the heavy work is deferred 2s so the
// rest of the page renders first; later version switches run synchronously.
getMoves() {
    const version = this.selectedGameVersion;
    this.levelUpMovesList = [];
    this.machineMovesList = [];
    this.eggMovesList = [];
    this.tutorMovesList = [];
    this.movesListLoaded = false;
    if (!this.delayMovesListLoad) {
        this.getMovesLogic(version);
        return;
    }
    setTimeout(() => {
        this.getMovesLogic(version);
    }, 2000);
}
// Partitions this.pokemon.moves (rows from the bundled moves CSV) into the four
// learn-method lists for `version`, sorts level-up moves by level, and exposes
// the list for the currently selected tab. Each list entry is
// [display name, level, move details, move id].
getMovesLogic(version) {
// const moveLearnMethods = {'level-up': 1, 'egg': 2, 'tutor': 3, 'machine': 4};
const versionID = this.versions[version];
for (const move of this.pokemon.moves) {
// for (const versionGroup of move['version_group_details']) {
// Loose == on purpose: CSV ids are strings, versionID is a number.
if (move.version_group_id == versionID) {
const moveDetails = this.pokemonService.movesDetails[move.move_id - 1];
// method_id comes from the CSV as a string, hence the string cases.
switch (move.pokemon_move_method_id) {
case '1': // level-up
this.levelUpMovesList.push([this.capitalizeSplitJoin(moveDetails.identifier, '-', ' '), move.level,
moveDetails, move.move_id]);
break;
case '2': // egg
this.eggMovesList.push([this.capitalizeSplitJoin(moveDetails.identifier, '-', ' '), move.level,
moveDetails, move.move_id]);
break;
case '3': // tutor
this.tutorMovesList.push([this.capitalizeSplitJoin(moveDetails.identifier, '-', ' '), move.level,
moveDetails, move.move_id]);
break;
case '4': // machine
this.machineMovesList.push([this.capitalizeSplitJoin(moveDetails.identifier, '-', ' '), move.level,
moveDetails, move.move_id]);
// this.moveMachineNos.push(this.fetchMachineDetailsFromCSVDataSlow(moveDetails.id, versionID));
break;
}
}
// }
}
// Sort level-up moves by learn level ascending (levels are strings; + coerces).
this.levelUpMovesList = this.levelUpMovesList.sort((obj1, obj2) => {
if (+obj1[1] > +obj2[1]) {
return 1;
}
if (+obj1[1] < +obj2[1]) {
return -1;
}
return 0;
});
// Show the list matching the active learn-method tab.
switch (this.selectedMove) {
case 'level-up':
this.movesList = this.levelUpMovesList;
break;
case 'machine':
this.movesList = this.machineMovesList;
break;
case 'egg':
this.movesList = this.eggMovesList;
break;
case 'tutor':
this.movesList = this.tutorMovesList;
break;
}
this.movesListLoaded = true;
this.getMoveDetails();
}
// Switches the moves table to another learn-method tab and swaps in the
// matching pre-resolved details list; re-clicking the active tab is a no-op.
selectMovesByLearnMethod(moveToSelect) {
    if (this.selectedMove === moveToSelect) {
        return;
    }
    this.currentMoveID = null;
    // [moves list, parallel details list] per learn method.
    const tabData = {
        'level-up': [this.levelUpMovesList, this.moveLevelDetails],
        'machine': [this.machineMovesList, this.moveMachineDetails],
        'egg': [this.eggMovesList, this.moveEggDetails],
        'tutor': [this.tutorMovesList, this.moveTutorDetails]
    };
    const selected = tabData[moveToSelect];
    if (selected === undefined) {
        return;
    }
    this.movesList = selected[0];
    this.moveDetails = selected[1];
    this.selectedMove = moveToSelect;
}
// Maps a CSV damage-class id (string) to its label; unknown ids yield undefined.
getDamageClass(id) {
    const damageClasses = {'1': 'Status', '2': 'Physical', '3': 'Special'};
    return damageClasses[id];
}
// Switches the selected game version and reloads the move lists immediately
// (the 2s delay is suppressed for the duration of the reload).
selectGameVersion(name) {
    if (this.selectedGameVersion === name) {
        return;
    }
    this.delayMovesListLoad = false;
    this.selectedGameVersion = name;
    this.currentMoveID = null;
    this.getMoves();
    this.delayMovesListLoad = true;
}
// True when a resource that debuted in generation `whatWeChecking`
// (e.g. 'generation-iii') exists in the currently selected game version.
// Replaces two long switches with lookup maps; unknown keys behave the same as
// the original's unmatched switch cases (undefined comparisons -> true).
availableInSelectedGen(whatWeChecking) {
    const genBySlug = {
        'generation-i': 1, 'generation-ii': 2, 'generation-iii': 3, 'generation-iv': 4,
        'generation-v': 5, 'generation-vi': 6, 'generation-vii': 7
    };
    const genByVersion = {
        'red-blue': 1, 'yellow': 1,
        'gold-silver': 2, 'crystal': 2,
        'ruby-sapphire': 3, 'emerald': 3, 'firered-leafgreen': 3,
        'diamond-pearl': 4, 'platinum': 4, 'heartgold-soulsilver': 4,
        'black-white': 5, 'black-2-white-2': 5,
        'x-y': 6, 'omega-ruby-alpha-sapphire': 6,
        'sun-moon': 7, 'ultra-sun-ultra-moon': 7
    };
    const gen = genBySlug[whatWeChecking];
    const selectedGameGen = genByVersion[this.selectedGameVersion];
    // Unavailable only when it debuted after the selected games' generation.
    return !(gen > selectedGameGen);
}
// Expands move row `id` (an index into movesList/moveDetails) and fills the
// effect text, substituting the move's effect_chance into '$effect_chance'
// placeholders, plus the flavor text for the selected game version.
selectMove(id) {
if (this.moveDetails.length === 0) {
return;
}
this.currentMoveData = this.moveDetails[id];
this.currentMoveID = id;
if (this.currentMoveData['effect_chance'] !== null) {
this.moveShortEffect = this.currentMoveData['effect_entries']['short_effect'].replace(/\$effect_chance/g,
this.movesList[id][2].effect_chance);
this.moveEffect = this.currentMoveData['effect_entries']['effect'].replace(/\$effect_chance/g,
this.movesList[id][2].effect_chance);
} else {
this.moveShortEffect = this.currentMoveData['effect_entries']['short_effect'];
this.moveEffect = this.currentMoveData['effect_entries']['effect'];
}
if (this.selectedGameVersion === 'red-blue' || this.selectedGameVersion === 'yellow') {
this.moveFlavorTextEntry = 'Selected games had no flavor text entries!';
} else {
// for (const entry of this.currentMoveData['flavor_text_entries']) {
// Flavor texts are pre-keyed by version-group id; if the selected version has
// no entry, the previously shown text is intentionally left in place.
if (this.currentMoveData['flavor_text_entries'][this.versions[this.selectedGameVersion]] !== undefined) {
this.moveFlavorTextEntry = this.currentMoveData['flavor_text_entries'][this.versions[this.selectedGameVersion]];
// break;
}
// }
}
}
// Resolves full move details for every list entry from the bundled move JSON
// (and TM/HM numbers for machine moves), then exposes the set matching the
// active learn-method tab.
getMoveDetails() {
const moveRequests = [[], [], [], []];
this.moveDetails = [];
this.moveLevelDetails = [];
this.moveMachineDetails = [];
this.moveMachineNos = [];
this.moveEggDetails = [];
this.moveTutorDetails = [];
for (const move of this.levelUpMovesList) {
// move[3] may be a full PokeAPI move URL or already a bare id; the replace
// extracts the id from a URL and leaves a bare id unchanged.
const moveID = move[3].replace(/http(s)?:\/\/pokeapi.co\/api\/v2\/move\/(\d+)\//, '$2');
this.moveLevelDetails.push(this.pokemonService.moveJSON[moveID - 1]);
// moveRequests[0].push(this.pokemonService.getMoveByURL(move[3]));
}
for (const move of this.machineMovesList) {
const moveID = move[3].replace(/http(s)?:\/\/pokeapi.co\/api\/v2\/move\/(\d+)\//, '$2');
this.moveMachineDetails.push(this.pokemonService.moveJSON[moveID - 1]);
// Machine moves additionally get their TM/HM label for the first column.
this.getAndAddMachineNo(this.pokemonService.moveJSON[moveID - 1]['machines']);
}
for (const move of this.eggMovesList) {
const moveID = move[3].replace(/http(s)?:\/\/pokeapi.co\/api\/v2\/move\/(\d+)\//, '$2');
this.moveEggDetails.push(this.pokemonService.moveJSON[moveID - 1]);
// moveRequests[2].push(this.pokemonService.getMoveByURL(move[3]));
}
for (const move of this.tutorMovesList) {
const moveID = move[3].replace(/http(s)?:\/\/pokeapi.co\/api\/v2\/move\/(\d+)\//, '$2');
this.moveTutorDetails.push(this.pokemonService.moveJSON[moveID - 1]);
}
switch (this.selectedMove) {
case 'level-up':
this.moveDetails = this.moveLevelDetails;
break;
case 'machine':
this.moveDetails = this.moveMachineDetails;
break;
case 'egg':
this.moveDetails = this.moveEggDetails;
break;
case 'tutor':
this.moveDetails = this.moveTutorDetails;
break;
}
}
getAndAddMachineNo(machines) {
    // Look up the machine id for the currently selected game version and
    // append its TM/HM label to the running list.
    const machineID = machines[this.versions[this.selectedGameVersion]];
    this.moveMachineNos.push(this.fetchMachineNumberFromCSVData(machineID));
}
fetchMachineNumberFromCSVData(machineID) {
    // Machine numbers up to 100 are TMs; higher numbers map onto HM slots
    // (HM01 is stored as 101, etc.). Labels are zero-padded to two digits.
    const machineNumber = this.pokemonService.machineDetails[machineID - 1].machine_number;
    if (+machineNumber <= 100) {
        return 'TM' + this.pad(machineNumber, 2);
    }
    return 'HM' + this.pad(machineNumber - 100, 2);
}
/**
 * Splits `str` on `split`, capitalizes the first character of each piece and
 * joins the pieces back with `join` (e.g. 'mega-punch' -> 'Mega Punch').
 *
 * Fixes: the original indexed `str[i][0]` without guarding, so an empty piece
 * (consecutive separators, leading/trailing separator) threw a TypeError;
 * it also reused the `str` parameter for the split array and used the
 * deprecated `substr`. Empty pieces are now passed through unchanged.
 */
capitalizeSplitJoin(str, split: string, join: string) {
    return str
        .split(split)
        .map((part: string) => (part.length > 0 ? part[0].toUpperCase() + part.slice(1) : part))
        .join(join);
}
ngOnDestroy() {
    // Clear the shared service state and remember which pokemon was open
    // so the list view can restore scroll/selection.
    this.pokemonService.activePokemon.next(null);
    this.pokemonService.previousPokemonID.next(this.pokemonId);
    // Tear down any bootstrap modal artifacts left on the page.
    const body = document.getElementsByTagName('body')[0];
    body.classList.remove('modal-open');
    body.style.paddingRight = 'unset';
    // getElementsByClassName returns a live collection, so removing index 0
    // repeatedly drains every backdrop element.
    const backdrops = document.getElementsByClassName('modal-backdrop');
    while (backdrops.length > 0) {
        backdrops[0].remove();
    }
}
} | the_stack |
import { FillQuoteTransformerOrderType, RfqOrderFields, Signature } from '@0x/protocol-utils';
import { BigNumber } from '@0x/utils';
import _ = require('lodash');
import { MarketOperation, NativeOrderWithFillableAmounts } from '../types';
import {
CollapsedFill,
DexSample,
ERC20BridgeSource,
FillData,
MultiHopFillData,
NativeCollapsedFill,
NativeFillData,
NativeLimitOrderFillData,
NativeRfqOrderFillData,
RawQuotes,
} from './market_operation_utils/types';
import { QuoteRequestor, V4RFQIndicativeQuoteMM } from './quote_requestor';
/** Fields shared by every quote-report entry, regardless of liquidity source. */
export interface QuoteReportEntryBase {
    liquiditySource: ERC20BridgeSource;
    makerAmount: BigNumber;
    takerAmount: BigNumber;
    fillData: FillData;
}
/** Entry for an on-chain (bridge) source — anything except Native liquidity. */
export interface BridgeQuoteReportEntry extends QuoteReportEntryBase {
    liquiditySource: Exclude<ERC20BridgeSource, ERC20BridgeSource.Native>;
}
/** Entry for a two-hop route; `hopSources` lists the source of each hop. */
export interface MultiHopQuoteReportEntry extends QuoteReportEntryBase {
    liquiditySource: ERC20BridgeSource.MultiHop;
    hopSources: ERC20BridgeSource[];
}
/** Entry for a native open-orderbook limit order (not RFQ). */
export interface NativeLimitOrderQuoteReportEntry extends QuoteReportEntryBase {
    liquiditySource: ERC20BridgeSource.Native;
    fillData: NativeFillData;
    fillableTakerAmount: BigNumber;
    isRFQ: false;
}
/** Entry for a firm (signed) RFQ order, including the maker that quoted it. */
export interface NativeRfqOrderQuoteReportEntry extends QuoteReportEntryBase {
    liquiditySource: ERC20BridgeSource.Native;
    fillData: NativeFillData;
    fillableTakerAmount: BigNumber;
    isRFQ: true;
    nativeOrder: RfqOrderFields;
    makerUri: string;
    comparisonPrice?: number;
}
/** Entry for an indicative (unsigned) RFQ quote; no order or signature exists yet. */
export interface IndicativeRfqOrderQuoteReportEntry extends QuoteReportEntryBase {
    liquiditySource: ERC20BridgeSource.Native;
    fillableTakerAmount: BigNumber;
    isRFQ: true;
    makerUri?: string;
    comparisonPrice?: number;
}
/** Entry shapes that can appear in the basic QuoteReport. */
export type QuoteReportEntry =
    | BridgeQuoteReportEntry
    | MultiHopQuoteReportEntry
    | NativeLimitOrderQuoteReportEntry
    | NativeRfqOrderQuoteReportEntry;
/** Extended report additionally admits indicative RFQ entries. */
export type ExtendedQuoteReportEntry =
    | BridgeQuoteReportEntry
    | MultiHopQuoteReportEntry
    | NativeLimitOrderQuoteReportEntry
    | NativeRfqOrderQuoteReportEntry
    | IndicativeRfqOrderQuoteReportEntry;
/** Extended entry tagged with its position in the report and delivery status. */
export type ExtendedQuoteReportIndexedEntry = ExtendedQuoteReportEntry & {
    quoteEntryIndex: number;
    isDelivered: boolean;
};
/** Outbound form of an indexed entry: fillData is JSON-serialized (see jsonifyFillData). */
export type ExtendedQuoteReportIndexedEntryOutbound = Omit<ExtendedQuoteReportIndexedEntry, 'fillData'> & {
    fillData?: string;
};
/** Basic report: sources evaluated vs. sources actually used in the quote. */
export interface QuoteReport {
    sourcesConsidered: QuoteReportEntry[];
    sourcesDelivered: QuoteReportEntry[];
}
/** Extended report over indexed entries, before outbound serialization. */
export interface ExtendedQuoteReportSources {
    sourcesConsidered: ExtendedQuoteReportIndexedEntry[];
    sourcesDelivered: ExtendedQuoteReportIndexedEntry[] | undefined;
}
/** Wire-format extended report with request metadata attached. */
export interface ExtendedQuoteReport {
    quoteId?: string;
    taker?: string;
    timestamp: number;
    firmQuoteReport: boolean;
    submissionBy: 'taker' | 'metaTxn' | 'rfqm';
    buyAmount?: string;
    sellAmount?: string;
    buyTokenAddress: string;
    sellTokenAddress: string;
    integratorId?: string;
    slippageBips?: number;
    zeroExTransactionHash?: string;
    decodedUniqueId?: string;
    sourcesConsidered: ExtendedQuoteReportIndexedEntryOutbound[];
    sourcesDelivered: ExtendedQuoteReportIndexedEntryOutbound[] | undefined;
}
/** Per-source-category breakdown used for price comparison endpoints. */
export interface PriceComparisonsReport {
    dexSources: BridgeQuoteReportEntry[];
    multiHopSources: MultiHopQuoteReportEntry[];
    nativeSources: Array<NativeLimitOrderQuoteReportEntry | NativeRfqOrderQuoteReportEntry>;
}
/**
 * Generates a report of sources considered while computing the optimized
 * swap quote, and the sources ultimately included in the computed quote.
 * Only RFQ native orders are reported as "considered" here; the extended
 * report covers all source types.
 */
export function generateQuoteReport(
    marketOperation: MarketOperation,
    nativeOrders: NativeOrderWithFillableAmounts[],
    liquidityDelivered: ReadonlyArray<CollapsedFill> | DexSample<MultiHopFillData>,
    comparisonPrice?: BigNumber | undefined,
    quoteRequestor?: QuoteRequestor,
): QuoteReport {
    const nativeEntries = nativeOrders.map(o =>
        nativeOrderToReportEntry(o.type, o as any, o.fillableTakerAmount, comparisonPrice, quoteRequestor),
    );
    const sourcesConsidered = nativeEntries.filter(entry => entry.isRFQ);
    let sourcesDelivered;
    if (Array.isArray(liquidityDelivered)) {
        // Index fillable amounts by signature-derived order id for quick lookup.
        const fillableAmountsById: { [id: string]: BigNumber } = {};
        for (const o of nativeOrders) {
            fillableAmountsById[_nativeDataToId(o)] = o.fillableTakerAmount;
        }
        sourcesDelivered = liquidityDelivered.map(fill =>
            _isNativeOrderFromCollapsedFill(fill)
                ? nativeOrderToReportEntry(
                      fill.type,
                      fill.fillData,
                      fillableAmountsById[_nativeDataToId(fill.fillData)],
                      comparisonPrice,
                      quoteRequestor,
                  )
                : dexSampleToReportSource(fill, marketOperation),
        );
    } else {
        // A non-array delivery is a single two-hop sample.
        sourcesDelivered = [
            // tslint:disable-next-line: no-unnecessary-type-assertion
            multiHopSampleToReportSource(liquidityDelivered as DexSample<MultiHopFillData>, marketOperation),
        ];
    }
    return {
        sourcesConsidered,
        sourcesDelivered,
    };
}
/**
 * Generates a report of sources considered while computing the optimized
 * swap quote, and the sources ultimately included in the computed quote.
 * This extended version includes ALL considered quotes (native, indicative
 * RFQ, multi-hop and DEX samples), not only native liquidity, and tags each
 * entry with its index and whether it was delivered.
 *
 * Fix: delivered entries are now flagged `isDelivered: true` — they were
 * previously indexed with `false`, making delivered sources
 * indistinguishable from merely-considered ones downstream.
 */
export function generateExtendedQuoteReportSources(
    marketOperation: MarketOperation,
    quotes: RawQuotes,
    liquidityDelivered: ReadonlyArray<CollapsedFill> | DexSample<MultiHopFillData>,
    amount: BigNumber,
    comparisonPrice?: BigNumber | undefined,
    quoteRequestor?: QuoteRequestor,
): ExtendedQuoteReportSources {
    const sourcesConsidered: ExtendedQuoteReportEntry[] = [];
    // Native orders (limit + firm RFQ)
    sourcesConsidered.push(
        ...quotes.nativeOrders.map(order =>
            nativeOrderToReportEntry(
                order.type,
                order as any,
                order.fillableTakerAmount,
                comparisonPrice,
                quoteRequestor,
            ),
        ),
    );
    // Indicative RFQ quotes
    sourcesConsidered.push(
        ...quotes.rfqtIndicativeQuotes.map(order => indicativeQuoteToReportEntry(order, comparisonPrice)),
    );
    // Multi-hop (two-hop) quotes
    sourcesConsidered.push(...quotes.twoHopQuotes.map(quote => multiHopSampleToReportSource(quote, marketOperation)));
    // DEX quotes — keep only the sample covering the full requested amount
    sourcesConsidered.push(
        ..._.flatten(
            quotes.dexQuotes.map(dex =>
                dex
                    .filter(quote => isDexSampleForTotalAmount(quote, marketOperation, amount))
                    .map(quote => dexSampleToReportSource(quote, marketOperation)),
            ),
        ),
    );
    const sourcesConsideredIndexed = sourcesConsidered.map(
        (quote, index): ExtendedQuoteReportIndexedEntry => {
            return {
                ...quote,
                quoteEntryIndex: index,
                isDelivered: false,
            };
        },
    );
    let sourcesDelivered;
    if (Array.isArray(liquidityDelivered)) {
        // create easy way to look up fillable amounts
        const nativeOrderSignaturesToFillableAmounts = _.fromPairs(
            quotes.nativeOrders.map(o => {
                return [_nativeDataToId(o), o.fillableTakerAmount];
            }),
        );
        // map sources delivered
        sourcesDelivered = liquidityDelivered.map(collapsedFill => {
            if (_isNativeOrderFromCollapsedFill(collapsedFill)) {
                return nativeOrderToReportEntry(
                    collapsedFill.type,
                    collapsedFill.fillData,
                    nativeOrderSignaturesToFillableAmounts[_nativeDataToId(collapsedFill.fillData)],
                    comparisonPrice,
                    quoteRequestor,
                );
            } else {
                return dexSampleToReportSource(collapsedFill, marketOperation);
            }
        });
    } else {
        sourcesDelivered = [
            // tslint:disable-next-line: no-unnecessary-type-assertion
            multiHopSampleToReportSource(liquidityDelivered as DexSample<MultiHopFillData>, marketOperation),
        ];
    }
    const sourcesDeliveredIndexed = sourcesDelivered.map(
        (quote, index): ExtendedQuoteReportIndexedEntry => {
            return {
                ...quote,
                quoteEntryIndex: index,
                // These entries WERE delivered in the final quote.
                isDelivered: true,
            };
        },
    );
    return {
        sourcesConsidered: sourcesConsideredIndexed,
        sourcesDelivered: sourcesDeliveredIndexed,
    };
}
/** Derives a lookup id for a native order by concatenating its signature parts. */
function _nativeDataToId(data: { signature: Signature }): string {
    const sig = data.signature;
    return [sig.v, sig.r, sig.s].join('');
}
/**
* Generates a report sample for a DEX source
* NOTE: this is used for the QuoteReport and quote price comparison data
*/
export function dexSampleToReportSource(ds: DexSample, marketOperation: MarketOperation): BridgeQuoteReportEntry {
const liquiditySource = ds.source;
if (liquiditySource === ERC20BridgeSource.Native) {
throw new Error(`Unexpected liquidity source Native`);
}
// input and output map to different values
// based on the market operation
if (marketOperation === MarketOperation.Buy) {
return {
makerAmount: ds.input,
takerAmount: ds.output,
liquiditySource,
fillData: ds.fillData,
};
} else if (marketOperation === MarketOperation.Sell) {
return {
makerAmount: ds.output,
takerAmount: ds.input,
liquiditySource,
fillData: ds.fillData,
};
} else {
throw new Error(`Unexpected marketOperation ${marketOperation}`);
}
}
/**
 * Checks if a DEX sample is the one that represents the whole amount
 * requested by the taker.
 * NOTE: this is used for the QuoteReport to filter samples.
 *
 * Fix: `===` on BigNumber compares object references, not values, so a
 * sample whose amount equals the request but lives in a different BigNumber
 * instance was silently excluded. Value equality (`isEqualTo`) is used now.
 */
function isDexSampleForTotalAmount(ds: DexSample, marketOperation: MarketOperation, amount: BigNumber): boolean {
    // input and output map to different values based on the market operation
    // NOTE(review): comparing `output` for sells mirrors the original
    // branch choice — confirm the sampler really keys sell samples by output.
    if (marketOperation === MarketOperation.Buy) {
        return ds.input.isEqualTo(amount);
    } else if (marketOperation === MarketOperation.Sell) {
        return ds.output.isEqualTo(amount);
    } else {
        throw new Error(`Unexpected marketOperation ${marketOperation}`);
    }
}
/**
* Generates a report sample for a MultiHop source
* NOTE: this is used for the QuoteReport and quote price comparison data
*/
export function multiHopSampleToReportSource(
ds: DexSample<MultiHopFillData>,
marketOperation: MarketOperation,
): MultiHopQuoteReportEntry {
const { firstHopSource: firstHop, secondHopSource: secondHop } = ds.fillData;
// input and output map to different values
// based on the market operation
if (marketOperation === MarketOperation.Buy) {
return {
liquiditySource: ERC20BridgeSource.MultiHop,
makerAmount: ds.input,
takerAmount: ds.output,
fillData: ds.fillData,
hopSources: [firstHop.source, secondHop.source],
};
} else if (marketOperation === MarketOperation.Sell) {
return {
liquiditySource: ERC20BridgeSource.MultiHop,
makerAmount: ds.output,
takerAmount: ds.input,
fillData: ds.fillData,
hopSources: [firstHop.source, secondHop.source],
};
} else {
throw new Error(`Unexpected marketOperation ${marketOperation}`);
}
}
/** Type guard: true when the collapsed fill is backed by a native (limit or RFQ) order. */
function _isNativeOrderFromCollapsedFill(cf: CollapsedFill): cf is NativeCollapsedFill {
    return [FillQuoteTransformerOrderType.Limit, FillQuoteTransformerOrderType.Rfq].includes(cf.type);
}
/**
 * Generates a report entry for a native order
 * NOTE: this is used for the QuoteReport and quote price comparison data
 *
 * @param type order flavor (Limit vs Rfq); decides which entry shape is built
 * @param fillData native fill data carrying the order and its signature
 * @param fillableAmount taker amount actually fillable on this order
 * @param comparisonPrice optional benchmark price attached to RFQ entries only
 * @param quoteRequestor used to recover the maker URI from the RFQ signature
 */
export function nativeOrderToReportEntry(
    type: FillQuoteTransformerOrderType,
    fillData: NativeLimitOrderFillData | NativeRfqOrderFillData,
    fillableAmount: BigNumber,
    comparisonPrice?: BigNumber | undefined,
    quoteRequestor?: QuoteRequestor,
): NativeRfqOrderQuoteReportEntry | NativeLimitOrderQuoteReportEntry {
    const nativeOrderBase = {
        makerAmount: fillData.order.makerAmount,
        takerAmount: fillData.order.takerAmount,
        fillableTakerAmount: fillableAmount,
    };
    // if we find this is an rfqt order, label it as such and associate makerUri
    const isRFQ = type === FillQuoteTransformerOrderType.Rfq;
    const rfqtMakerUri =
        isRFQ && quoteRequestor ? quoteRequestor.getMakerUriForSignature(fillData.signature) : undefined;
    if (isRFQ) {
        const nativeOrder = fillData.order as RfqOrderFields;
        // comparisonPrice is only attached when present (spreading {} adds nothing)
        // tslint:disable-next-line: no-object-literal-type-assertion
        return {
            liquiditySource: ERC20BridgeSource.Native,
            ...nativeOrderBase,
            isRFQ: true,
            makerUri: rfqtMakerUri || '',
            ...(comparisonPrice ? { comparisonPrice: comparisonPrice.toNumber() } : {}),
            nativeOrder,
            fillData,
        };
    } else {
        // tslint:disable-next-line: no-object-literal-type-assertion
        return {
            liquiditySource: ERC20BridgeSource.Native,
            ...nativeOrderBase,
            isRFQ: false,
            fillData,
        };
    }
}
/**
 * Generates a report entry for an indicative RFQ Quote.
 * NOTE: this is used for the QuoteReport and quote price comparison data.
 * An indicative quote has no signed order yet, so fillData is empty and
 * the quote is treated as fully fillable at its quoted taker amount.
 */
export function indicativeQuoteToReportEntry(
    order: V4RFQIndicativeQuoteMM,
    comparisonPrice?: BigNumber | undefined,
): IndicativeRfqOrderQuoteReportEntry {
    const entry: IndicativeRfqOrderQuoteReportEntry = {
        liquiditySource: ERC20BridgeSource.Native,
        makerAmount: order.makerAmount,
        takerAmount: order.takerAmount,
        fillableTakerAmount: order.takerAmount,
        isRFQ: true,
        makerUri: order.makerUri,
        fillData: {},
    };
    if (comparisonPrice) {
        entry.comparisonPrice = comparisonPrice.toNumber();
    }
    return entry;
}
/**
* For the extended quote report, we output the filldata as JSON
*/
export function jsonifyFillData(source: ExtendedQuoteReportIndexedEntry): ExtendedQuoteReportIndexedEntryOutbound {
return {
...source,
fillData: JSON.stringify(source.fillData, (key: string, value: any) => {
if (key === '_samplerContract') {
return {};
} else {
return value;
}
}),
};
} | the_stack |
import {Injectable, Inject, CACHE_MANAGER, BadRequestException} from '@nestjs/common';
import {Cache} from 'cache-manager';
import Mint from 'mint-filter';
import * as fs from 'fs';
import {MESSAGES} from 'src/core/enums/message.enum';
import {plainToClass} from 'class-transformer';
import {LocationService} from 'src/modules/location/location.service';
import {paging, toInteger} from 'src/core/lib';
import {GeetestService} from 'src/modules/geetest/geetest.service';
// eslint-disable-next-line @typescript-eslint/no-var-requires
// const Geetest = require('geetest');
// typeorm
import {InjectRepository} from '@nestjs/typeorm';
import {Repository} from 'typeorm';
import {OptionsEntity} from 'src/entity/options.entity';
import {CommentEntity} from 'src/entity/comment.entity';
import {ArticleEntity} from 'src/entity/article.entity';
import {FileEntity} from 'src/entity/file.entity';
@Injectable()
export class CommentService {
private mint: any;
private statusList: Array<string> = ['comment_record_ip', 'comment_status'];
constructor(
@Inject(CACHE_MANAGER) private readonly cacheManger: Cache,
@InjectRepository(CommentEntity) private readonly commentRepository: Repository<CommentEntity>,
@InjectRepository(OptionsEntity) private readonly optionsRepository: Repository<OptionsEntity>,
@InjectRepository(ArticleEntity) private readonly articleRepository: Repository<ArticleEntity>,
@InjectRepository(FileEntity) private readonly fileRepository: Repository<FileEntity>,
private readonly locationService: LocationService,
private readonly geetestService: GeetestService
) {
this.initKeywords('uploads/keywords');
}
/**
* options table update
* */
private async options_update (key: string, value: any) {
return await this.optionsRepository.update({key}, {key, value: JSON.stringify(value) });
}
private dig_params (arr, key) {
return arr.reduce((acc, val) => {
let str = null;
try {
str = JSON.parse(val.value)
} catch(e) {
str = val.value;
}
(val.key === key) ? acc = str : undefined;
return acc;
}, undefined);
}
/**
* @desc 初始化敏感词加载filter
* private
* */
private async initKeywords (keywords_path): Promise<any> {
const files = await fs.promises.readdir(keywords_path);
const key = [];
for (const file of files) {
const keywords = await this.readKeywords(`${keywords_path}/${file}`);
key.push(...keywords);
}
const value = Array.from(new Set([...key]));
this.mint = new Mint(value);
}
/**
* @desc 读取对应目录下敏感词文件
* private
* */
private readKeywords (path): Promise<any> {
let str = '';
// 创建可读流
const readerStream = fs.createReadStream(path);
// 设置编码为 utf8。
readerStream.setEncoding('utf8');
// 处理流事件
const onData = new Promise(function (resolve, reject) {
readerStream.on('data', function(chunk: any) {
str += chunk;
});
readerStream.on('end',function () {
const val = str.split(/[\r\n\,]+/).filter(v => Boolean(v));
resolve(val);
});
readerStream.on('error', function (error) {
reject(error);
})
});
return onData;
}
/**
* @desc 评论列表
* @param query {object} page, pageSize
* @return comment List
* */
public async commentLists (query) {
const { page, pageSize } = paging(query.page, query.pageSize);
const commentHandle = await this.commentRepository.createQueryBuilder('comment')
.select(['comment', 'article.title', 'article.id'])
.leftJoin('comment.article', 'article', 'article.id = comment.article_id')
.orderBy({'comment.createdAt': 'DESC', 'comment.id': 'DESC'})
.offset(page).limit(pageSize);
// 如果id不存在则查所有,否则查询指定数据
query.id && await commentHandle.where('comment.article_id = :id', {id: query.id});
const [data, count] = await commentHandle.getManyAndCount();
return {list: data, count};
}
/**
* @desc 评论列表展示文章
* @desc 相关联展示文章下存在评论数
* */
public async articleRelationsComment () {
return await this.articleRepository.createQueryBuilder('article')
.select(['article.id AS id', 'article.title AS title', 'article.createdAt AS createdAt', 'COUNT(comment.article_id) count'])
.leftJoin('article.comments', 'comment')
.groupBy('article.id')
.orderBy({ 'article.updatedAt': 'DESC', 'article.id': 'DESC' })
.getRawMany();
}
/**
* @desc 文章关联评论列表
* @desc 前端展示数据
* @return 评论列表
* */
public async commentRelevanceList (query) {
const [data, count] = await this.commentRepository.createQueryBuilder('comment')
.select(['comment.id', 'comment.username', 'comment.content', 'comment.sensitive', 'comment.pass', 'comment.createdAt'])
.where('comment.article_id = :article_id', {article_id: query.article_id})
.andWhere('comment.status = :status', {status: 1})
.orderBy({ 'comment.createdAt': 'DESC', 'comment.id': 'DESC' })
.getManyAndCount();
return {list: data, count};
}
/**
* @desc 创建评论
* @param data {Object} email, content article_id
* @return create result
* */
public async createComment (data) {
const { email, content, article_id, username, captcha } = data;
const value = await this.optionsRepository.createQueryBuilder('options')
.select(['options.key', 'options.value'])
.where('options.key IN (:...keys)', { keys: ['webservice_key', 'comment_status', 'comment_record_ip' ] })
.getMany();
const comment_status = this.dig_params(value, 'comment_status');
const webservice_key = this.dig_params(value, 'webservice_key');
const comment_record_ip = this.dig_params(value, 'comment_record_ip');
const params = plainToClass(CommentEntity, {email, content, username, article: { id: article_id } });
if (!comment_status) {
// 评论功能关闭
throw new BadRequestException(MESSAGES.COMMENT_CLOSE);
}
// 校验极验
const { success } = await this.geetestService.getCaptchaOptions();
const everyValidate = Object.keys(captcha).every(v => v !== '');
// 极验已经正常配置
if (success) {
// 校验是否前端传递过来的值为空
if (!everyValidate) {
throw new BadRequestException('极验校验不能为空');
}
// 配置校验是否通过,将从body中获取captcha来进行校验
const validateStatus = await this.geetestService.validateCaptcha({ ...captcha });
if (!validateStatus) {
throw new BadRequestException('极验校验失败,无法创建评论');
}
}
if (comment_record_ip) {
if (!webservice_key) {
// 开启评论记录IP,且暂未配置webservice ip
throw new BadRequestException(MESSAGES.WEBSERVICE_KEY_EMPTY);
}
// 获取IP地址,IP解析实际地址
const {query_ip_location: { result: {ip} }, detail_location: { result: { address } } } = await this.locationService.getLocation();
Object.assign(params, {ip, address});
}
// 过滤空格,转换小写
const text = content.toLowerCase().replace(/\s*/g, '');
const filterWords = await this.mint.filter(text);
// 合并其他参数值
Object.assign(params, {
content: filterWords.text,
sensitive: JSON.stringify(filterWords.words),
original: content,
pass: Number(filterWords.pass)
});
// 插入数据
const { raw: { affectedRows } } = await this.commentRepository.createQueryBuilder('comment')
.insert()
.into(CommentEntity)
.values(params)
.execute();
return {message: `成功创建${affectedRows}条`};
}
/**
* @desc 删除评论
* */
public async deleteComment (data) {
const coverValue = Array.isArray(data.id) ? data.id : Array.of(data.id);
const {raw: { affectedRows } } = await this.commentRepository.createQueryBuilder('comment')
.delete()
.where('comment.id IN (:...id)', {id: coverValue})
.execute();
return {message: `成功删除${affectedRows}条`};
}
/**
* ===================评论配置=========================
* */
/**
* @desc 获取评论开启状态
* @desc not auth
* */
public async getCommentSwitchStatus (keys: Array<string>) {
return await this.getCommentStatus(keys);
}
/**
* @desc 获取评论开启状态及记录IP状态
* */
public async getCommentStatus (keys : Array<string>) {
const commentStatusLists = await this.optionsRepository.createQueryBuilder('options')
.select(['options.key', 'options.value'])
.where(`options.key IN (:...key)`, {key: keys})
.getMany();
return commentStatusLists.reduce((acc,val) => (void(acc[val.key] = val.value) || acc), {});
}
/**
* @desc 开启关闭评论
* @desc 开启关闭评论记录IP功能
* @param data {Object} status
* */
public async changeCommentStatus (data) {
const { status, field } = data;
console.log(this.statusList, 'abc');
if (this.statusList.includes(field)) {
await this.options_update(field, toInteger(status));
return {message: '更新成功'};
}
throw new BadRequestException('参数值错误');
}
/**
* @desc 上传敏感词文件
* */
public async uploadKeywordsFile (file) {
const {originalname, filename, size, path} = file;
const value = {original_name: originalname, file_type: 'keywords', filename, size, path };
const data = await this.fileRepository.save(value);
// 重载敏感词
await this.initKeywords('uploads/keywords');
return {name: data.original_name, id: data.id};
}
/**
* @desc 敏感词文件列表
* */
public async uploadKeyWordsLists () {
const data = await this.fileRepository.createQueryBuilder('keywords')
.select(['keywords.id', 'keywords.original_name'])
.where("file_type = 'keywords'")
.getMany();
return { list: data };
}
/**
* @desc 删除敏感词文件
* */
public async uploadKeyWordsDelete ({ id }) {
const data = await this.fileRepository.findOne({id});
if (!data) {
// 数据库数据不存在抛出error
throw new BadRequestException(MESSAGES.DATA_NOT_EXISTS_ERROR);
}
if (!fs.existsSync(data.path)) {
// 文件不存在
throw new BadRequestException(MESSAGES.FILE_NOT_EXISTS_ERROR);
}
const {raw: { affectedRows } } = await this.fileRepository.createQueryBuilder()
.delete()
.where('id = :id', {id})
.execute();
if (affectedRows) {
// 成功在数据库中删除数据同时删除文件
await fs.unlinkSync(data.path);
// 重载敏感词
await this.initKeywords('uploads/keywords');
}
return {message: `成功删除${affectedRows}条`};
}
/**
* @desc 检测识别文本敏感词
* */
public async detectionSensitiveWords ({ text }) {
// 过滤空格,转换小写
const word = text.toLowerCase().replace(/\s*/g, '');
const { words, pass } = await this.mint.filter(word);
return { words, pass };
}
/**
* ===================极验配置=========================
* */
} | the_stack |
import { Matrix3 } from './Matrix3'
import { Matrix4 } from './Matrix4'
// import {Float32BufferAttribute} from '../core/BufferAttribute'
/**
 * Element-wise approximate comparison of two f32 arrays: true when both have
 * the same length and every pair differs by at most `tolerance` in either
 * direction.
 *
 * Fix: the original compared the signed delta (`delta > tolerance`), so a
 * difference in the negative direction (a[i] < b[i]) was never rejected.
 * The magnitude is checked now.
 */
function arraysApproxEquals(a: Array<f32>, b: Array<f32>, tolerance: f32 = 0.0001): bool {
  if (a.length != b.length) {
    return false
  }
  for (let i = 0, il = a.length; i < il; i++) {
    const delta = a[i] - b[i]
    // reject when |delta| exceeds tolerance
    if (delta > tolerance || delta < -tolerance) {
      return false
    }
  }
  return true
}
// function matrixEquals<T>(a: T, b: T, tolerance: f32 = 0.0001): bool {
// if (a.elements.length != b.elements.length) {
// return false
// }
// for (var i = 0, il = a.elements.length; i < il; i++) {
// var delta = a.elements[i] - b.elements[i]
// if (delta > tolerance) {
// return false
// }
// }
// return true
// }
// Embeds a 3x3 matrix into the upper-left corner of a fresh identity 4x4
// (column-major: Matrix3 columns have stride 3, Matrix4 columns stride 4).
function toMatrix4(m3: Matrix3): Matrix4 {
  const m4 = new Matrix4()
  const src = m3.elements
  const dst = m4.elements
  for (let col = 0; col < 3; col++) {
    for (let row = 0; row < 3; row++) {
      dst[col * 4 + row] = src[col * 3 + row]
    }
  }
  return m4
}
describe('Matrix3', () => {
// INSTANCING
test('constructor', () => {
const a = new Matrix3()
expect(a.determinant()).toBe(1)
const b = new Matrix3()
b.set(0, 1, 2, 3, 4, 5, 6, 7, 8)
expect(b.elements[0]).toBe(0)
expect(b.elements[1]).toBe(3)
expect(b.elements[2]).toBe(6)
expect(b.elements[3]).toBe(1)
expect(b.elements[4]).toBe(4)
expect(b.elements[5]).toBe(7)
expect(b.elements[6]).toBe(2)
expect(b.elements[7]).toBe(5)
expect(b.elements[8]).toBe(8)
expect(arraysApproxEquals(a.elements, b.elements)).toBeFalsy()
})
todo('isMatrix3')
test('set', () => {
const b = new Matrix3()
expect(b.determinant()).toBe(1)
b.set(0, 1, 2, 3, 4, 5, 6, 7, 8)
expect(b.elements[0]).toBe(0)
expect(b.elements[1]).toBe(3)
expect(b.elements[2]).toBe(6)
expect(b.elements[3]).toBe(1)
expect(b.elements[4]).toBe(4)
expect(b.elements[5]).toBe(7)
expect(b.elements[6]).toBe(2)
expect(b.elements[7]).toBe(5)
expect(b.elements[8]).toBe(8)
})
test('identity', () => {
const b = new Matrix3()
b.set(0, 1, 2, 3, 4, 5, 6, 7, 8)
expect(b.elements[0] == 0).toBeTruthy()
expect(b.elements[1] == 3).toBeTruthy()
expect(b.elements[2] == 6).toBeTruthy()
expect(b.elements[3] == 1).toBeTruthy()
expect(b.elements[4] == 4).toBeTruthy()
expect(b.elements[5] == 7).toBeTruthy()
expect(b.elements[6] == 2).toBeTruthy()
expect(b.elements[7] == 5).toBeTruthy()
expect(b.elements[8] == 8).toBeTruthy()
const a = new Matrix3()
expect(arraysApproxEquals(a.elements, b.elements)).toBeFalsy()
b.identity()
expect(arraysApproxEquals(a.elements, b.elements)).toBeTruthy()
})
test('clone', () => {
const a = new Matrix3()
a.set(0, 1, 2, 3, 4, 5, 6, 7, 8)
const b = a.clone()
expect(arraysApproxEquals(a.elements, b.elements)).toBeTruthy()
// ensure that it is a true copy
a.elements[0] = 2
expect(arraysApproxEquals(a.elements, b.elements)).toBeFalsy('six')
})
test('copy', () => {
const a = new Matrix3()
a.set(0, 1, 2, 3, 4, 5, 6, 7, 8)
const b = new Matrix3()
b.copy(a)
expect(arraysApproxEquals(a.elements, b.elements)).toBeTruthy()
// ensure that it is a true copy
a.elements[0] = 2
expect(arraysApproxEquals(a.elements, b.elements)).toBeFalsy()
})
todo('setFromMatrix4')
todo('applyToBufferAttribute')
// test('applyToBufferAttribute', assert => {
// var a = new Matrix3().set(1, 2, 3, 4, 5, 6, 7, 8, 9)
// var attr = new Float32BufferAttribute([1, 2, 1, 3, 0, 3], 3)
// var expected = new Float32Array([8, 20, 32, 12, 30, 48])
// var applied = a.applyToBufferAttribute(attr)
// assert.deepEqual(applied.array, expected, 'Check resulting buffer')
// })
test('multiply/premultiply', () => {
// both simply just wrap multiplyMatrices
const a = new Matrix3()
a.set(2, 3, 5, 7, 11, 13, 17, 19, 23)
const b = new Matrix3()
b.set(29, 31, 37, 41, 43, 47, 53, 59, 61)
const expectedMultiply: Array<f32> = [446, 1343, 2491, 486, 1457, 2701, 520, 1569, 2925]
const expectedPremultiply: Array<f32> = [904, 1182, 1556, 1131, 1489, 1967, 1399, 1845, 2435]
a.multiply(b)
expect(a.elements).toStrictEqual(expectedMultiply)
a.set(2, 3, 5, 7, 11, 13, 17, 19, 23)
a.premultiply(b)
expect(a.elements).toStrictEqual(expectedPremultiply)
})
test('multiplyMatrices', () => {
// Reference:
//
// #!/usr/bin/env python
// from __future__ import print_function
// import numpy as np
// print(
// np.dot(
// np.reshape([2, 3, 5, 7, 11, 13, 17, 19, 23], (3, 3)),
// np.reshape([29, 31, 37, 41, 43, 47, 53, 59, 61], (3, 3))
// )
// )
//
// [[ 446 486 520]
// [1343 1457 1569]
// [2491 2701 2925]]
const lhs = new Matrix3()
lhs.set(2, 3, 5, 7, 11, 13, 17, 19, 23)
const rhs = new Matrix3()
rhs.set(29, 31, 37, 41, 43, 47, 53, 59, 61)
const ans = new Matrix3()
ans.multiplyMatrices(lhs, rhs)
expect(ans.elements[0]).toBe(446)
expect(ans.elements[1]).toBe(1343)
expect(ans.elements[2]).toBe(2491)
expect(ans.elements[3]).toBe(486)
expect(ans.elements[4]).toBe(1457)
expect(ans.elements[5]).toBe(2701)
expect(ans.elements[6]).toBe(520)
expect(ans.elements[7]).toBe(1569)
expect(ans.elements[8]).toBe(2925)
})
test('multiplyScalar', () => {
const b = new Matrix3()
b.set(0, 1, 2, 3, 4, 5, 6, 7, 8)
expect(b.elements[0]).toBe(0)
expect(b.elements[1]).toBe(3)
expect(b.elements[2]).toBe(6)
expect(b.elements[3]).toBe(1)
expect(b.elements[4]).toBe(4)
expect(b.elements[5]).toBe(7)
expect(b.elements[6]).toBe(2)
expect(b.elements[7]).toBe(5)
expect(b.elements[8]).toBe(8)
b.multiplyScalar(2)
expect(b.elements[0]).toBe(0 * 2)
expect(b.elements[1]).toBe(3 * 2)
expect(b.elements[2]).toBe(6 * 2)
expect(b.elements[3]).toBe(1 * 2)
expect(b.elements[4]).toBe(4 * 2)
expect(b.elements[5]).toBe(7 * 2)
expect(b.elements[6]).toBe(2 * 2)
expect(b.elements[7]).toBe(5 * 2)
expect(b.elements[8]).toBe(8 * 2)
})
test('determinant', () => {
const a = new Matrix3()
expect(a.determinant()).toBe(1)
a.elements[0] = 2
expect(a.determinant()).toBe(2)
a.elements[0] = 0
expect(a.determinant()).toBe(0)
// calculated via http://www.euclideanspace.com/maths/algebra/matrix/functions/determinant/threeD/index.htm
a.set(2, 3, 4, 5, 13, 7, 8, 9, 11)
expect(a.determinant()).toBe(-73)
})
test('getInverse', () => {
const identity = new Matrix3()
const identity4 = new Matrix4()
const a = new Matrix3()
const b = new Matrix3()
b.set(0, 0, 0, 0, 0, 0, 0, 0, 0)
const c = new Matrix3()
c.set(0, 0, 0, 0, 0, 0, 0, 0, 0)
b.getInverse(a)
expect(arraysApproxEquals(a.elements, identity.elements)).toBeTruthy()
expect(b.getInverse(c)).toBe(false, 'The inverse should not be calculable.')
expect(b.elements).toStrictEqual(identity.elements)
const testMatrices: Array<Matrix4> = [
new Matrix4(),
new Matrix4(),
new Matrix4(),
new Matrix4(),
new Matrix4(),
new Matrix4(),
new Matrix4(),
new Matrix4(),
]
testMatrices[0].makeRotationX(0.3)
testMatrices[1].makeRotationX(-0.3)
testMatrices[2].makeRotationY(0.3)
testMatrices[3].makeRotationY(-0.3)
testMatrices[4].makeRotationZ(0.3)
testMatrices[5].makeRotationZ(-0.3)
testMatrices[6].makeScale(1, 2, 3)
testMatrices[7].makeScale(1 / 8, 1 / 2, 1 / 3)
for (let i = 0, il = testMatrices.length; i < il; i++) {
const m = testMatrices[i]
a.setFromMatrix4(m)
b.getInverse(a)
const mInverse3 = b
const mInverse = toMatrix4(mInverse3)
// the determinant of the inverse should be the reciprocal
expect(Mathf.abs(a.determinant() * mInverse3.determinant() - 1)).toBeLessThan(0.0001)
expect(Mathf.abs(m.determinant() * mInverse.determinant() - 1)).toBeLessThan(0.0001)
const mProduct = new Matrix4()
mProduct.multiplyMatrices(m, mInverse)
expect(Mathf.abs(mProduct.determinant() - 1)).toBeLessThan(0.0001)
expect(arraysApproxEquals(mProduct.elements, identity4.elements)).toBeTruthy()
}
})
test('transpose', () => {
const a = new Matrix3()
let b = a.clone().transpose()
expect(arraysApproxEquals(a.elements, b.elements)).toBeTruthy()
b = new Matrix3()
b.set(0, 1, 2, 3, 4, 5, 6, 7, 8)
const c = b.clone().transpose()
expect(arraysApproxEquals(b.elements, c.elements)).toBeFalsy()
c.transpose()
expect(arraysApproxEquals(b.elements, c.elements)).toBeTruthy()
})
test('getNormalMatrix', () => {
const a = new Matrix3()
const b = new Matrix4()
b.set(2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 57)
const expected = new Matrix3()
expected.set(
-1.2857142857142856,
0.7142857142857143,
0.2857142857142857,
0.7428571428571429,
-0.7571428571428571,
0.15714285714285714,
-0.19999999999999998,
0.3,
-0.09999999999999999
)
a.getNormalMatrix(b)
expect(arraysApproxEquals(a.elements, expected.elements)).toBeTruthy()
})
todo('transposeIntoArray')
// This test doesn't work as expected for some reason. Hmmmm....
todo('setUvTransform')
// Disabled reference implementation kept below for when setUvTransform is
// revisited; it mirrors the equivalent three.js unit test.
// test('setUvTransform', () => {
//   var a = new Matrix3()
//   a.set(
//     0.1767766952966369,
//     0.17677669529663687,
//     0.32322330470336313,
//     -0.17677669529663687,
//     0.1767766952966369,
//     0.5,
//     0,
//     0,
//     1
//   )
//   var b = new Matrix3()
//   const centerX = 0.5
//   const centerY = 0.5
//   const offsetX = 0
//   const offsetY = 0
//   const repeatX = 0.25
//   const repeatY = 0.25
//   const rotation = 0.7753981633974483
//   var expected = new Matrix3()
//   b.set(
//     0.1785355940258599,
//     0.17500011904519763,
//     0.32323214346447127,
//     -0.17500011904519763,
//     0.1785355940258599,
//     0.4982322625096689,
//     0,
//     0,
//     1
//   )
//   a.setUvTransform(offsetX, offsetY, repeatX, repeatY, rotation, centerX, centerY)
//   b.identity()
//   .translate(-centerX, -centerY)
//   .rotate(rotation)
//   .scale(repeatX, repeatY)
//   .translate(centerX, centerY)
//   .translate(offsetX, offsetY)
//   expect(arraysApproxEquals(a.elements, expected.elements)).toBeTruthy()
//   expect(arraysApproxEquals(b.elements, expected.elements)).toBeTruthy()
// })
test('scale', () => {
const a = new Matrix3()
a.set(1, 2, 3, 4, 5, 6, 7, 8, 9)
const expected = new Matrix3()
expected.set(0.25, 0.5, 0.75, 1, 1.25, 1.5, 7, 8, 9)
a.scale(0.25, 0.25)
expect(arraysApproxEquals(a.elements, expected.elements)).toBeTruthy()
})
test('rotate', () => {
  const a = new Matrix3()
  a.set(1, 2, 3, 4, 5, 6, 7, 8, 9)
  // Expected result of rotating by PI/4; the third row is unaffected by a
  // 2D rotation applied to a 3x3 homogeneous matrix.
  const expected = new Matrix3()
  expected.set(
    3.5355339059327373,
    4.949747468305833,
    6.363961030678928,
    2.121320343559643,
    2.121320343559643,
    2.1213203435596433,
    7,
    8,
    9
  )
  a.rotate(Mathf.PI / 4)
  expect(arraysApproxEquals(a.elements, expected.elements)).toBeTruthy()
})
test('translate', () => {
const a = new Matrix3()
a.set(1, 2, 3, 4, 5, 6, 7, 8, 9)
const expected = new Matrix3()
expected.set(22, 26, 30, 53, 61, 69, 7, 8, 9)
a.translate(3, 7)
expect(arraysApproxEquals(a.elements, expected.elements)).toBeTruthy()
})
test('equals', () => {
  const left = new Matrix3()
  left.set(0, 1, 2, 3, 4, 5, 6, 7, 8)
  const right = new Matrix3()
  right.set(0, -1, 2, 3, 4, 5, 6, 7, 8)
  // A single differing entry makes the matrices unequal in both directions.
  expect(left.equals(right)).toBeFalsy()
  expect(right.equals(left)).toBeFalsy()
  // After copying, equality must hold symmetrically.
  left.copy(right)
  expect(left.equals(right)).toBeTruthy()
  expect(right.equals(left)).toBeTruthy()
})
todo('fromArray')
test('toArray', () => {
  const a = new Matrix3()
  a.set(1, 2, 3, 4, 5, 6, 7, 8, 9)
  // set() takes row-major input but elements/toArray are column-major,
  // hence the transposed-looking expectation.
  const noOffset: Array<f32> = [1, 4, 7, 2, 5, 8, 3, 6, 9]
  let array = a.toArray()
  expect(array).toStrictEqual(noOffset, 'No array, no offset')
  // Writing into a caller-supplied array.
  array = []
  a.toArray(array)
  expect(array).toStrictEqual(noOffset, 'With array, no offset')
  // Writing at an offset preserves the element before the offset.
  const withOffset: Array<f32> = [-999, 1, 4, 7, 2, 5, 8, 3, 6, 9]
  array = [-999]
  a.toArray(array, 1)
  expect(array).toStrictEqual(withOffset, 'With array, with offset')
})
}) | the_stack |
import { HttpClient } from '@angular/common/http';
import { Injectable, OnDestroy } from '@angular/core';
import { environment as env } from '@env/environment';
import { del, get, keys, set } from 'idb-keyval';
import { BehaviorSubject, Subject } from 'rxjs';
import { Const, InventoryItem, InventorySocket, ItemType, SelectedUser } from './model';
import { NotificationService } from './notification.service';
const LOG_CSS = `color: mediumpurple`;
export const CUSTOM_GOD_ROLLS = 'custom-god-rolls';
export const RYKER_GOD_ROLLS_URL = 'https://docs.google.com/spreadsheets/d/1bHsAqGldtzvQnq2kIhNArr_15taYDkHEhCq7jjhSqF0/edit#gid=1523804770';
@Injectable({
  providedIn: 'root',
})
/**
 * Loads community "panda" god-roll weapon ratings (a custom override from
 * IndexedDB, a versioned IndexedDB cache, or the bundled JSON asset) and
 * annotates inventory weapons with PvE/PvP roll quality, is:* search tags,
 * and recommended masterworks.
 * NOTE(review): names (Adept/Timelost, Legendary, masterwork) suggest
 * Destiny 2 item data — confirm against model.ts.
 */
export class PandaGodrollsService implements OnDestroy {
  // Completion signal fired in ngOnDestroy; nothing in this class visibly
  // subscribes via it — presumably consumed elsewhere, verify.
  private unsubscribe$: Subject<void> = new Subject<void>();
  // Emits true once roll data has been loaded and indexed (see update()).
  public loaded$: BehaviorSubject<boolean> = new BehaviorSubject(false);
  // Metadata (title/date/manifest version) of the currently loaded roll set.
  public meta$: BehaviorSubject<RollMeta | null> = new BehaviorSubject(null);
  // Roll lookup keyed by lower-cased weapon name; unset until update() runs.
  private data: { [name: string]: GunInfo };
  // Active input-mode preference; controller rolls are used when true.
  public isController = true;
  // When true, only the last two perk sockets must match for a roll rating.
  public matchLastTwoSockets = false;
  constructor(
    private notificationService: NotificationService,
    private httpClient: HttpClient
  ) {
  }
  /**
   * Derives the controller-vs-MnK preference for the signed-in user and
   * (re)loads roll annotations accordingly.
   * Precedence: explicit 'mnk-vs-controller' localStorage value wins;
   * otherwise MnK is assumed for Steam accounts, controller for all others.
   */
  public async updateUser(selectedUser: SelectedUser) {
    const controllerPref = localStorage.getItem('mnk-vs-controller');
    const godRollLastTwoOnly = localStorage.getItem('god-roll-last-two-only') == 'true';
    let controller = false;
    if (controllerPref != null) {
      controller = 'true' == controllerPref;
    } else {
      // if no explicit pref, assume MnK on Steam, controller otherwise
      if (selectedUser != null && selectedUser.userInfo.membershipType == Const.STEAM_PLATFORM.type) {
        controller = false;
      } else {
        controller = true;
      }
    }
    await this.update(controller, godRollLastTwoOnly);
  }
  /**
   * Persists both preferences to localStorage as 'true'/'false' strings,
   * then re-runs update() with them.
   */
  public async saveSettingsAndRefreshWishlist(
    controller: boolean,
    godRollLastTwoOnly: boolean
  ) {
    localStorage.setItem('mnk-vs-controller', controller ? 'true' : 'false');
    localStorage.setItem('god-roll-last-two-only', godRollLastTwoOnly ? 'true' : 'false'
    );
    await this.update(controller, godRollLastTwoOnly);
  }
  /** Drops the in-memory roll table and reloads with the current settings. */
  public async reload() {
    // NOTE(review): assigning null to a non-nullable-typed field — would not
    // compile under strictNullChecks; type of `data` may need `| null`.
    this.data = null;
    await this.update(this.isController, this.matchLastTwoSockets);
  }
  /**
   * Stores the given settings and, if no roll table is cached in memory,
   * loads one via load() and indexes it by weapon name with separate
   * MnK/controller entries. Always emits true on loaded$ when done.
   */
  public async update(
    isController: boolean,
    matchLastTwoSockets: boolean
  ): Promise<void> {
    this.isController = isController;
    this.matchLastTwoSockets = matchLastTwoSockets;
    if (this.data != null) {
      // no need to reload
    } else {
      const allRolls = await this.load();
      const temp = allRolls.rolls;
      const meta: RollMeta = {
        title: allRolls.title,
        date: allRolls.date,
        manifestVersion: allRolls.manifestVersion
      };
      this.meta$.next(meta);
      console.dir(meta);
      // Index rolls by weapon name; one weapon may carry distinct MnK and
      // controller entries (GunRolls.mnk / GunRolls.controller flags).
      const data: { [name: string]: GunInfo } = {};
      for (const c of temp) {
        const key = c.name;
        if (data[key] == null) {
          data[key] = {
            mnk: null,
            controller: null,
          };
        }
        if (c.mnk) {
          data[key].mnk = c;
        }
        if (c.controller) {
          data[key].controller = c;
        }
      }
      this.data = data;
      console.log('%cLoaded ' + temp.length + ' panda guns.', LOG_CSS);
    }
    this.loaded$.next(true);
  }
  /**
   * Annotates every random-rolled weapon in `items` with god-roll info for
   * the current input mode. Non-weapons and fixed rolls are skipped; weapons
   * with no roll data get noGodRollInfo (plus an 'is:nodata' search tag for
   * Legendary tier).
   */
  public processItems(items: InventoryItem[]): void {
    if (this.data == null) {
      console.log('%cNo panda data present.', LOG_CSS);
      return;
    }
    for (const i of items) {
      if (i.type !== ItemType.Weapon) {
        continue;
      }
      // skip fixed rolls
      if (!i.isRandomRoll) {
        continue;
      }
      // Roll data is keyed by base name, so strip the variant suffixes
      // ' (Adept)' and ' (Timelost)' before the lookup.
      let name = i.name.toLowerCase();
      const suffix = ' (Adept)'.toLowerCase();
      if (name.endsWith(suffix)) {
        name = name.substring(0, name.length - suffix.length);
      }
      const vogsuffix = ' (Timelost)'.toLowerCase();
      if (name.endsWith(vogsuffix)) {
        name = name.substring(0, name.length - vogsuffix.length);
      }
      const key = name;
      const info = this.data[key];
      if (info == null) {
        i.noGodRollInfo = true;
        if (i.tier == 'Legendary') {
          i.searchText = i.searchText + ' is:nodata';
          console.log('%cNo panda for: ' + i.name, LOG_CSS);
        }
        continue;
      }
      // Pick the roll set for the active input mode; a weapon may have only
      // one of the two.
      let rolls: GunRolls = null;
      if (this.isController) {
        rolls = info.controller;
      } else {
        rolls = info.mnk;
      }
      if (rolls == null) {
        continue;
      }
      this.processGunRolls(i, rolls);
    }
  }
  /**
   * True when the socket owns at least one rated perk (pandaPve/pandaPvp > 0)
   * but none of its active plugs is rated — i.e. re-selecting a perk the
   * player already has would improve the roll.
   */
  public static isFixNeeded(s: InventorySocket): boolean {
    let bestPerkHad = 0;
    let bestPerkSelected = 0;
    for (const p of s.plugs) {
      if (p.pandaPve > bestPerkHad) {
        bestPerkHad = p.pandaPve;
      }
      if (p.pandaPvp > bestPerkHad) {
        bestPerkHad = p.pandaPvp;
      }
      if (
        p.active &&
        (p.pandaPve > bestPerkSelected || p.pandaPvp > bestPerkSelected)
      ) {
        bestPerkSelected = Math.max(p.pandaPve, p.pandaPvp);
      }
    }
    if (bestPerkSelected == 0 && bestPerkHad > 0) {
      return true;
    }
    return false;
  }
  /**
   * Scores the item for PvE and PvP (2 = god roll, 1 = good roll, 0 = neither)
   * and appends the matching is:* tags to both searchText and godRollInfo,
   * including 'is:fixme' when any perk socket has a better owned-but-unselected
   * perk (see isFixNeeded).
   */
  private processGunRolls(i: InventoryItem, rolls: GunRolls) {
    i.pandaPve = this.processGunRoll(i, rolls.pve, true);
    i.pandaPvp = this.processGunRoll(i, rolls.pvp, false);
    i.godRollInfo = '';
    if (i.pandaPvp > 1) {
      i.searchText = i.searchText + ' is:godrollpvp is:goodrollpvp';
      i.godRollInfo = i.godRollInfo + ' is:godrollpvp is:goodrollpvp';
    } else if (i.pandaPvp > 0) {
      i.searchText = i.searchText + ' is:goodrollpvp';
      i.godRollInfo = i.godRollInfo + ' is:goodrollpvp';
    }
    if (i.pandaPve > 1) {
      i.searchText = i.searchText + ' is:godrollpve is:goodrollpve';
      i.godRollInfo = i.godRollInfo + ' is:godrollpve is:goodrollpve';
    } else if (i.pandaPve > 0) {
      i.searchText = i.searchText + ' is:goodrollpve';
      i.godRollInfo = i.godRollInfo + ' is:goodrollpve';
    }
    if (i.pandaPve < 1 && i.pandaPvp < 1) {
      i.searchText = i.searchText + ' is:notgoodroll';
      i.godRollInfo = i.godRollInfo + ' is:notgoodroll';
    }
    let needsFixing = false;
    const perkSockets = i.sockets.filter(s => s.isWeaponPerk);
    for (const s of perkSockets) {
      const socketNeedsFixing = PandaGodrollsService.isFixNeeded(s);
      needsFixing = needsFixing || socketNeedsFixing;
    }
    if (needsFixing) {
      i.searchText = i.searchText + ' is:fixme';
      i.godRollInfo = i.godRollInfo + ' is:fixme';
    }
  }
  // Title-cases a space-separated phrase, e.g. 'quick draw' -> 'Quick Draw'.
  private static toTitleCase(phrase: string) {
    return phrase
      .toLowerCase()
      .split(' ')
      .map((word) => word.charAt(0).toUpperCase() + word.slice(1))
      .join(' ');
  }
  /**
   * Scores one roll (PvE or PvP, selected by `pve`) against the item:
   * returns 2 when every considered perk socket holds a "great" perk,
   * 1 when every considered socket holds at least a good perk (or the item
   * missed a full great roll by exactly one socket), else 0.
   * Side effects: marks matching owned plugs (pandaPve/pandaPvp = 1 or 2),
   * marks matching possible plugs the same way, and records god-tier /
   * recommended masterworks on i.masterwork.
   */
  private processGunRoll(
    i: InventoryItem,
    roll: GunRoll,
    pve: boolean
  ): number {
    // Start optimistic; each considered socket may downgrade these flags.
    let goodRollFound = true;
    let greatRollFound = true;
    if (i.masterwork) {
      const target = [];
      for (const pm of roll.masterwork) {
        if (i.masterwork.name.toLowerCase() == pm) {
          if (pve) {
            i.masterwork.godTierPve = true;
          } else {
            i.masterwork.godTierPvp = true;
          }
        }
        target.push(PandaGodrollsService.toTitleCase(pm));
      }
      if (pve) {
        i.masterwork.recommendedPveMws = target;
      } else {
        i.masterwork.recommendedPvpMws = target;
      }
    }
    let cntr = 0;
    // 2021-05-31 if this is a great roll that only missed by one socket, mark it as a good roll
    let greatCount = 0;
    const perkSockets = i.sockets.filter(s => s.isWeaponPerk);
    for (const s of perkSockets) {
      cntr++;
      let goodPerkFound = false;
      let greatPerkFound = false;
      // Rate the plugs the player actually owns on this socket.
      for (const p of s.plugs) {
        const name = p.name.toLowerCase();
        for (const goodPerk of roll.goodPerks) {
          if (goodPerk == name) {
            goodPerkFound = true;
            if (pve) {
              p.pandaPve = 1;
            } else {
              p.pandaPvp = 1;
            }
          }
        }
        for (const greatPerk of roll.greatPerks) {
          if (greatPerk == name) {
            greatPerkFound = true;
            if (pve) {
              p.pandaPve = 2;
            } else {
              p.pandaPvp = 2;
            }
          }
        }
      }
      if (greatPerkFound) {
        greatCount++;
      }
      // When matching only the last 2 sockets, earlier sockets don't affect
      // the good/great flags (they are still rated for display above).
      if (!this.matchLastTwoSockets || cntr >= perkSockets.length - 1) {
        goodRollFound = (goodPerkFound || greatPerkFound) && goodRollFound;
        greatRollFound = greatPerkFound && greatRollFound;
      }
      // Also rate perks the item could roll (possiblePlugs) for UI hints;
      // these do not change the good/great flags.
      for (const p of s.possiblePlugs) {
        const name = p.name.toLowerCase();
        for (const goodPerk of roll.goodPerks) {
          if (goodPerk == name) {
            if (pve) {
              p.pandaPve = 1;
            } else {
              p.pandaPvp = 1;
            }
          }
        }
        for (const greatPerk of roll.greatPerks) {
          if (greatPerk == name) {
            if (pve) {
              p.pandaPve = 2;
            } else {
              p.pandaPvp = 2;
            }
          }
        }
      }
    }
    // if we're doing normal processing and we don't have a good or great roll, double check to see if we only missed by one on a god roll
    // if so, count it as a good roll
    if (!this.matchLastTwoSockets && !(greatRollFound || goodRollFound)) {
      // if we're off by only one on great rolls let's call it good
      if (greatCount >= (perkSockets.length - 1)) {
        goodRollFound = true;
      }
    }
    return greatRollFound ? 2 : goodRollFound ? 1 : 0;
  }
  /**
   * Sanity-checks a roll file: must exist and have a non-empty rolls array,
   * a date and a manifestVersion.
   */
  public static isValid(completeRolls: CompleteGodRolls): boolean {
    if (!completeRolls || !completeRolls.rolls) {
      return false;
    }
    if (!completeRolls.rolls?.length || !completeRolls.date || !completeRolls.manifestVersion) {
      return false;
    }
    return true;
  }
  /**
   * Reads the user-supplied override roll file from IndexedDB
   * (CUSTOM_GOD_ROLLS key); returns null when absent or invalid.
   */
  public static async getCustomGodRolls(): Promise<CompleteGodRolls | null> {
    const custom: CompleteGodRolls = await get(CUSTOM_GOD_ROLLS);
    if (PandaGodrollsService.isValid(custom)) {
      return custom;
    }
    return null;
  }
  /**
   * Resolves the roll file to use, in priority order: custom override,
   * app-version-keyed IndexedDB cache, then the bundled JSON asset (which is
   * then cached; stale cache keys with the 'panda-rolls' prefix are purged).
   * NOTE(review): the leading apostrophe inside the `'%c ...` template
   * strings below is printed literally before the style directive — looks
   * unintentional.
   */
  private async load(): Promise<CompleteGodRolls> {
    const prefix = 'panda-rolls';
    const t0 = performance.now();
    const key = `${prefix}-${env.versions.app}`;
    let completeGodRolls: CompleteGodRolls = await PandaGodrollsService.getCustomGodRolls();
    let customGodRolls = false;
    if (!completeGodRolls) {
      completeGodRolls = await get(key);
    } else {
      console.log(`'%c USING CUSTOM GOD ROLLS: ${completeGodRolls.title}`, LOG_CSS);
      customGodRolls = true;
    }
    if (completeGodRolls == null || completeGodRolls.rolls?.length == 0) {
      console.log(`'%c No cached ${prefix}: ${key}`, LOG_CSS);
      // clear cache
      const ks = await keys();
      for (const k of ks) {
        if (k.toString().startsWith(prefix)) {
          del(k);
        }
      }
      completeGodRolls = await this.httpClient
        .get<CompleteGodRolls>(
          `/assets/panda-godrolls.min.json?v=${env.versions.app}`
        )
        .toPromise();
      set(key, completeGodRolls);
      console.log(`'%c ${prefix} downloaded, parsed and saved.`, LOG_CSS);
    } else {
      console.log(`'%c Using cached ${prefix}: ${key}`, LOG_CSS);
    }
    const t1 = performance.now();
    console.log(`'%c ${t1 - t0}ms to load wishlists`, LOG_CSS);
    const loadDate = new Date(completeGodRolls.date);
    if (customGodRolls) {
      this.notificationService.success(`CUSTOM GOD ROLLS: Loaded '${completeGodRolls.title}' from ${loadDate.toLocaleDateString()}. You can use /perkbench to undo this override`);
    } else {
      // Success toast for the normal path is intentionally disabled.
      // this.notificationService.success(`God rolls: Loaded '${completeGodRolls.title}' from ${loadDate.toLocaleDateString()}`);
    }
    return completeGodRolls;
  }
  ngOnDestroy(): void {
    this.unsubscribe$.next();
    this.unsubscribe$.complete();
  }
}
/** Per-weapon pair of roll definitions, one per input mode (either may be null). */
interface GunInfo {
  mnk: GunRolls | null;
  controller: GunRolls | null;
}
/** Community roll ratings for one weapon, covering both PvE and PvP. */
export interface GunRolls {
  name: string;
  sheet: string;
  pve: GunRoll;
  pvp: GunRoll;
  // Which input mode(s) this entry applies to.
  mnk: boolean;
  controller: boolean;
} // todo remove sheet and version, replace with note and URL?
/** One rating: recommended masterworks plus great (god-tier) and good perk names. */
export interface GunRoll {
  masterwork: string[];
  greatPerks: string[];
  goodPerks: string[];
}
/** A complete downloadable/cacheable roll file. */
export interface CompleteGodRolls {
  title: string;
  date: string; // iso date format
  manifestVersion: string;
  rolls: GunRolls[];
}
/** Metadata subset of CompleteGodRolls, published on meta$. */
export interface RollMeta {
  title: string;
  date: string; // iso date format
  manifestVersion: string;
}
import * as sapling from '@airgap/sapling-wasm'
import BigNumber from '../../../../dependencies/src/bignumber.js-9.0.0/bignumber'
import { IAirGapTransaction } from '../../../../interfaces/IAirGapTransaction'
import { flattenArray } from '../../../../utils/array'
import { stripHexPrefix } from '../../../../utils/hex'
import { ProtocolNetwork } from '../../../../utils/ProtocolNetwork'
import { ProtocolSymbols } from '../../../../utils/ProtocolSymbols'
import { TezosSaplingCiphertext } from '../../types/sapling/TezosSaplingCiphertext'
import { TezosSaplingInput } from '../../types/sapling/TezosSaplingInput'
import { TezosSaplingOutput } from '../../types/sapling/TezosSaplingOutput'
import { TezosSaplingOutputDescription, TezosSaplingTransaction } from '../../types/sapling/TezosSaplingTransaction'
import { TezosSaplingWrappedTransaction } from '../../types/sapling/TezosSaplingWrappedTransaction'
import { TezosSaplingAddress } from '../TezosSaplingAddress'
import { TezosSaplingCryptoClient } from '../TezosSaplingCryptoClient'
import { TezosSaplingEncoder } from './TezosSaplingEncoder'
/**
 * Derives human-readable transaction details and tracks note ownership for
 * Tezos Sapling (shielded) transactions: which inputs/outputs belong to a
 * given viewing key, and which notes remain unspent.
 */
export class TezosSaplingBookkeeper {
  constructor(
    private readonly identifier: ProtocolSymbols,
    private readonly network: ProtocolNetwork,
    private readonly cryptoClient: TezosSaplingCryptoClient,
    private readonly encoder: TezosSaplingEncoder
  ) {}
  /**
   * Builds IAirGapTransaction entries for an unsigned transfer: one per
   * shielded output, plus one per wrapped transaction that unshields funds
   * (unshielded amount = sum(inputs) - sum(outputs)). Fees are reported as '0'.
   */
  public getUnsignedTransactionDetails(
    sender: TezosSaplingAddress,
    inputs: TezosSaplingInput[],
    outputs: TezosSaplingOutput[],
    wrappedTransactions: TezosSaplingWrappedTransaction[]
  ): IAirGapTransaction[] {
    const outputsDetails: IAirGapTransaction[] = outputs.map((out: TezosSaplingOutput) => ({
      from: [sender.getValue()],
      to: [out.address],
      isInbound: false,
      amount: out.value,
      fee: '0',
      protocolIdentifier: this.identifier,
      network: this.network
    }))
    const unshieldDetails: IAirGapTransaction[] = wrappedTransactions
      .map((wrappedTransaction: TezosSaplingWrappedTransaction) => {
        if (wrappedTransaction.unshieldTarget === undefined) {
          return undefined
        }
        // Whatever is not re-shielded as an output leaves the pool.
        const amount: BigNumber = this.sumNotes(inputs).minus(this.sumNotes(outputs))
        return {
          from: [sender.getValue()],
          to: [wrappedTransaction.unshieldTarget.getValue()],
          isInbound: false,
          amount: amount.toFixed(),
          fee: '0',
          protocolIdentifier: this.identifier,
          network: this.network
        }
      })
      .filter((details: IAirGapTransaction | undefined) => details !== undefined) as IAirGapTransaction[]
    return outputsDetails.concat(unshieldDetails)
  }
  /**
   * Decodes each signed wrapped transaction and extracts whatever partial
   * details the supplied viewing keys can decrypt; adds an entry for the
   * unshield target when present (with the transaction balance as amount).
   */
  public async getWrappedTransactionsPartialDetails(
    wrappedTransactions: TezosSaplingWrappedTransaction[],
    knownViewingKeys: string[] = []
  ): Promise<Partial<IAirGapTransaction>[]> {
    const partials: Partial<IAirGapTransaction>[][] = await Promise.all(
      wrappedTransactions.map(async (wrappedTransaction: TezosSaplingWrappedTransaction) => {
        const transaction: TezosSaplingTransaction = this.encoder.decodeTransaction(Buffer.from(wrappedTransaction.signed, 'hex'))
        const [from, details]: [string | undefined, Partial<IAirGapTransaction>[]] = await this.getTransactionPartialDetails(
          transaction,
          knownViewingKeys
        )
        if (wrappedTransaction.unshieldTarget !== undefined) {
          let unshieldDetails: Partial<IAirGapTransaction> = {
            to: [wrappedTransaction.unshieldTarget.getValue()],
            amount: transaction.balance.toFixed()
          }
          if (from !== undefined) {
            unshieldDetails = Object.assign(unshieldDetails, { from: [(await TezosSaplingAddress.fromViewingKey(from)).getValue()] })
          }
          details.push(unshieldDetails)
        }
        return details
      })
    )
    return flattenArray(partials)
  }
  /**
   * Produces one partial detail per output description. The sender is only
   * searched for when the transaction actually spends notes (has spend
   * descriptions); fields that cannot be decrypted are simply omitted.
   */
  private async getTransactionPartialDetails(
    transaction: TezosSaplingTransaction,
    knownViewingKeys: string[]
  ): Promise<[string | undefined, Partial<IAirGapTransaction>[]]> {
    const sender: string | undefined =
      transaction.spendDescriptions.length === 0 ? undefined : await this.findSender(transaction, knownViewingKeys)
    const details: Partial<IAirGapTransaction>[] = await Promise.all(
      transaction.outputDescriptions.map(async (description: TezosSaplingOutputDescription) => {
        const recipient: string | undefined = await this.findRecipient(description, knownViewingKeys)
        const from: TezosSaplingAddress | undefined = sender !== undefined ? await TezosSaplingAddress.fromViewingKey(sender) : undefined
        const [to, amount]: [TezosSaplingAddress | undefined, BigNumber | undefined] = await this.decodeDetailsFromOutputDescription(
          sender,
          recipient,
          description
        )
        let outputDetails: Partial<IAirGapTransaction> = {}
        if (from !== undefined) {
          outputDetails = Object.assign(outputDetails, { from: [from.getValue()] })
        }
        if (to !== undefined) {
          outputDetails = Object.assign(outputDetails, { to: [to.getValue()] })
        }
        if (amount !== undefined) {
          outputDetails = Object.assign(outputDetails, { amount: amount.toFixed() })
        }
        return outputDetails
      })
    )
    return [sender, details]
  }
  /**
   * Returns the first viewing key that can decrypt the "out" ciphertext of
   * the transaction's first output description — only the sender can do that.
   */
  private async findSender(transaction: TezosSaplingTransaction, viewingKeys: string[]): Promise<string | undefined> {
    return (
      await Promise.all(
        viewingKeys.map(async (viewingKey: string) => {
          try {
            // a viewing key is the sender if it can decrypt any ciphertext payload out from the transaction
            const outputDescription: TezosSaplingOutputDescription = transaction.outputDescriptions[0]
            await this.cryptoClient.decryptCiphertextOut(viewingKey, outputDescription.ciphertext, outputDescription.cm)
            return viewingKey
          } catch (error) {
            // Decryption failure just means this key is not the sender.
            return undefined
          }
        })
      )
    ).find((viewingKey: string | undefined) => viewingKey !== undefined)
  }
  /**
   * Returns the first viewing key that can decrypt the "enc" ciphertext of
   * the given output description — i.e. the key the note was addressed to.
   */
  private async findRecipient(outputDescription: TezosSaplingOutputDescription, viewingKeys: string[]): Promise<string | undefined> {
    return (
      await Promise.all(
        viewingKeys.map(async (viewingKey: string) => {
          try {
            // a viewing key is the recipient if it can decrypt the ciphertext payload enc from output description
            await this.cryptoClient.decryptCiphertextEnc(viewingKey, outputDescription.ciphertext)
            return viewingKey
          } catch (error) {
            // Decryption failure just means this key is not the recipient.
            return undefined
          }
        })
      )
    ).find((viewingKey: string | undefined) => viewingKey !== undefined)
  }
  /**
   * Decrypts the output's address and amount using whichever key is known,
   * preferring the sender's view; returns [undefined, undefined] when
   * neither key is available.
   */
  private async decodeDetailsFromOutputDescription(
    sender: string | undefined,
    recipient: string | undefined,
    output: TezosSaplingOutputDescription
  ): Promise<[TezosSaplingAddress | undefined, BigNumber | undefined]> {
    const viewingKey: string | undefined = sender ?? recipient
    if (viewingKey !== undefined) {
      const { address, amount } = await this.cryptoClient.decryptCiphertextEnc(
        viewingKey,
        output.ciphertext,
        viewingKey === sender ? 'sender' : 'receiver',
        output.cm
      )
      return [await TezosSaplingAddress.fromRaw(address), amount]
    }
    return [undefined, undefined]
  }
  /** Sums the `value` field over a list of notes (inputs or outputs). */
  public sumNotes(notes: (TezosSaplingInput | TezosSaplingOutput)[]): BigNumber {
    return notes.reduce((sum: BigNumber, next: TezosSaplingInput | TezosSaplingOutput) => sum.plus(next.value), new BigNumber(0))
  }
  /**
   * Collects notes genuinely received by `viewingKey`: decryptable as the
   * receiver, NOT decryptable as the sender (which would mean change to
   * self), and whose commitment verifies.
   */
  public async getIncomingInputs(
    viewingKey: Buffer | string,
    commitmentsWithCiphertext: [string, TezosSaplingCiphertext, BigNumber][]
  ): Promise<TezosSaplingInput[]> {
    const inputs: TezosSaplingInput[] = (
      await Promise.all(
        commitmentsWithCiphertext.map(async ([commitment, ciphertext, position]: [string, TezosSaplingCiphertext, BigNumber]) => {
          const decrypted = await this.getIncomingInputFromCiphertext(viewingKey, ciphertext, commitment, position)
          if (decrypted === undefined || !(await this.verifyCommitment(decrypted[1], commitment))) {
            return undefined
          }
          return decrypted[1]
        })
      )
    ).filter((input: TezosSaplingInput | undefined) => input !== undefined) as TezosSaplingInput[]
    return inputs
  }
  /**
   * Collects notes sent by `viewingKey` to other parties: decryptable as
   * the sender, NOT decryptable as the receiver. Commitment verification is
   * skipped for entries with an empty address.
   */
  public async getOutgoingInputs(
    viewingKey: Buffer | string,
    commitmentsWithCiphertext: [string, TezosSaplingCiphertext, BigNumber][]
  ): Promise<TezosSaplingInput[]> {
    const inputs: TezosSaplingInput[] = (
      await Promise.all(
        commitmentsWithCiphertext.map(async ([commitment, ciphertext, position]: [string, TezosSaplingCiphertext, BigNumber]) => {
          const decrypted = await this.getOutgoingInputFromCiphertext(viewingKey, ciphertext, commitment, position)
          if (decrypted === undefined || (decrypted[1].address !== '' && !(await this.verifyCommitment(decrypted[1], commitment)))) {
            return undefined
          }
          return decrypted[1]
        })
      )
    ).filter((input: TezosSaplingInput | undefined) => input !== undefined) as TezosSaplingInput[]
    return inputs
  }
  /**
   * Returns the key's decryptable notes whose nullifier does NOT appear in
   * `nullifiers` — i.e. notes that have not been spent yet.
   */
  public async getUnspends(
    viewingKey: Buffer | string,
    commitmentsWithCiphertext: [string, TezosSaplingCiphertext][],
    nullifiers: string[]
  ): Promise<TezosSaplingInput[]> {
    const nullifiersSet: Set<string> = new Set(nullifiers.map((nullifier: string) => stripHexPrefix(nullifier)))
    const inputs: TezosSaplingInput[] = await this.getInputs(viewingKey, commitmentsWithCiphertext)
    const unspends: TezosSaplingInput[] = (
      await Promise.all(
        inputs.map(async (input: TezosSaplingInput) => {
          const nullifier: Buffer = await sapling.computeNullifier(
            viewingKey,
            (
              await TezosSaplingAddress.fromValue(input.address)
            ).raw,
            input.value,
            input.rcm,
            input.pos
          )
          return !nullifiersSet.has(nullifier.toString('hex')) ? input : undefined
        })
      )
    ).filter((input: TezosSaplingInput | undefined) => input !== undefined) as TezosSaplingInput[]
    return unspends
  }
  /**
   * Decrypts every commitment/ciphertext pair as the receiver, using the
   * array index as the note position, and keeps only entries whose
   * commitment verifies against the decrypted note.
   */
  private async getInputs(
    viewingKey: Buffer | string,
    commitmentsWithCiphertext: [string, TezosSaplingCiphertext][]
  ): Promise<TezosSaplingInput[]> {
    const inputs: TezosSaplingInput[] = (
      await Promise.all(
        commitmentsWithCiphertext.map(async ([commitment, ciphertext]: [string, TezosSaplingCiphertext], index: number) => {
          const decrypted: [Buffer, TezosSaplingInput] | undefined = await this.getReceiverInputFromCiphertext(
            Buffer.isBuffer(viewingKey) ? viewingKey : Buffer.from(viewingKey, 'hex'),
            ciphertext,
            new BigNumber(index)
          )
          if (decrypted === undefined || !(await this.verifyCommitment(decrypted[1], commitment))) {
            return undefined
          }
          return decrypted[1]
        })
      )
    ).filter((input: TezosSaplingInput | undefined) => input !== undefined) as TezosSaplingInput[]
    return inputs
  }
  /**
   * Attempts receiver-side decryption of a ciphertext; reconstructs the
   * payment address from the diversifier via the incoming viewing key.
   * Returns [memo, input], or undefined when decryption fails.
   */
  private async getReceiverInputFromCiphertext(
    viewingKey: Buffer | string,
    ciphertext: TezosSaplingCiphertext,
    position: BigNumber
  ): Promise<[Buffer, TezosSaplingInput] | undefined> {
    try {
      const { diversifier, amount, rcm, memo } = await this.cryptoClient.decryptCiphertextEnc(viewingKey, ciphertext, 'receiver')
      const ivk: Buffer = await sapling.getIncomingViewingKey(viewingKey)
      const address: Buffer = await sapling.getRawPaymentAddressFromIncomingViewingKey(ivk, diversifier)
      const input: TezosSaplingInput = {
        rcm: rcm.toString('hex'),
        pos: position.toString(),
        value: amount.toString(),
        address: (await TezosSaplingAddress.fromRaw(address)).getValue()
      }
      return [Buffer.from(memo), input]
    } catch {
      // Not decryptable with this key in receiver mode.
      return undefined
    }
  }
  /**
   * Attempts sender-side decryption (requires the commitment); the address
   * comes directly from the decrypted payload. Returns [memo, input], or
   * undefined when decryption fails.
   */
  private async getSenderInputFromCiphertext(
    viewingKey: Buffer | string,
    ciphertext: TezosSaplingCiphertext,
    commitment: string,
    position: BigNumber
  ): Promise<[Buffer, TezosSaplingInput] | undefined> {
    try {
      const { amount, address, rcm, memo } = await this.cryptoClient.decryptCiphertextEnc(viewingKey, ciphertext, 'sender', commitment)
      const input: TezosSaplingInput = {
        rcm: rcm.toString('hex'),
        pos: position.toString(),
        value: amount.toString(),
        address: (await TezosSaplingAddress.fromRaw(address)).getValue()
      }
      return [Buffer.from(memo), input]
    } catch {
      // Not decryptable with this key in sender mode.
      return undefined
    }
  }
  /**
   * Receiver-decryptable note that is NOT also sender-decryptable, i.e. a
   * true incoming payment rather than change sent to self.
   */
  private async getIncomingInputFromCiphertext(
    viewingKey: Buffer | string,
    ciphertext: TezosSaplingCiphertext,
    commitment: string,
    position: BigNumber
  ): Promise<[Buffer, TezosSaplingInput] | undefined> {
    const inputWithMemo: [Buffer, TezosSaplingInput] | undefined = await this.getReceiverInputFromCiphertext(
      viewingKey,
      ciphertext,
      position
    )
    if (inputWithMemo === undefined) {
      return undefined
    }
    try {
      await this.cryptoClient.decryptCiphertextEnc(viewingKey, ciphertext, 'sender', commitment)
      // ciphertext can be decrypted, the receiver is also the sender
      return undefined
    } catch (error) {
      // ciphertext could not be decrypted, the receiver is not the sender
      return inputWithMemo
    }
  }
  /**
   * Sender-decryptable note that is NOT also receiver-decryptable, i.e. a
   * true outgoing payment rather than change sent to self.
   */
  private async getOutgoingInputFromCiphertext(
    viewingKey: Buffer | string,
    ciphertext: TezosSaplingCiphertext,
    commitment: string,
    position: BigNumber
  ): Promise<[Buffer, TezosSaplingInput] | undefined> {
    const inputWithMemo: [Buffer, TezosSaplingInput] | undefined = await this.getSenderInputFromCiphertext(
      viewingKey,
      ciphertext,
      commitment,
      position
    )
    if (inputWithMemo === undefined) {
      return undefined
    }
    try {
      await this.cryptoClient.decryptCiphertextEnc(viewingKey, ciphertext, 'receiver')
      // ciphertext can be decrypted, the sender is also the receiver
      return undefined
    } catch {
      // ciphertext could not be decrypted, the sender is not the receiver
      return inputWithMemo
    }
  }
  /** Checks the decrypted note against the on-chain commitment. */
  private async verifyCommitment(input: TezosSaplingInput, expectedCommitment: string): Promise<boolean> {
    return sapling.verifyCommitment(expectedCommitment, (await TezosSaplingAddress.fromValue(input.address)).raw, input.value, input.rcm)
  }
}
import {
removeNode, arrayForEach, options, domData
} from '@tko/utils'
import {
observable, observableArray, isObservable
} from '@tko/observable'
import {
computed
} from '@tko/computed'
import {
contextFor, dataFor, applyBindings
} from '@tko/bind'
import { DataBindProvider } from '@tko/provider.databind'
import { VirtualProvider } from '@tko/provider.virtual'
import { MultiProvider } from '@tko/provider.multi'
import {
bindings as coreBindings
} from '@tko/binding.core'
import {
ForEachBinding
} from '../dist/foreach'
import $ from 'jquery'
beforeEach(function () {
  // Install a fresh binding provider (data-bind attributes + virtual
  // comment elements) before every spec, then register the handlers
  // under test.
  const bindingProvider = new MultiProvider({
    providers: [new DataBindProvider(), new VirtualProvider()]
  })
  options.bindingProviderInstance = bindingProvider
  bindingProvider.bindingHandlers.set(coreBindings)
  bindingProvider.bindingHandlers.set({ foreach: ForEachBinding })
  // provider.bindingHandlers.set(ifBindings);
})
beforeEach(function () {
  // Run the foreach binding synchronously by default so specs can assert
  // DOM state immediately after applyBindings/array changes.
  ForEachBinding.setSync(true)
})
describe('each binding', function () {
  // NOTE(review): several templates below open with <ul> but close with
  // </div>; jQuery's parser tolerates this, so the markup is kept as-is.
  it('works with a static list', function () {
    var target = $("<ul data-bind='foreach: $data'><li data-bind='text: $data'></li></div>")
    var list = [1, 2, 3]
    applyBindings(list, target[0])
    assert.equal($(target).find('li').length, 3)
  })
  it('works with an observable array', function () {
    var target = $("<ul data-bind='foreach: $data'><li data-bind='text: $data'></li></div>")
    var list = [1, 2, 3]
    applyBindings(observableArray(list), target[0])
    assert.equal($(target).find('li').length, 3)
  })
  it('works with a plain observable with an array', function () {
    var target = $("<ul data-bind='foreach: $data'><li data-bind='text: $data'></li></div>")
    var list = [1, 2, 3]
    applyBindings(observable(list), target[0])
    assert.equal($(target).find('li').length, 3)
  })
  it('works with a computed observable', function () {
    var target = $("<ul data-bind='foreach: $data'><li data-bind='text: $data'></li></div>")
    var list = [1, 2, 3]
    applyBindings(computed({read: function () { return list }}), target[0])
    assert.equal($(target).find('li').length, 3)
  })
  it('processes initial data synchronously', function () {
    // Even in async mode the first render must happen synchronously.
    ForEachBinding.setSync(false)
    var target = $("<ul data-bind='foreach: $data'><li data-bind='text: $data'></li></div>")
    var list = [1, 2, 3]
    applyBindings(computed({ read: function () { return list } }), target[0])
    assert.equal($(target).find('li').length, 3)
  })
  it('processes initial data synchronously but is later asynchronous', function () {
    ForEachBinding.setSync(false)
    // reset to the default async animateFrame
    // Updates after the initial render are deferred, so the pushed item
    // must not be visible yet.
    var target = $("<ul data-bind='foreach: $data'><li data-bind='text: $data'></li></div>")
    var list = observableArray([1, 2, 3])
    applyBindings(list, target[0])
    assert.equal($(target).find('li').length, 3)
    list.push(4)
    assert.equal($(target).find('li').length, 3)
    // TODO: add logic to test if the update really happened
  })
  it('applies bindings to the immediate child', function () {
    var target = $("<ul data-bind='foreach: $data'><li data-bind='text: $data'></li></div>")
    var list = ['a', 'b', 'c']
    applyBindings(list, target[0])
    assert.equal($(target).find('li').text(), 'abc')
  })
  it('applies to inner children', function () {
    var target = $("<ul data-bind='foreach: $data'><li><em data-bind='text: $data'></em></li></div>")
    var list = ['a', 'b', 'c']
    applyBindings(list, target[0])
    assert.equal($(target).html(), '<li><em data-bind="text: $data">a</em></li>' +
      '<li><em data-bind="text: $data">b</em></li>' +
      '<li><em data-bind="text: $data">c</em></li>')
  })
  it('works with virtual elements', function () {
    var target = $("<div><!-- ko foreach: $data --><em data-bind='text: $data'></em><!-- /ko --></div>")
    var list = ['A', 'B']
    applyBindings(list, target[0])
    assert.equal($(target).html(), '<!-- ko foreach: $data -->' +
      '<em data-bind="text: $data">A</em>' +
      '<em data-bind="text: $data">B</em>' +
      '<!-- /ko -->')
  })
  it('bindings only inner (virtual) element', function () {
    var target = $("<ul data-bind='foreach: $data'><!-- ko text: $data -->Z<!-- /ko --></ul>")
    var list = ['E', 'V']
    applyBindings(list, target[0])
    assert.equal(target.html(), '<!-- ko text: $data -->E<!-- /ko -->' +
      '<!-- ko text: $data -->V<!-- /ko -->')
  })
  it('bindings mixed inner virtual elements', function () {
    var target = $("<ul data-bind='foreach: $data'>Q<!-- ko text: $data -->Z2<!-- /ko -->R</ul>")
    var list = ['E2', 'V2']
    applyBindings(list, target[0])
    assert.equal(target.html(), 'Q<!-- ko text: $data -->E2<!-- /ko -->R' +
      'Q<!-- ko text: $data -->V2<!-- /ko -->R')
  })
  it('uses the name/id of a <template>', function () {
    var target = $("<ul data-bind='foreach: {name: \"tID\", data: $data}'>Zee</ul>")
    var list = ['F1', 'F2']
    var $template = $("<template id='tID'>X<!-- ko text: $data--><!--/ko--></template>")
      .appendTo(document.body)
    applyBindings(list, target[0])
    assert.equal(target.html(), 'X<!-- ko text: $data-->F1<!--/ko-->' +
      'X<!-- ko text: $data-->F2<!--/ko-->')
    $template.remove()
  })
  it('uses the name/id of a <script>', function () {
    var target = $("<ul data-bind='foreach: {name: \"tID\", data: $data}'>Zee</ul>")
    var list = ['G1', 'G2']
    var $template = $("<script type='text/ko-template' id='tID'></script>")
      .appendTo(document.body)
    $template.text('Y<!-- ko text: $data--><!--/ko-->')
    applyBindings(list, target[0])
    assert.equal(target.html(), 'Y<!-- ko text: $data-->G1<!--/ko-->' +
      'Y<!-- ko text: $data-->G2<!--/ko-->')
    $template.remove()
  })
  it('uses the name/id of a <div>', function () {
    var target = $("<ul data-bind='foreach: {name: \"tID2\", data: $data}'>Zee</ul>")
    var list = ['H1', 'H2']
    var $template = $("<div id='tID2'>Z<!-- ko text: $data--><!--/ko--></div>")
      .appendTo(document.body)
    applyBindings(list, target[0])
    assert.equal(target.html(), 'Z<!-- ko text: $data-->H1<!--/ko-->' +
      'Z<!-- ko text: $data-->H2<!--/ko-->')
    $template.remove()
  })
})
describe('is empty/conditional', function () {
// These specs assert the 'conditional' DOM-data contract used by else-chains:
// elseChainSatisfied() is false while the bound array is empty/undefined and
// true once it contains items.
it('sets `elseChainSatisfied` to false for an empty array', function () {
  var div = $("<div data-bind='foreach: obs'><i data-bind='text: $data'></i></div>")
  var obs = []
  var view = {obs: obs}
  applyBindings(view, div[0])
  assert.equal(domData.get(div[0], 'conditional').elseChainSatisfied(), false)
})
it('sets `elseChainSatisfied` to false for an undefined obs array', function () {
  var div = $("<div data-bind='foreach: obs'><i data-bind='text: $data'></i></div>")
  var obs = observableArray()
  var view = {obs: obs}
  applyBindings(view, div[0])
  assert.equal(domData.get(div[0], 'conditional').elseChainSatisfied(), false)
})
it('sets `elseChainSatisfied` to false for an empty obs array', function () {
  var div = $("<div data-bind='foreach: obs'><i data-bind='text: $data'></i></div>")
  var obs = observableArray([])
  var view = {obs: obs}
  applyBindings(view, div[0])
  assert.equal(domData.get(div[0], 'conditional').elseChainSatisfied(), false)
})
it('sets `elseChainSatisfied` to true for a non-empty array', function () {
  var div = $("<div data-bind='foreach: obs'><i data-bind='text: $data'></i></div>")
  var obs = [1, 2, 3]
  var view = {obs: obs}
  applyBindings(view, div[0])
  assert.equal(domData.get(div[0], 'conditional').elseChainSatisfied(), true)
})
it('sets `elseChainSatisfied` to true for a non-empty obs array', function () {
  var div = $("<div data-bind='foreach: obs'><i data-bind='text: $data'></i></div>")
  var obs = observableArray([1, 2, 3])
  var view = {obs: obs}
  applyBindings(view, div[0])
  assert.equal(domData.get(div[0], 'conditional').elseChainSatisfied(), true)
})
it('sets `elseChainSatisfied` to true after array is filled', function () {
  var div = $("<div data-bind='foreach: obs'><i data-bind='text: $data'></i></div>")
  var obs = observableArray([])
  var view = {obs: obs}
  applyBindings(view, div[0])
  // Filling the array after binding must flip the flag (sync mode).
  obs([1, 2, 3])
  assert.equal(domData.get(div[0], 'conditional').elseChainSatisfied(), true)
})
it('sets `elseChainSatisfied` to false after array is emptied', function () {
var div = $("<div data-bind='foreach: obs'><i data-bind='text: $data'></i></div>")
var obs = observableArray([1, 2, 3])
var view = {obs: obs}
applyBindings(view, div[0])
obs([])
assert.equal(domData.get(div[0], 'conditional').elseChainSatisfied(), false)
})
})
// Shared fixture for the observable-array change tests below: a fresh
// foreach-bound <div> and an empty observableArray before every test.
describe('observable array changes', function () {
var div, obs, view
beforeEach(function () {
div = $("<div data-bind='foreach: obs'><i data-bind='text: $data'></i></div>")
obs = observableArray()
view = {obs: obs}
})
// Basic insertions and splices: the rendered text must always equal the
// array contents joined in order.
it('adds an item to an empty list', function () {
applyBindings(view, div[0])
obs(['a'])
assert.equal(div.text(), 'a')
})
it('adds an item to the end of a pre-existing list', function () {
obs(['a'])
applyBindings(view, div[0])
obs.push('b')
assert.equal(div.text(), 'ab')
})
it('adds an item to the beginning of a pre-existing list', function () {
obs(['a'])
applyBindings(view, div[0])
obs.unshift('b')
assert.equal(div.text(), 'ba')
})
it('adds an item to the middle of a pre-existing list', function () {
obs(['a', 'b'])
applyBindings(view, div[0])
obs.splice(1, 0, 'c')
assert.equal(div.text(), 'acb')
})
it('splices items at the beginning of a pre-existing list', function () {
obs(['a', 'b', 'c'])
applyBindings(view, div[0])
obs.splice(0, 1, 'd')
assert.equal(div.text(), 'dbc')
})
it('removes items at the middle of a pre-existing list', function () {
obs(['a', 'b', 'c'])
applyBindings(view, div[0])
obs.splice(0, 1)
assert.equal(div.text(), 'bc')
})
it('splices items at the middle of a pre-existing list', function () {
obs(['a', 'b', 'c'])
applyBindings(view, div[0])
obs.splice(1, 1, 'D')
assert.equal(div.text(), 'aDc')
})
it('splices items at the end of a pre-existing list', function () {
obs(['a', 'b', 'c'])
applyBindings(view, div[0])
obs.splice(2, 1, 'D')
assert.equal(div.text(), 'abD')
})
it('deletes the last item', function () {
obs(['a'])
applyBindings(view, div[0])
obs([])
assert.equal(div.text(), '')
})
// The static text nodes ('x'/'y') flanking the bound <i> belong to each
// repeated row and must be removed along with it.
it('deletes text nodes', function () {
div = $("<div data-bind='foreach: obs'>x<i data-bind='text: $data'></i>y</div>")
applyBindings(view, div[0])
obs(['a', 'b', 'c'])
assert.equal(div.text(), 'xayxbyxcy')
obs(['a', 'c'])
assert.equal(div.text(), 'xayxcy')
obs(['a'])
assert.equal(div.text(), 'xay')
obs([])
assert.equal(div.text(), '')
})
// Same deletion behaviour when foreach is applied via comment-based
// (virtual) elements rather than a container element.
it('deletes from virtual elements', function () {
div = $('<div>')
div.append(document.createComment('ko foreach: obs'))
div.append($("<i data-bind='text: $data'></i>")[0])
div.append(document.createComment('/ko'))
applyBindings(view, div[0])
obs(['a', 'b', 'c'])
assert.equal(div.text(), 'abc')
obs(['a', 'c'])
assert.equal(div.text(), 'ac')
obs(['a'])
assert.equal(div.text(), 'a')
obs([])
assert.equal(div.text(), '')
obs(['a', 'b'])
assert.equal(div.text(), 'ab')
obs([])
assert.equal(div.text(), '')
obs(['a', 'b', 'c'])
assert.equal(div.text(), 'abc')
obs(['a'])
assert.equal(div.text(), 'a')
obs(['a', 'b', 'c'])
assert.equal(div.text(), 'abc')
obs(['c'])
assert.equal(div.text(), 'c')
})
// Deletion via the array mutator methods, plus one combined changeset.
it('deletes from the beginning / shift', function () {
  obs(['a', 'b', 'c'])
  applyBindings(view, div[0])
  obs.shift()
  assert.equal(div.text(), 'bc')
})
// Test title fixed: Array#pop removes from the END of the list, not the
// beginning — the assertion below has always checked end-removal.
it('deletes from the end / pop', function () {
  obs(['a', 'b', 'c'])
  applyBindings(view, div[0])
  obs.pop()
  assert.equal(div.text(), 'ab')
})
it('combines multiple adds and deletes', function () {
  obs(['A', 'B', 'C', 'D', 'E', 'F'])
  applyBindings(view, div[0])
  obs(['x', 'B', 'C', 'D', 'z', 'F'])
  assert.equal(div.text(), 'xBCDzF')
})
// Regression for issue #6: repeated grow/shrink cycles must keep the DOM
// text exactly in step with the array contents.
it('processes multiple deletes', function () {
// Per issue #6
applyBindings(view, div[0])
obs([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
assert.equal(div.text(), '0123456789')
obs([1, 2, 3, 4, 5, 6, 7, 8])
assert.equal(div.text(), '12345678')
obs([2, 3, 4, 5, 6, 7, 8, 9])
assert.equal(div.text(), '23456789')
obs([3, 4, 5, 6, 7, 8, 9])
assert.equal(div.text(), '3456789')
obs([2, 3, 4, 5, 6, 7, 8, 9])
assert.equal(div.text(), '23456789')
obs([6, 7, 8, 9])
assert.equal(div.text(), '6789')
obs([1, 2, 3, 6, 7, 8])
assert.equal(div.text(), '123678')
obs([0, 1, 2, 3, 4])
assert.equal(div.text(), '01234')
obs([1, 2, 3, 4])
assert.equal(div.text(), '1234')
obs([3, 4])
assert.equal(div.text(), '34')
obs([3])
assert.equal(div.text(), '3')
obs([])
assert.equal(div.text(), '')
})
it('processes numerous changes', function () {
applyBindings(view, div[0])
obs([5, 6, 7, 8, 9])
assert.equal(div.text(), '56789')
obs([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
assert.equal(div.text(), '0123456789')
obs(['a', 'b', 'c'])
assert.equal(div.text(), 'abc')
})
it('processes numerous changes with splice', function () {
applyBindings(view, div[0])
obs([5, 6, 7, 8, 9])
assert.equal(div.text(), '56789')
obs.splice(1, 2, 16, 17)
assert.equal(div.text(), '5161789')
obs.splice(0, 5, 'a', 'b', 'c')
assert.equal(div.text(), 'abc')
})
// foreach can be driven by a computed; swapping the array it returns must
// re-render. Fixture typo fixed: the <ul> was closed with </div> (jQuery
// silently repaired it); the markup is now well-formed.
it('accepts changes via a computed observable', function () {
  var target = $("<ul data-bind='foreach: $data'><li data-bind='text: $data'></li></ul>")
  var toggle = observable(true)
  var list1 = [1, 2, 3]
  var list2 = [1, 2, 3, 4, 5, 6]
  applyBindings(computed({
    read: function () { return toggle() ? list1 : list2 }
  }), target[0])
  assert.equal(target.text(), '123')
  toggle(false)
  assert.equal(target.text(), '123456')
})
// Re-sorting complex (object) data must MOVE the existing DOM nodes rather
// than destroy and recreate them — verified by comparing node identity
// before and after the sort.
describe('DOM move capabilities', function () {
it('sorting complex data moves 1 DOM node', function () {
div = $("<div data-bind='foreach: obs'><div data-bind='html: testHtml'></div></div>")
applyBindings(view, div[0])
obs([{ id: 4, testHtml: '<span>A</span>' }, { id: 6, testHtml: '<span>B</span>' }, { id: 1, testHtml: '<span>C</span>' }])
var nodes = div.children().toArray()
assert.equal(div.text(), 'ABC')
obs.sort(function (a, b) { return a.id - b.id })
var nodes2 = div.children().toArray()
assert.strictEqual(nodes[1], nodes2[2])
assert.strictEqual(nodes[2], nodes2[0])
assert.strictEqual(nodes[0], nodes2[1])
assert.equal(div.text(), 'CAB')
})
it('sorting complex data moves all DOM nodes', function () {
div = $("<div data-bind='foreach: obs'><div data-bind='html: testHtml'></div></div>")
applyBindings(view, div[0])
obs([{ id: 7, testHtml: '<span>A</span>' }, { id: 6, testHtml: '<span>B</span>' }, { id: 1, testHtml: '<span>C</span>' }, { id: 9, testHtml: '<span>D</span>' }])
var nodes = div.children().toArray()
assert.equal(div.text(), 'ABCD')
obs.reverse()
var nodes2 = div.children().toArray()
assert.strictEqual(nodes[0], nodes2[3])
assert.strictEqual(nodes[1], nodes2[2])
assert.strictEqual(nodes[2], nodes2[1])
assert.strictEqual(nodes[3], nodes2[0])
assert.equal(div.text(), 'DCBA')
})
// With shouldDelayDeletion stubbed to false the move optimisation is
// disabled, so the nodes must be recreated (notStrictEqual identity checks);
// the stub is restored at the end of the test.
it('sorting complex data recreates DOM nodes if move disabled', function () {
var originalShouldDelayDeletion = ForEachBinding.prototype.shouldDelayDeletion
ForEachBinding.prototype.shouldDelayDeletion = function (/* data */) { return false }
div = $("<div data-bind='foreach: { data: obs }'><div data-bind='html: testHtml'></div></div>")
applyBindings(view, div[0])
obs([{ id: 7, testHtml: '<span>A</span>' }, { id: 6, testHtml: '<span>B</span>' }, { id: 1, testHtml: '<span>C</span>' }])
var nodes = div.children().toArray()
assert.equal(div.text(), 'ABC')
obs.sort(function (a, b) { return a.id - b.id })
var nodes2 = div.children().toArray()
assert.equal(div.text(), 'CBA')
assert.notStrictEqual(nodes[1], nodes2[2])
assert.notStrictEqual(nodes[2], nodes2[0])
assert.notStrictEqual(nodes[0], nodes2[1])
ForEachBinding.prototype.shouldDelayDeletion = originalShouldDelayDeletion
})
it('Sort large complex array makes correct DOM moves', function () {
var itemNumber = 100
div = $("<div data-bind='foreach: { data: obs }'><div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div></div></div>")
applyBindings(view, div[0])
var arr = [], i
for (i = 0; i != itemNumber; ++i) {
arr.push({ id: Math.floor(Math.random() * itemNumber), testHtml: '<span>Item ' + i + '</span>' })
}
obs(arr)
assert.equal(div.children().length, itemNumber)
div.children().prop('testprop', 10)
// console.time("with move");
obs.sort(function (a, b) { return a.id - b.id })
// console.timeEnd("with move");
for (i = 0; i != itemNumber; ++i) {
arr[i].num = i
}
assert.equal(div.children().length, itemNumber)
assert.equal(div.children().filter(function () { return this.testprop == 10 }).length, itemNumber)
div.children().each(function (index) {
assert.equal(index, dataFor(this).num)
})
})
it('Sort large complex array makes correct DOM order without move', function () {
var originalShouldDelayDeletion = ForEachBinding.prototype.shouldDelayDeletion
ForEachBinding.prototype.shouldDelayDeletion = function (/* data */) { return false }
var itemNumber = 100
div = $("<div data-bind='foreach: { data: obs }'><div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div><div data-bind='html: testHtml'></div></div></div>")
applyBindings(view, div[0])
var arr = [], i
for (i = 0; i != itemNumber; ++i) {
arr.push({ id: Math.floor(Math.random() * itemNumber), testHtml: '<span>Item ' + i + '</span>' })
}
obs(arr)
assert.equal(div.children().length, itemNumber)
obs.sort(function (a, b) { return a.id - b.id })
for (i = 0; i != itemNumber; ++i) {
arr[i].num = i
}
assert.equal(div.children().length, itemNumber)
div.children().each(function (index) {
assert.equal(index, dataFor(this).num)
})
ForEachBinding.prototype.shouldDelayDeletion = originalShouldDelayDeletion
})
// Node reuse with duplicate references: when the same object appears several
// times, existing DOM nodes should be moved/reused rather than recreated
// (tracked via expando properties set on the nodes).
it('processes duplicate data 1', function () {
div = $("<div data-bind='foreach: obs'><div data-bind='html: testHtml'></div></div>")
applyBindings(view, div[0])
var itemA = { id: 4, testHtml: '<span>A</span>' }
var itemB = { id: 6, testHtml: '<span>B</span>' }
obs([itemB, itemA, itemA, itemA])
var nodes = div.children().toArray()
assert.equal(div.text(), 'BAAA')
obs([itemA, itemB])
var nodes2 = div.children().toArray()
assert.strictEqual(nodes[3], nodes2[0])
assert.strictEqual(nodes[0], nodes2[1])
assert.equal(div.text(), 'AB')
})
it('processes duplicate data 2', function () {
div = $("<div data-bind='foreach: obs'><div data-bind='html: testHtml'></div></div>")
applyBindings(view, div[0])
var itemA = { id: 4, testHtml: '<span>A</span>' }
var itemB = { id: 6, testHtml: '<span>B</span>' }
var others = [1, 2, 3, 4].map(function (e) { return { id: e, testHtml: '' } })
obs([itemB, others[0], others[1], others[2], others[3], itemA, itemA])
// var nodes =
div.children().each(function () { this.test = 1 }).toArray()
assert.equal(div.text(), 'BAA')
obs([itemB, itemA, itemA, itemA, itemA, others[0], others[1], others[2], others[3]])
// var nodes2 =
div.children().toArray()
// reuses two 'A' node set
assert.equal(div.children().filter(function () { return this.test == 1 }).length, 7)
// ... and creates two new
assert.equal(div.children().filter(function () { return this.test === undefined }).length, 2)
assert.equal(div.text(), 'BAAAA')
})
// The next two tests drive ForEachBinding manually (async mode with a
// stubbed animateFrame) so several changesets queue up before a single
// processQueue() call applies them all at once.
it('processes changes from more changesets 1', function () {
var originalAnimateFrame = ForEachBinding.animateFrame
ForEachBinding.animateFrame = function () { }
ForEachBinding.setSync(false)
div = $("<div data-bind='visible: true'></div>")
applyBindings({}, div[0])
var itemA = { id: 4, testHtml: '<span>A</span>' }
var others = [11, 12, 13, 14].map(function (e) { return { id: e, testHtml: 'C' + e } })
obs([itemA, others[0], others[1], others[2], others[3]])
// manual initialization to be able to access processQueue method
var ffe = new ForEachBinding({
$element: div[0],
$context: contextFor(div[0]),
allBindings: { get () {} },
valueAccessor () {
return {
data: obs,
templateNode: $("<template><div data-bind='html: testHtml'></div></template>")[0]
}
}
})
ffe.processQueue()
// var nodes =
div.children().each(function () { this.test = 1 }).toArray()
assert.equal(div.text(), 'AC11C12C13C14')
obs([others[0], others[1], others[2], others[3], itemA])
obs([others[1], itemA, others[2], others[3]])
obs.sort(function (a, b) { return b.id - a.id })
// Nothing rendered yet — the three changesets above are only queued.
assert.equal(div.text(), 'AC11C12C13C14')
ffe.processQueue()
assert.equal(div.text(), 'C14C13C12A')
// moved all five nodes around
assert.equal(div.children().filter(function () { return this.test == 1 }).length, 4)
ForEachBinding.animateFrame = originalAnimateFrame
})
// Remove/push cycles of the same item across queued changesets must net out
// to no visible change, reusing the original nodes.
it('processes changes from more changesets 2', function () {
var originalAnimateFrame = ForEachBinding.animateFrame
ForEachBinding.animateFrame = function () { }
ForEachBinding.setSync(false)
div = $("<div data-bind='visible: true'></div>")
applyBindings({}, div[0])
var itemA = { id: 4, testHtml: '<span>A</span>' }
var itemB = { id: 5, testHtml: '<span>B</span>' }
obs([itemA, itemB])
// manual initialization to be able to access processQueue method
var ffe = new ForEachBinding({
$element: div[0],
valueAccessor () {
return {
data: obs,
templateNode: $("<script type='text/html'><div data-bind='html: testHtml'></div></script>")[0]
}
},
allBindings: { get () {} },
$context: contextFor(div[0])
})
ffe.processQueue()
// var nodes =
div.children().each(function () { this.test = 1 }).toArray()
assert.equal(div.text(), 'AB')
obs.remove(itemB)
obs.push(itemB)
obs.remove(itemB)
obs.push(itemB)
obs.remove(itemB)
obs.push(itemB)
assert.equal(div.text(), 'AB')
ffe.processQueue()
assert.equal(div.text(), 'AB')
assert.equal(div.children().filter(function () { return this.test === 1 }).length, 2)
ForEachBinding.animateFrame = originalAnimateFrame
})
// After processing, the internal PENDING_DELETE_INDEX_SYM bookkeeping symbol
// must be stripped from the data objects themselves.
it('cleans data objects', function () {
div = $("<div data-bind='foreach: obs'><div data-bind='html: testHtml'></div></div>")
applyBindings(view, div[0])
var itemA = { id: 4, testHtml: '<span>A</span>' }
var itemB = { id: 6, testHtml: '<span>B</span>' }
var itemC = { id: 6, testHtml: '<span>C</span>' }
obs([itemA, itemB, itemC, itemA])
var nodes = div.children().toArray()
assert.equal(div.text(), 'ABCA')
obs([itemC, itemA, itemB])
var nodes2 = div.children().toArray()
assert.equal(itemA[ForEachBinding.PENDING_DELETE_INDEX_SYM], undefined)
assert.equal(itemB[ForEachBinding.PENDING_DELETE_INDEX_SYM], undefined)
assert.equal(itemC[ForEachBinding.PENDING_DELETE_INDEX_SYM], undefined)
assert.equal(nodes[0], nodes2[1])
assert.equal(div.text(), 'CAB')
})
})
// The afterAdd callback receives { nodeOrArrayInserted } for every batch of
// added rows. Fixture typo fixed in both tests: the <ul> is now closed with
// </ul> (was </div>; jQuery silently repaired the mismatch).
describe('afterAdd', function () {
  it('emits on changes to an observable array', function () {
    var calls = 0
    var nodes = 0
    var arr = observableArray([])
    function cb (v) { calls++; nodes += v.nodeOrArrayInserted.length }
    var target = $("<ul data-bind='foreach: { data: arr, afterAdd: cb }'><li data-bind='text: $data'></li></ul>")
    applyBindings({arr: arr, cb: cb}, target[0])
    assert.equal(calls, 0)
    assert.equal(nodes, 0)
    arr.push('x')
    assert.equal(calls, 1)
    assert.equal(nodes, 1)
    arr([2, 3, 4])
    assert.equal(calls, 2)
    assert.equal(nodes, 4, 'n4')
  })
  it('is called with initial data', function () {
    var calls = 0
    var nodes = 0
    var arr = observableArray(['a', 'b', 'c'])
    function cb (v) { calls++; nodes += v.nodeOrArrayInserted.length }
    var target = $("<ul data-bind='foreach: { data: arr, afterAdd: cb }'><li data-bind='text: $data'></li></ul>")
    applyBindings({arr: arr, cb: cb}, target[0])
    assert.equal(calls, 1)
    assert.equal(nodes, 3)
  })
})
// beforeRemove takes over removal: the callback receives { nodesToRemove }
// and must either remove the nodes itself or return a then-able that
// resolves when removal may proceed. Fixture typo fixed in both tests: the
// <ul> is now closed with </ul> (was </div>; jQuery silently repaired it).
describe('beforeRemove', function () {
  it('emits on remove', function () {
    var cbi = 0
    var arr = observableArray(['a1', 'b1', 'c1'])
    function cb (v) {
      arrayForEach(v.nodesToRemove, function (n) { removeNode(n) })
      cbi++
    }
    var target = $("<ul data-bind='foreach: { data: arr, beforeRemove: cb }'><li data-bind='text: $data'></li></ul>")
    applyBindings({arr: arr, cb: cb}, target[0])
    assert.equal(cbi, 0)
    assert.equal(target.text(), 'a1b1c1')
    arr.pop()
    assert.equal(target.text(), 'a1b1')
    assert.equal(cbi, 1)
    arr([])
    assert.equal(cbi, 3)
    assert.equal(target.text(), '')
  })
  it('removes an element if a `then`-able is passed', function () {
    var cbi = 0
    var arr = observableArray(['a2', 'b2', 'c2'])
    function cb (/* v */) { cbi++; return {then: function (cb) { cb() }} }
    var target = $("<ul data-bind='foreach: { data: arr, beforeRemove: cb }'><li data-bind='text: $data'></li></ul>")
    applyBindings({arr: arr, cb: cb}, target[0])
    assert.equal(cbi, 0)
    assert.equal(target.text(), 'a2b2c2')
    arr.pop()
    assert.equal(target.text(), 'a2b2')
    assert.equal(cbi, 1)
    arr([])
    assert.equal(cbi, 3)
    assert.equal(target.text(), '')
  })
})
// $index is exposed on each child binding context; with `noIndex: true` it
// starts as a plain value and is upgraded to an observable on first read.
describe('$index', function () {
it('is present on the children', function () {
var target = $("<ul data-bind='foreach: $data'><li data-bind='text: $data'></li></ul>")
var list = ['a', 'b', 'c']
applyBindings(list, target[0])
assert.equal(contextFor(target.children()[0]).$index(), 0)
assert.equal(contextFor(target.children()[1]).$index(), 1)
assert.equal(contextFor(target.children()[2]).$index(), 2)
})
it('is present on children of virtual nodes', function () {
var target = $('<div><!-- ko foreach: $data -->' +
"<b data-bind='text: $data'></b>" +
'<!-- /ko --></div>')
var list = ['a', 'b', 'c']
applyBindings(list, target[0])
assert.equal(contextFor(target.children()[0]).$index(), 0)
assert.equal(contextFor(target.children()[1]).$index(), 1)
assert.equal(contextFor(target.children()[2]).$index(), 2)
})
it('is present when template starts with a text node', function () {
var target = document.createElement('ul')
target.innerHTML = "<ul data-bind='foreach: $data'>" +
" <li data-bind='text: $index()'></li>" +
'</ul>'
var list = ['a', 'b', 'c']
applyBindings(list, target)
assert.equal($(target).text(), ' 0 1 2')
})
it('is present on a list of text & comment nodes', function () {
var target = document.createElement('ul')
target.innerHTML = `<div data-bind='foreach: $data'>
<!-- ko text: $index --><!-- /ko --><!-- ko text: $data --><!-- /ko -->
</div>`
var list = ['a', 'b', 'c']
applyBindings(list, target)
assert.equal($(target).text().replace(/\s+/g, ' '), ' 0a 1b 2c ')
})
// $index() may be used inside arbitrary binding expressions.
it('updates as part of a calculation', function () {
var target = document.createElement('ul')
target.innerHTML = `<div data-bind='foreach: $data'>
<!-- ko text: $index() * 10 --><!-- /ko --><!-- ko text: $data --><!-- /ko -->
</div>`
var list = ['a', 'b', 'c']
applyBindings(list, target)
assert.equal($(target).text().replace(/\s+/g, ' '), ' 0a 10b 20c ')
})
it('updates in the middle of a list', function () {
var target = document.createElement('ul')
target.innerHTML = `<div data-bind='foreach: $data'>
<!-- ko text: $data === 'b' ? $index() * 10 : '-' --><!-- /ko -->
<!-- ko text: $data --><!-- /ko -->
</div>`
var list = ['a', 'b', 'c']
applyBindings(list, target)
assert.equal($(target).text().replace(/\s+/g, ' '), ' - a 10 b - c ')
})
// Indices must be renumbered when the underlying observableArray changes.
it('updates when list is modified', function () {
var target = document.createElement('ul')
target.innerHTML = `<div data-bind='foreach: $data'>
<!-- ko text: $index() * 10 --><!-- /ko --><!-- ko text: $data --><!-- /ko -->
</div>`
var list = observableArray(['a', 'b', 'c'])
applyBindings(list, target)
list.splice(0, 0, 'z')
assert.equal($(target).text().replace(/\s+/g, ' '), ' 0z 10a 20b 30c ')
list.splice(2, 1)
assert.equal($(target).text().replace(/\s+/g, ' '), ' 0z 10a 20c ')
})
it('updates the first list item', function () {
var target = $("<ul data-bind='foreach: $data'><li data-bind='text: $data'></li></ul>")
var list = observableArray([])
applyBindings(list, target[0])
list.push('a')
assert.equal(contextFor(target.children()[0]).$index(), 0)
})
it('updates on append', function () {
var target = $("<ul data-bind='foreach: $data'><li data-bind='text: $data'></li></ul>")
var list = observableArray(['a', 'b', 'c'])
applyBindings(list, target[0])
list.push('d')
assert.equal(contextFor(target.children()[0]).$index(), 0)
assert.equal(contextFor(target.children()[1]).$index(), 1)
assert.equal(contextFor(target.children()[2]).$index(), 2)
assert.equal(contextFor(target.children()[3]).$index(), 3)
})
it('updates on prepend', function () {
var target = $("<ul data-bind='foreach: $data'><li data-bind='text: $data'></li></ul>")
var list = observableArray(['a', 'b', 'c'])
applyBindings(list, target[0])
list.unshift('e')
assert.equal(contextFor(target.children()[0]).$index(), 0)
assert.equal(contextFor(target.children()[1]).$index(), 1)
assert.equal(contextFor(target.children()[2]).$index(), 2)
assert.equal(contextFor(target.children()[3]).$index(), 3)
})
it('updates on splice', function () {
var target = $("<ul data-bind='foreach: $data'><li data-bind='text: $data'></li></ul>")
var list = observableArray(['a', 'b', 'c'])
applyBindings(list, target[0])
// Delete 2 at 1, insert 2
list.splice(1, 2, 'r', 'q')
assert.equal(contextFor(target.children()[0]).$index(), 0)
assert.equal(contextFor(target.children()[1]).$index(), 1)
assert.equal(contextFor(target.children()[2]).$index(), 2)
})
// noIndex: true defers creating the observable until $index is first used.
it('is not initially an observable', function () {
var target = $("<ul data-bind='foreach: {data: $data, noIndex: true}'><li data-bind='text: $data'></li></ul>")
var list = observableArray(['a'])
applyBindings(list, target[0])
assert.notOk(isObservable(contextFor(target.children()[0]).$index))
})
it('is observable after the first call', function () {
var target = $("<ul data-bind='foreach: {data: $data, noIndex: true}'><li data-bind='text: $data'></li></ul>")
var list = observableArray(['a'])
applyBindings(list, target[0])
const $index = contextFor(target.children()[0]).$index
assert.equal($index(), 0)
assert.ok(isObservable(contextFor(target.children()[0]).$index))
})
it('is present with `as`', function () {
var target = $("<ul data-bind='foreach: {data: $data, as: \"$item\"}'><li data-bind='text: $item'></li></ul>")
var list = observableArray(['a', 'b'])
applyBindings(list, target[0])
assert.equal(contextFor(target.children()[0]).$index(), 0)
assert.equal(contextFor(target.children()[1]).$index(), 1)
})
})
describe('`as` parameter', function () {
it('is used when present', function () {
var target = $("<ul data-bind='foreach: { data: $data, as: \"xyz\" }'><li data-bind='text: xyz'></li></ul>")
var list = ['a', 'b', 'c']
applyBindings(list, target[0])
assert.equal(target.text(), 'abc')
})
// With `as`, $data on each child context stays the PARENT's $data; only the
// alias is bound to the item.
it('each item has the same $data as its parent', function () {
var target = $("<ul data-bind='foreach: { data: $data, as: \"xyz\" }'><li data-bind='text: xyz'></li></ul>")
var list = ['a', 'b', 'c']
applyBindings(list, target[0])
assert.strictEqual(dataFor(target.children()[0]).$data, dataFor(target))
assert.strictEqual(dataFor(target.children()[1]).$data, dataFor(target))
assert.strictEqual(dataFor(target.children()[2]).$data, dataFor(target))
})
it('has an $index', function () {
var target = $("<ul data-bind='foreach: { data: $data, as: \"xyz\" }'><li data-bind='text: xyz'></li></ul>")
var list = ['a', 'b', 'c']
applyBindings(list, target[0])
assert.equal(contextFor(target.children()[0]).$index(), 0)
assert.equal(contextFor(target.children()[1]).$index(), 1)
assert.equal(contextFor(target.children()[2]).$index(), 2)
})
// `as` may also be supplied as a peer binding: foreach: x, as: "name".
it('reads `as` from peer binding parameters', function () {
var target = $("<ul data-bind='foreach: $data, as: \"xyz\"'><li data-bind='text: xyz'></li></ul>")
var list = ['a', 'b', 'c']
applyBindings(list, target[0])
assert.equal(target.text(), 'abc')
})
})
})
// Focus preservation across re-renders: object items keep their DOM nodes
// when moved (so focus survives); primitive items are not identity-tracked
// and lose focus when removed and re-added.
describe('focus', function () {
  var $target
  beforeEach(function () {
    $target = $("<div data-bind='foreach: $data'>" +
      '<input />' +
      '</div>')
      .appendTo(document.body)
    // NOTE(review): sync processing is disabled here but never restored in
    // afterEach — confirm later suites do not rely on the default mode.
    ForEachBinding.setSync(false)
  })
  afterEach(function () {
    $target.remove()
  })
  it('does not preserve the target on apply bindings', function (done) {
    var list = ['a', 'b', 'c']
    $target.find(':input').focus()
    applyBindings(list, $target[0])
    setTimeout(function () {
      assert.strictEqual(document.activeElement, document.body)
      done()
    }, 50)
  })
  // Title grammar fixed: "does not preserves" -> "does not preserve".
  it('does not preserve primitive targets when re-ordering', function (done) {
    var list = observableArray(['a', 'b', 'c'])
    applyBindings(list, $target[0])
    $target.find(':input').first().focus()
    assert.strictEqual(document.activeElement, $target.find(':input')[0])
    list.remove('a')
    list.push('a')
    setTimeout(function () {
      assert.strictEqual(document.activeElement, document.body)
      done()
    }, 50)
  })
  it('preserves objects when re-ordering', function (done) {
    var o0 = {}
    var list = observableArray([o0, 'b', 'c'])
    applyBindings(list, $target[0])
    $target.find(':input').first().focus()
    assert.strictEqual(document.activeElement, $target.find(':input')[0], 'a')
    list.remove(o0)
    list.push(o0)
    setTimeout(function () {
      assert.strictEqual(document.activeElement, $target.find(':input')[2], 'o')
      done()
    }, 50)
  })
  it('preserves objects when re-ordering multiple identical', function (done) {
    var o0 = {}
    var list = observableArray([o0, 'b', 'c'])
    applyBindings(list, $target[0])
    $target.find(':input').first().focus()
    assert.strictEqual(document.activeElement, $target.find(':input')[0], 'a')
    list.remove(o0)
    list.push('x')
    list.push(o0)
    list.push('y')
    setTimeout(function () {
      assert.strictEqual(document.activeElement, $target.find(':input')[3], 'o')
      done()
    }, 50)
  })
  it('preserves objects when re-ordering multiple identical, alt', function (done) {
    var o0 = {}
    var list = observableArray([o0, 'b', 'c'])
    applyBindings(list, $target[0])
    $target.find(':input').first().focus()
    assert.strictEqual(document.activeElement, $target.find(':input')[0], 'a')
    list.remove(o0)
    list.push(o0) // focused
    list.push(o0)
    setTimeout(function () {
      assert.strictEqual(document.activeElement, $target.find(':input')[2], 'o')
      done()
    }, 50)
  })
})
// $list on a child binding context exposes the raw array (or the
// observableArray itself) driving the foreach, under every parameter
// combination. Fixture typo fixed in all five tests: the <ul> is now closed
// with </ul> (was </div>; jQuery silently repaired the mismatch).
describe('$list', function () {
  it('exposes a list', function () {
    var target = $("<ul data-bind='foreach: $data'><li data-bind='text: $data'></li></ul>")
    var list = ['a', 'b', 'c']
    applyBindings(list, target[0])
    assert.strictEqual(
      contextFor(target.children()[1]).$list, list
    )
  })
  it('exposes an observable array', function () {
    var target = $("<ul data-bind='foreach: $data'><li data-bind='text: $data'></li></ul>")
    var list = observableArray(['a', 'b', 'c'])
    applyBindings(list, target[0])
    assert.strictEqual(
      contextFor(target.children()[1]).$list, list
    )
  })
  it('exposes an observable array with `as`', function () {
    var target = $("<ul data-bind='foreach: $data, as: \"x\"'><li data-bind='text: x'></li></ul>")
    var list = observableArray(['a', 'b', 'c'])
    applyBindings(list, target[0])
    assert.strictEqual(
      contextFor(target.children()[1]).$list, list
    )
  })
  it('exposes an observable array with `as` + noIndex', function () {
    var target = $("<ul data-bind='foreach: $data, as: \"x\", noIndex: true'><li data-bind='text: x'></li></ul>")
    var list = observableArray(['a', 'b', 'c'])
    applyBindings(list, target[0])
    assert.strictEqual(
      contextFor(target.children()[1]).$list, list
    )
  })
  it('exposes an observable array with noIndex', function () {
    var target = $("<ul data-bind='foreach: $data, noIndex: true'><li data-bind='text: $data'></li></ul>")
    var list = observableArray(['a', 'b', 'c'])
    applyBindings(list, target[0])
    assert.strictEqual(
      contextFor(target.children()[1]).$list, list
    )
  })
})
import RouteRecognizer, {
QueryParams,
Result,
Results
} from "../lib/route-recognizer";
// Module-level recognizer shared by the helpers below; re-created in each
// QUnit module's beforeEach so route maps never leak between tests.
let router: RouteRecognizer<string>;
/**
 * Assert that a recognizer result set equals the expected handler list,
 * and (when given) that its query params match too. `results` is array-like,
 * so slice() converts it to a plain array for deepEqual.
 */
function resultsMatch(
  assert: Assert,
  results: Results<string> | undefined,
  array: Result<string>[],
  queryParams?: QueryParams
): void {
  const handlers = results ? results.slice() : undefined;
  assert.deepEqual(handlers, array);
  if (!queryParams) {
    return;
  }
  assert.deepEqual(results ? results.queryParams : undefined, queryParams);
}
/**
 * Recognize `path` against the module-level router and compare the full
 * result set (handlers, params, and optional query params).
 */
function matchesRoute(
  assert: Assert,
  path: string,
  expected: Result<string>[],
  queryParams?: QueryParams
): void {
  resultsMatch(assert, router.recognize(path), expected, queryParams);
}
QUnit.module("The match DSL", hooks => {
hooks.beforeEach(() => {
// Fresh recognizer per test so route maps never leak across tests.
router = new RouteRecognizer();
});
// Multiple top-level match() calls in one map() must coexist; the static
// segments ("new", "edit") take precedence over the dynamic ":id".
QUnit.test("supports multiple calls to match", assert => {
router.map(function(match) {
match("/posts/new").to("newPost");
match("/posts/:id").to("showPost");
match("/posts/edit").to("editPost");
});
matchesRoute(assert, "/posts/new", [
{ handler: "newPost", params: {}, isDynamic: false }
]);
matchesRoute(assert, "/posts/1", [
{ handler: "showPost", params: { id: "1" }, isDynamic: true }
]);
matchesRoute(assert, "/posts/edit", [
{ handler: "editPost", params: {}, isDynamic: false }
]);
});
// Same routes as above, but recognize() must also split off the query
// string and expose it as `queryParams`.
QUnit.test(
"supports multiple calls to match with query params",
(assert: Assert) => {
router.map(function(match) {
match("/posts/new").to("newPost");
match("/posts/:id").to("showPost");
match("/posts/edit").to("editPost");
});
matchesRoute(
assert,
"/posts/new?foo=1&bar=2",
[{ handler: "newPost", params: {}, isDynamic: false }],
{ foo: "1", bar: "2" }
);
matchesRoute(
assert,
"/posts/1?baz=3",
[{ handler: "showPost", params: { id: "1" }, isDynamic: true }],
{ baz: "3" }
);
matchesRoute(
assert,
"/posts/edit",
[{ handler: "editPost", params: {}, isDynamic: false }],
{}
);
}
);
// Nesting without .to() on the parent only concatenates the paths — the
// parent contributes no handler of its own to the result set.
QUnit.test("supports nested match", (assert: Assert) => {
router.map(function(match) {
match("/posts", function(match) {
match("/new").to("newPost");
match("/:id").to("showPost");
match("/edit").to("editPost");
});
});
matchesRoute(assert, "/posts/new", [
{ handler: "newPost", params: {}, isDynamic: false }
]);
matchesRoute(assert, "/posts/1", [
{ handler: "showPost", params: { id: "1" }, isDynamic: true }
]);
matchesRoute(assert, "/posts/edit", [
{ handler: "editPost", params: {}, isDynamic: false }
]);
});
// Regression test: nested dynamic segments must win over a top-level star
// (wildcard) route, while the wildcard catches paths the dynamic routes
// cannot. The inline notes record the originally reported failures this
// test was added for.
QUnit.test(
"support nested dynamic routes and star route",
(assert: Assert) => {
router.map(function(match) {
match("/:routeId").to("routeId", function(match) {
match("/").to("routeId.index");
match("/:subRouteId").to("subRouteId");
});
match("/*wildcard").to("wildcard");
});
// originally reported failure: "/abc" incorrectly matched the wildcard
// route instead of routeId + routeId.index
matchesRoute(assert, "/abc", [
{ handler: "routeId", params: { routeId: "abc" }, isDynamic: true },
{ handler: "routeId.index", params: {}, isDynamic: false }
]);
// always worked: two dynamic segments
matchesRoute(assert, "/abc/def", [
{ handler: "routeId", params: { routeId: "abc" }, isDynamic: true },
{
handler: "subRouteId",
params: { subRouteId: "def" },
isDynamic: true
}
]);
// originally reported failure: three segments matched no route; must
// fall through to the wildcard
matchesRoute(assert, "/abc/def/ghi", [
{
handler: "wildcard",
params: { wildcard: "abc/def/ghi" },
isDynamic: true
}
]);
}
);
QUnit.test("supports nested match with query params", (assert: Assert) => {
router.map(function(match) {
match("/posts", function(match) {
match("/new").to("newPost");
match("/:id").to("showPost");
match("/edit").to("editPost");
});
});
matchesRoute(
assert,
"/posts/new?foo=1&bar=2",
[{ handler: "newPost", params: {}, isDynamic: false }],
{ foo: "1", bar: "2" }
);
matchesRoute(
assert,
"/posts/1?baz=3",
[{ handler: "showPost", params: { id: "1" }, isDynamic: true }],
{ baz: "3" }
);
matchesRoute(
assert,
"/posts/edit",
[{ handler: "editPost", params: {}, isDynamic: false }],
{}
);
});
QUnit.test(
"not passing a function with `match` as a parameter raises",
(assert: Assert) => {
assert.throws(function() {
router.map(function(match) {
match("/posts").to("posts", () => void 0);
});
});
}
);
QUnit.test("supports nested handlers", (assert: Assert) => {
router.map(function(match) {
match("/posts").to("posts", function(match) {
match("/new").to("newPost");
match("/:id").to("showPost");
match("/edit").to("editPost");
});
});
matchesRoute(assert, "/posts/new", [
{ handler: "posts", params: {}, isDynamic: false },
{ handler: "newPost", params: {}, isDynamic: false }
]);
matchesRoute(assert, "/posts/1", [
{ handler: "posts", params: {}, isDynamic: false },
{ handler: "showPost", params: { id: "1" }, isDynamic: true }
]);
matchesRoute(assert, "/posts/edit", [
{ handler: "posts", params: {}, isDynamic: false },
{ handler: "editPost", params: {}, isDynamic: false }
]);
});
QUnit.test("supports deeply nested handlers", (assert: Assert) => {
router.map(function(match) {
match("/posts").to("posts", function(match) {
match("/new").to("newPost");
match("/:id").to("showPost", function(match) {
match("/index").to("postIndex");
match("/comments").to("postComments");
});
match("/edit").to("editPost");
});
});
matchesRoute(assert, "/posts/new", [
{ handler: "posts", params: {}, isDynamic: false },
{ handler: "newPost", params: {}, isDynamic: false }
]);
matchesRoute(assert, "/posts/1/index", [
{ handler: "posts", params: {}, isDynamic: false },
{ handler: "showPost", params: { id: "1" }, isDynamic: true },
{ handler: "postIndex", params: {}, isDynamic: false }
]);
matchesRoute(assert, "/posts/1/comments", [
{ handler: "posts", params: {}, isDynamic: false },
{ handler: "showPost", params: { id: "1" }, isDynamic: true },
{ handler: "postComments", params: {}, isDynamic: false }
]);
matchesRoute(assert, "/posts/ne/comments", [
{ handler: "posts", params: {}, isDynamic: false },
{ handler: "showPost", params: { id: "ne" }, isDynamic: true },
{ handler: "postComments", params: {}, isDynamic: false }
]);
matchesRoute(assert, "/posts/edit", [
{ handler: "posts", params: {}, isDynamic: false },
{ handler: "editPost", params: {}, isDynamic: false }
]);
});
QUnit.test("supports index-style routes", (assert: Assert) => {
router.map(function(match) {
match("/posts").to("posts", function(match) {
match("/new").to("newPost");
match("/:id").to("showPost", function(match) {
match("/").to("postIndex");
match("/comments").to("postComments");
});
match("/edit").to("editPost");
});
});
matchesRoute(assert, "/posts/new", [
{ handler: "posts", params: {}, isDynamic: false },
{ handler: "newPost", params: {}, isDynamic: false }
]);
matchesRoute(assert, "/posts/1", [
{ handler: "posts", params: {}, isDynamic: false },
{ handler: "showPost", params: { id: "1" }, isDynamic: true },
{ handler: "postIndex", params: {}, isDynamic: false }
]);
matchesRoute(assert, "/posts/1/comments", [
{ handler: "posts", params: {}, isDynamic: false },
{ handler: "showPost", params: { id: "1" }, isDynamic: true },
{ handler: "postComments", params: {}, isDynamic: false }
]);
matchesRoute(assert, "/posts/edit", [
{ handler: "posts", params: {}, isDynamic: false },
{ handler: "editPost", params: {}, isDynamic: false }
]);
});
QUnit.test("supports single `/` routes", (assert: Assert) => {
router.map(function(match) {
match("/").to("posts");
});
matchesRoute(assert, "/", [
{ handler: "posts", params: {}, isDynamic: false }
]);
});
QUnit.test("supports star routes", (assert: Assert) => {
router.map(function(match) {
match("/").to("posts");
match("/*everything").to("404");
});
// randomly generated strings
[
"w6PCXxJn20PCSievuP",
"v2y0gaByxHjHYJw0pVT1TeqbEJLllVq-3",
"DFCR4rm7XMbT6CPZq-d8AU7k",
"d3vYEg1AoYaPlM9QbOAxEK6u/H_S-PYH1aYtt"
].forEach(function(r) {
matchesRoute(assert, "/" + r, [
{ handler: "404", params: { everything: r }, isDynamic: true }
]);
});
});
QUnit.test("star route does not swallow trailing `/`", (assert: Assert) => {
router.map(function(match) {
match("/").to("posts");
match("/*everything").to("glob");
});
const r = "folder1/folder2/folder3/";
matchesRoute(assert, "/" + r, [
{ handler: "glob", params: { everything: r }, isDynamic: true }
]);
});
QUnit.test("support star route before other segment", (assert: Assert) => {
router.map(function(match) {
match("/*everything/:extra").to("glob");
});
[
"folder1/folder2/folder3//the-extra-stuff/",
"folder1/folder2/folder3//the-extra-stuff"
].forEach(function(r) {
matchesRoute(assert, "/" + r, [
{
handler: "glob",
params: {
everything: "folder1/folder2/folder3/",
extra: "the-extra-stuff"
},
isDynamic: true
}
]);
});
});
QUnit.test("support nested star route", (assert: Assert) => {
router.map(function(match) {
match("/*everything").to("glob", function(match) {
match("/:extra").to("extra");
});
});
[
"folder1/folder2/folder3//the-extra-stuff/",
"folder1/folder2/folder3//the-extra-stuff"
].forEach(function(r) {
matchesRoute(assert, "/" + r, [
{
handler: "glob",
params: { everything: "folder1/folder2/folder3/" },
isDynamic: true
},
{
handler: "extra",
params: { extra: "the-extra-stuff" },
isDynamic: true
}
]);
});
});
QUnit.test(
"calls a delegate whenever a new context is entered",
(assert: Assert) => {
const passedArguments: string[] = [];
router.delegate = {
contextEntered: function(name, match) {
assert.ok(match instanceof Function, "The match is a function");
match("/").to("index");
passedArguments.push(name);
}
};
router.map(function(match) {
match("/").to("application", function(match) {
match("/posts").to("posts", function(match) {
match("/:post_id").to("post");
});
});
});
assert.deepEqual(
passedArguments,
["application", "posts"],
"The entered contexts were passed to contextEntered"
);
matchesRoute(assert, "/posts", [
{ handler: "application", params: {}, isDynamic: false },
{ handler: "posts", params: {}, isDynamic: false },
{ handler: "index", params: {}, isDynamic: false }
]);
}
);
QUnit.test("delegate can change added routes", (assert: Assert) => {
router.delegate = {
willAddRoute: function(context, route) {
if (!context) {
return route;
}
context = context.split(".").slice(-1)[0];
return context + "." + route;
},
// Test that both delegates work together
contextEntered: function(_, match) {
match("/").to("index");
}
};
router.map(function(match) {
match("/").to("application", function(match) {
match("/posts").to("posts", function(match) {
match("/:post_id").to("post");
});
});
});
matchesRoute(assert, "/posts", [
{ handler: "application", params: {}, isDynamic: false },
{ handler: "application.posts", params: {}, isDynamic: false },
{ handler: "posts.index", params: {}, isDynamic: false }
]);
matchesRoute(assert, "/posts/1", [
{ handler: "application", params: {}, isDynamic: false },
{ handler: "application.posts", params: {}, isDynamic: false },
// eslint-disable-next-line @typescript-eslint/camelcase
{ handler: "posts.post", params: { post_id: "1" }, isDynamic: true }
]);
});
QUnit.test("supports add-route callback", (assert: Assert) => {
const invocations: string[] = [];
router.map(
function(match) {
match("/").to("application", function(match) {
match("/loading").to("loading");
match("/_unused_dummy_error_path_route_application/:error").to(
"error"
);
match("/lobby").to("lobby", function(match) {
match("/loading").to("lobby.loading");
match("/_unused_dummy_error_path_route_lobby/:error").to(
"lobby.error"
);
match(":lobby_id").to("lobby.index");
match("/list").to("lobby.list");
});
match("/").to("index");
});
},
function(router, route) {
invocations.push(route.map(e => e.handler).join("."));
router.add(route);
}
);
const expected = [
"application.loading",
"application.error",
"application.lobby.lobby.loading",
"application.lobby.lobby.error",
"application.lobby.lobby.index",
"application.lobby.lobby.list",
"application.index"
];
assert.deepEqual(
expected,
invocations,
"invokes for the correct set of routes"
);
matchesRoute(assert, "/lobby/loading", [
{ handler: "application", params: {}, isDynamic: false },
{ handler: "lobby", params: {}, isDynamic: false },
{ handler: "lobby.loading", params: {}, isDynamic: false }
]);
});
}); | the_stack |
import {BoxrecRole} from "boxrec-requests/dist/boxrec-requests.constants";
import {BoutsGetter, BoutsInterface} from "../../decorators/bouts.decorator";
import {getLocationValue, townRegionCountryRegex, trimRemoveLineBreaks} from "../../helpers";
import {BoxrecBasic, BoxrecBoutLocation, BoxrecLocation} from "../boxrec.constants";
import {BoxrecPromoter} from "./boxrec.event.constants";
import {BoxrecPageEventBoutRow} from "./boxrec.page.event.bout.row";
import {BoxrecParseBouts} from "./boxrec.parse.bouts";
/**
 * Shared scraping logic for BoxRec "Event" and "Date" pages: parses the
 * location/venue links, matchmakers, promoters and ticket information out
 * of the page HTML. Subclasses supply the raw HTML fragments via the
 * `parseLocation` / `parseMatchmakers` / `parsePromoters` overrides.
 */
@BoutsGetter("table", BoxrecPageEventBoutRow, 2)
export abstract class BoxrecEvent extends BoxrecParseBouts implements BoutsInterface {

    // Populated by the @BoutsGetter decorator.
    bouts: BoxrecPageEventBoutRow[];

    /**
     * Extracts the venue id/name from the first anchor of the location cell.
     * Returns `{ id: null, name: null }` when the venue cannot be determined.
     *
     * @param links every anchor found in the location cell
     */
    protected static getVenueInformation(links: Cheerio): BoxrecBasic {
        const obj: BoxrecBasic = {
            id: null,
            name: null,
        };
        // if the number of links is 1, it's presumably missing the venue:
        // a single link gives us the location but not the venue
        if (links.length > 1) {
            // venue ids are the trailing digits of the first anchor's href
            const venueId: RegExpMatchArray | null = links.get(0).attribs.href.match(/(\d+)$/);
            const venueName: string | undefined = links.get(0).children[0].data;
            if (venueId && venueId[1] && venueName) {
                obj.id = parseInt(venueId[1], 10);
                obj.name = venueName;
            }
        }
        return obj;
    }

    /**
     * Extracts town/region/country from the anchors of the location cell.
     * The number of anchors determines which parts are present:
     * 1 = country only, 2 = town + country, 3 = town + country (+ venue),
     * 4 = venue + town + region + country.
     *
     * @param links every anchor found in the location cell
     */
    protected static getLocationInformation(links: Cheerio): BoxrecLocation {
        // if the number of links is 2, the link with all the information changes position // 2 is 0, 3/4 is 1
        // (boolean coerced to number: 0 when links.length is 2, 1 when 3 or 4)
        const hrefPosition: number = +(links.length === 3 || links.length === 4);
        const locationObject: BoxrecLocation = {
            country: {
                id: null,
                name: null,
            },
            region: {
                id: null,
                name: null,
            },
            town: {
                id: null,
                name: null,
            }
        };
        // NOTE(review): the `as string[]` cast contradicts the declared
        // `RegExpMatchArray | null` type — `.match()` can return null, which
        // the following `if` does guard against, so the cast is merely
        // misleading rather than unsafe here.
        const locationMatches: RegExpMatchArray | null =
            links.get(hrefPosition).attribs.href.match(townRegionCountryRegex) as string[];
        if (locationMatches) {
            const [, , , townId] = locationMatches;
            if (townId) {
                locationObject.town.id = parseInt(townId, 10);
                // NOTE(review): town name is always read from anchor index 1
                // even though the id was parsed from `hrefPosition` — confirm
                // this holds for the 3-link page layout.
                locationObject.town.name = links.get(1).children[0].data as string;
            }
            // there are 1-4 links
            // 2-3 usually means `region` is missing, 4 means it has town, region, country and venue
            // 1 is only country
            if (links.length === 4) {
                locationObject.region = {
                    id: getLocationValue(links.get(2).attribs.href, "region"),
                    name: links.get(2).children[0].data as string,
                };
                locationObject.country = {
                    id: getLocationValue(links.get(3).attribs.href, "country"),
                    name: links.get(3).children[0].data as string,
                };
            } else if (links.length === 3) {
                locationObject.country = {
                    id: getLocationValue(links.get(2).attribs.href, "country"),
                    name: links.get(2).children[0].data as string,
                };
            } else if (links.length === 2) {
                // two links: first is the town, second (below) the country
                locationObject.town = {
                    id: getLocationValue(links.get(0).attribs.href, "town"),
                    name: links.get(0).children[0].data as string,
                };
            }
            if (links.length === 2 || links.length === 1) {
                // NOTE(review): with a single link, `links.get(1)` is
                // undefined and this would throw; presumably one-link pages
                // never reach here (locationMatches would be falsy) — confirm.
                locationObject.country = {
                    id: getLocationValue(links.get(1).attribs.href, "country"),
                    name: links.get(1).children[0].data as string,
                };
            }
        }
        return locationObject;
    }

    /**
     * The venue and town/region/country of the event, parsed from the
     * subclass-provided location HTML. Missing parts are null.
     */
    get location(): BoxrecBoutLocation {
        const locationObject: BoxrecBoutLocation = {
            location: {
                country: {
                    id: null,
                    name: null,
                },
                region: {
                    id: null,
                    name: null,
                },
                town: {
                    id: null,
                    name: null,
                }
            },
            venue: {
                id: null,
                name: null,
            },
        };
        // wrap the fragment so cheerio can query it as a mini-document
        const html: Cheerio = this.$(`<div>${this.parseLocation()}</div>`);
        const links: Cheerio = html.find("a");
        locationObject.venue = BoxrecEvent.getVenueInformation(links);
        locationObject.location = BoxrecEvent.getLocationInformation(links);
        return locationObject;
    }

    /**
     * The order of returned matchmakers can change
     */
    get matchmakers(): BoxrecBasic[] {
        const html: Cheerio = this.$(`<div>${this.parseMatchmakers()}</div>`);
        const matchmaker: BoxrecBasic[] = [];
        html.find("a").each((i: number, elem: CheerioElement) => {
            // matchmaker ids are the trailing digits of the profile href
            const href: RegExpMatchArray | null = this.$(elem).get(0).attribs.href.match(/(\d+)$/);
            if (href) {
                const name: string = this.$(elem).text();
                matchmaker.push({
                    id: parseInt(href[1], 10),
                    name,
                });
            }
        });
        return matchmaker;
    }

    // does not exist on dates page
    get promoters(): BoxrecPromoter[] {
        const html: Cheerio = this.$(`<div>${this.parsePromoters()}</div>`);
        const promoter: BoxrecPromoter[] = [];
        html.find("a").each((i: number, elem: CheerioElement) => {
            const href: string = this.$(elem).get(0).attribs.href;
            const name: string = this.$(elem).text();
            let id: number | null = null;
            let company: string | null = null;
            const matches: RegExpMatchArray | null = href.match(/(\d+)$/);
            if (matches) {
                // matches[0] equals matches[1] here because the capture is anchored at the end
                id = parseInt(matches[0], 10);
            }
            const htmlString: string | null = html.html();
            if (htmlString) {
                // this regex may not work for everything (this comment was about `event` pages)
                // turns out `events` page and `bout` page display promoters differently
                // ex. of links between `event` pages and `bout` pages
                // events - `Golden Boy Promotions - Oscar De La Hoya`
                // bouts - `Oscar De La Hoya (Golden Boy Promotions)`
                // first we'll figure out which one we're looking at, then choose the proper regex to use
                // we should also assume that both might fail
                // these both share the same characters for company names
                // capture forward slashes in it because `360/GGG/K2 Promotions`
                const promoterEventsPageRegex: RegExp = /([\w\d\/\-\s]+)\s-\s<a\shref/g;
                const promoterBoutsPageRegex: RegExp = /\(([\w\d\/\-\s]+)\)/g;
                const eventsRegexReturnsResults: RegExpMatchArray | null = promoterEventsPageRegex.exec(htmlString);
                let regexThatGetsResults: RegExp;
                if (eventsRegexReturnsResults !== null) {
                    regexThatGetsResults = promoterEventsPageRegex;
                } else {
                    const boutsRegexReturnsResults: RegExpMatchArray | null = promoterBoutsPageRegex.exec(htmlString);
                    if (boutsRegexReturnsResults !== null) {
                        regexThatGetsResults = promoterBoutsPageRegex;
                    } else {
                        // both regex did not work, either broken or they don't exist
                        // NOTE(review): returning a truthy value from cheerio's
                        // .each() does NOT stop iteration (only `false` does);
                        // this simply skips the current anchor — presumably the
                        // intent, but confirm.
                        return promoter;
                    }
                }
                regexThatGetsResults.lastIndex = 0; // reset the index of the `RegExp` // requires `g` flag on regex
                let m: RegExpExecArray | null;
                let j: number = 0;
                // walk the matches; the j-th company name pairs with the j-th
                // promoter anchor, so only capture when counts line up
                do {
                    m = regexThatGetsResults.exec(htmlString);
                    if (m && m[1]) {
                        if (j === promoter.length) {
                            company = m[1].trim();
                        }
                    }
                    j++;
                } while (m);
                if (company) {
                    promoter.push({
                        company,
                        id,
                        name,
                    });
                }
            }
        });
        return promoter;
    }

    /**
     * Returns contact information on how to buy tickets for this event
     * example: boxrec.com/en/date?ByV%5Bdate%5D%5Byear%5D=2019&ByV%5Bdate%5D%5Bmonth%5D=11&ByV%5Bdate%5D%5Bday%5D=16
     */
    get tickets(): string | null {
        const tickets: string = this.parseEventData("tickets", false);
        return tickets ? trimRemoveLineBreaks(tickets) : null;
    }

    // rows of the people/metadata table (role name in col 1, value in col 2)
    protected getPeopleTable(): Cheerio {
        return this.$("table thead table tbody tr");
    }

    /**
     * Finds the table row whose first cell matches `role` and returns the
     * second cell's contents; empty string when the role is absent.
     *
     * @param role       row label to look for
     * @param parseHTML  true returns inner HTML, false returns plain text
     */
    protected parseEventData(role: BoxrecRole | "media" | "commission" | "tickets", parseHTML: boolean = true)
        : string {
        let results: string | null = "";
        this.getPeopleTable().each((i: number, elem: CheerioElement) => {
            const tag: string = this.$(elem).find("td:nth-child(1)").text().trim();
            const val: Cheerio = this.$(elem).find("td:nth-child(2)");
            if (tag === role) {
                // tested if `television` might actually be a BoxRec role but it isn't
                results = parseHTML ? val.html() : val.text();
            }
        });
        return results;
    }

    // to be overridden by child class
    protected parseLocation(): string {
        throw new Error("Needs to be overridden by child class");
    }

    // to be overridden by child class
    protected parseMatchmakers(): string {
        throw new Error("Needs to be overridden by child class");
    }

    // to be overridden by child class
    protected parsePromoters(): string {
        throw new Error("Needs to be overridden by child class");
    }
}
export = Encoder;
/**
* @typedef EncodingOptions
* @property {any[]|object} [genTypes=[]] Array of pairs of
* `type`, `function(Encoder)` for semantic types to be encoded. Not
* needed for Array, Date, Buffer, Map, RegExp, Set, or URL.
* If an object, the keys are the constructor names for the types.
* @property {boolean} [canonical=false] Should the output be
* canonicalized.
* @property {boolean|WeakSet} [detectLoops=false] Should object loops
* be detected? This will currently add memory to track every part of the
* object being encoded in a WeakSet. Do not encode
* the same object twice on the same encoder, without calling
* `removeLoopDetectors` in between, which will clear the WeakSet.
* You may pass in your own WeakSet to be used; this is useful in some
* recursive scenarios.
* @property {("number"|"float"|"int"|"string")} [dateType="number"] -
* how should dates be encoded? "number" means float or int, if no
* fractional seconds.
* @property {any} [encodeUndefined=undefined] How should an
* "undefined" in the input be encoded. By default, just encode a CBOR
* undefined. If this is a buffer, use those bytes without re-encoding
* them. If this is a function, the function will be called (which is a
* good time to throw an exception, if that's what you want), and the
* return value will be used according to these rules. Anything else will
* be encoded as CBOR.
* @property {boolean} [disallowUndefinedKeys=false] Should
* "undefined" be disallowed as a key in a Map that is serialized? If
* this is true, encode(new Map([[undefined, 1]])) will throw an
* exception. Note that it is impossible to get a key of undefined in a
* normal JS object.
* @property {boolean} [collapseBigIntegers=false] Should integers
* that come in as ECMAscript bigint's be encoded
* as normal CBOR integers if they fit, discarding type information?
* @property {number} [chunkSize=4096] Number of characters or bytes
* for each chunk, if obj is a string or Buffer, when indefinite encoding.
* @property {boolean} [omitUndefinedProperties=false] When encoding
* objects or Maps, do not include a key if its corresponding value is
* `undefined`.
*/
/**
* Transform JavaScript values into CBOR bytes. The `Writable` side of
* the stream is in object mode.
*
* @extends stream.Transform
*/
declare class Encoder extends stream.Transform {
    /**
     * Encode an array and all of its elements.
     *
     * @param {Encoder} gen Encoder to use.
     * @param {any[]} obj Array to encode.
     * @param {object} [opts] Options.
     * @param {boolean} [opts.indefinite=false] Use indefinite encoding?
     * @returns {boolean} True on success.
     */
    static pushArray(gen: Encoder, obj: any[], opts?: {
        indefinite?: boolean;
    }): boolean;
    /**
     * @param {Encoder} gen Encoder.
     * @param {Date} obj Date to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    static _pushDate(gen: Encoder, obj: Date): boolean;
    /**
     * @param {Encoder} gen Encoder.
     * @param {Buffer} obj Buffer to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    static _pushBuffer(gen: Encoder, obj: Buffer): boolean;
    /**
     * @param {Encoder} gen Encoder.
     * @param {NoFilter} obj Buffer to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    static _pushNoFilter(gen: Encoder, obj: NoFilter): boolean;
    /**
     * @param {Encoder} gen Encoder.
     * @param {RegExp} obj RegExp to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    static _pushRegexp(gen: Encoder, obj: RegExp): boolean;
    /**
     * @param {Encoder} gen Encoder.
     * @param {Set} obj Set to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    static _pushSet(gen: Encoder, obj: Set<any>): boolean;
    /**
     * @param {Encoder} gen Encoder.
     * @param {URL} obj URL to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    static _pushURL(gen: Encoder, obj: URL): boolean;
    /**
     * @param {Encoder} gen Encoder.
     * @param {object} obj Boxed String, Number, or Boolean object to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    static _pushBoxed(gen: Encoder, obj: object): boolean;
    /**
     * @param {Encoder} gen Encoder.
     * @param {Map} obj Map to encode.
     * @param {any} opts Options forwarded by the caller (e.g. indefinite
     *   encoding) — undocumented in the source JSDoc.
     * @returns {boolean} True on success.
     * @throws {Error} Map key that is undefined.
     * @ignore
     */
    static _pushMap(gen: Encoder, obj: Map<any, any>, opts: any): boolean;
    /**
     * @param {Encoder} gen Encoder.
     * @param {NodeJS.TypedArray} obj Array to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    static _pushTypedArray(gen: Encoder, obj: NodeJS.TypedArray): boolean;
    /**
     * @param {Encoder} gen Encoder.
     * @param { ArrayBuffer } obj Array to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    static _pushArrayBuffer(gen: Encoder, obj: ArrayBuffer): boolean;
    /**
     * Encode the given object with indefinite length. There are apparently
     * some (IMO) broken implementations of poorly-specified protocols that
     * REQUIRE indefinite-encoding. See the example for how to add this as an
     * `encodeCBOR` function to an object or class to get indefinite encoding.
     *
     * @param {Encoder} gen The encoder to use.
     * @param {string|Buffer|Array|Map|object} [obj] The object to encode. If
     * null, use "this" instead.
     * @param {EncodingOptions} [options={}] Options for encoding.
     * @returns {boolean} True on success.
     * @throws {Error} No object to encode or invalid indefinite encoding.
     * @example <caption>Force indefinite encoding:</caption>
     * const o = {
     *   a: true,
     *   encodeCBOR: cbor.Encoder.encodeIndefinite,
     * }
     * const m = []
     * m.encodeCBOR = cbor.Encoder.encodeIndefinite
     * cbor.encodeOne([o, m])
     */
    static encodeIndefinite(gen: Encoder, obj?: string | Buffer | any[] | Map<any, any> | object, options?: EncodingOptions): boolean;
    /**
     * Encode one or more JavaScript objects, and return a Buffer containing the
     * CBOR bytes.
     *
     * @param {...any} objs The objects to encode.
     * @returns {Buffer} The encoded objects.
     */
    static encode(...objs: any[]): Buffer;
    /**
     * Encode one or more JavaScript objects canonically (slower!), and return
     * a Buffer containing the CBOR bytes.
     *
     * @param {...any} objs The objects to encode.
     * @returns {Buffer} The encoded objects.
     */
    static encodeCanonical(...objs: any[]): Buffer;
    /**
     * Encode one JavaScript object using the given options.
     *
     * @static
     * @param {any} obj The object to encode.
     * @param {EncodingOptions} [options={}] Passed to the Encoder constructor.
     * @returns {Buffer} The encoded objects.
     */
    static encodeOne(obj: any, options?: EncodingOptions): Buffer;
    /**
     * Encode one JavaScript object using the given options in a way that
     * is more resilient to objects being larger than the highWaterMark
     * number of bytes. As with the other static encode functions, this
     * will still use a large amount of memory. Use a stream-based approach
     * directly if you need to process large and complicated inputs.
     *
     * @param {any} obj The object to encode.
     * @param {EncodingOptions} [options={}] Passed to the Encoder constructor.
     * @returns {Promise<Buffer>} A promise for the encoded buffer.
     */
    static encodeAsync(obj: any, options?: EncodingOptions): Promise<Buffer>;
    static set SEMANTIC_TYPES(arg: {
        [x: string]: EncodeFunction;
    });
    /**
     * The currently supported set of semantic types. May be modified by plugins.
     *
     * @type {SemanticMap}
     */
    static get SEMANTIC_TYPES(): {
        [x: string]: EncodeFunction;
    };
    /**
     * Reset the supported semantic types to the original set, before any
     * plugins modified the list.
     */
    static reset(): void;
    /**
     * Creates an instance of Encoder.
     *
     * @param {EncodingOptions} [options={}] Options for the encoder.
     */
    constructor(options?: EncodingOptions);
    // Instance snapshots of the EncodingOptions passed to the constructor;
    // see the EncodingOptions typedef for the meaning of each.
    canonical: boolean;
    encodeUndefined: any;
    disallowUndefinedKeys: boolean;
    dateType: "string" | "number" | "float" | "int";
    collapseBigIntegers: boolean;
    /** @type {WeakSet?} */
    detectLoops: WeakSet<any> | null;
    omitUndefinedProperties: boolean;
    // Per-instance copy of the semantic-type table (see SEMANTIC_TYPES),
    // extendable via addSemanticType().
    semanticTypes: {
        [x: string]: EncodeFunction;
    };
    /**
     * @param {number} val Number(0-255) to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushUInt8(val: number): boolean;
    /**
     * @param {number} val Number(0-65535) to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushUInt16BE(val: number): boolean;
    /**
     * @param {number} val Number(0..2**32-1) to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushUInt32BE(val: number): boolean;
    /**
     * @param {number} val Number to encode as 4-byte float.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushFloatBE(val: number): boolean;
    /**
     * @param {number} val Number to encode as 8-byte double.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushDoubleBE(val: number): boolean;
    /**
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushNaN(): boolean;
    /**
     * @param {number} obj Positive or negative infinity.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushInfinity(obj: number): boolean;
    /**
     * Choose the best float representation for a number and encode it.
     *
     * @param {number} obj A number that is known to be not-integer, but not
     * how many bytes of precision it needs.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushFloat(obj: number): boolean;
    /**
     * Choose the best integer representation for a postive number and encode
     * it. If the number is over MAX_SAFE_INTEGER, fall back on float (but I
     * don't remember why).
     *
     * @param {number} obj A positive number that is known to be an integer,
     * but not how many bytes of precision it needs.
     * @param {number} mt The Major Type number to combine with the integer.
     * Not yet shifted.
     * @param {number} [orig] The number before it was transformed to positive.
     * If the mt is NEG_INT, and the positive number is over MAX_SAFE_INT,
     * then we'll encode this as a float rather than making the number
     * negative again and losing precision.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushInt(obj: number, mt: number, orig?: number): boolean;
    /**
     * Choose the best integer representation for a number and encode it.
     *
     * @param {number} obj A number that is known to be an integer,
     * but not how many bytes of precision it needs.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushIntNum(obj: number): boolean;
    /**
     * @param {number} obj Plain JS number to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushNumber(obj: number): boolean;
    /**
     * @param {string} obj String to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushString(obj: string): boolean;
    /**
     * @param {boolean} obj Bool to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushBoolean(obj: boolean): boolean;
    /**
     * @param {undefined} obj Ignored.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushUndefined(obj: undefined): boolean;
    /**
     * @param {null} obj Ignored.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushNull(obj: null): boolean;
    /**
     * @param {number} tag Tag number to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushTag(tag: number): boolean;
    /**
     * @param {bigint} obj BigInt to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushJSBigint(obj: bigint): boolean;
    /**
     * @param {object} obj Object to encode.
     * @param {any} opts Options forwarded by the caller — undocumented in
     *   the source JSDoc.
     * @returns {boolean} True on success.
     * @throws {Error} Loop detected.
     * @ignore
     */
    _pushObject(obj: object, opts: any): boolean;
    /**
     * @param {any[]} objs Array of supported things.
     * @returns {Buffer} Concatenation of encodings for the supported things.
     * @ignore
     */
    _encodeAll(objs: any[]): Buffer;
    /**
     * Add an encoding function to the list of supported semantic types. This
     * is useful for objects for which you can't add an encodeCBOR method.
     *
     * @param {string|Function} type The type to encode.
     * @param {EncodeFunction} fun The encoder to use.
     * @returns {EncodeFunction?} The previous encoder, or null if there
     * wasn't one (the declared return type below is `EncodeFunction | null`).
     * @throws {TypeError} Invalid function.
     */
    addSemanticType(type: string | Function, fun: EncodeFunction): EncodeFunction | null;
    /**
     * Push any supported type onto the encoded stream.
     *
     * @param {any} obj The thing to encode.
     * @returns {boolean} True on success.
     * @throws {TypeError} Unknown type for obj.
     */
    pushAny(obj: any): boolean;
    /**
     * Remove the loop detector WeakSet for this Encoder.
     *
     * @returns {boolean} True when the Encoder was reset, else false.
     */
    removeLoopDetectors(): boolean;
}
// Re-export the helper types under the Encoder namespace so consumers can
// write e.g. `Encoder.EncodingOptions` (CommonJS `export =` pattern).
declare namespace Encoder {
    export { EncodeFunction, SemanticMap, EncodingOptions };
}
import stream = require("stream");
/**
 * Generate the CBOR for a value. If you are using this, you'll either need
 * to call {@link Encoder.write } with a Buffer, or look into the internals of
 * Encoder to reuse existing non-documented behavior.
 *
 * @param enc The encoder to write the CBOR bytes to.
 * @param val The value to encode.
 * @returns True on success.
 */
type EncodeFunction = (enc: Encoder, val: any) => boolean;
import { Buffer } from "buffer";
import NoFilter = require("nofilter");
type EncodingOptions = {
    /**
     * Array of pairs of
     * `type`, `function(Encoder)` for semantic types to be encoded. Not
     * needed for Array, Date, Buffer, Map, RegExp, Set, or URL.
     * If an object, the keys are the constructor names for the types.
     */
    genTypes?: any[] | object;
    /**
     * Should the output be
     * canonicalized.
     */
    canonical?: boolean;
    /**
     * Should object loops
     * be detected? This will currently add memory to track every part of the
     * object being encoded in a WeakSet. Do not encode
     * the same object twice on the same encoder, without calling
     * `removeLoopDetectors` in between, which will clear the WeakSet.
     * You may pass in your own WeakSet to be used; this is useful in some
     * recursive scenarios.
     */
    detectLoops?: boolean | WeakSet<any>;
    /**
     * How should dates be encoded? "number" means float or int, if no
     * fractional seconds.
     */
    dateType?: ("number" | "float" | "int" | "string");
    /**
     * How should an
     * "undefined" in the input be encoded. By default, just encode a CBOR
     * undefined. If this is a buffer, use those bytes without re-encoding
     * them. If this is a function, the function will be called (which is a
     * good time to throw an exception, if that's what you want), and the
     * return value will be used according to these rules. Anything else will
     * be encoded as CBOR.
     */
    encodeUndefined?: any;
    /**
     * Should
     * "undefined" be disallowed as a key in a Map that is serialized? If
     * this is true, encode(new Map([[undefined, 1]])) will throw an
     * exception. Note that it is impossible to get a key of undefined in a
     * normal JS object.
     */
    disallowUndefinedKeys?: boolean;
    /**
     * Should integers
     * that come in as ECMAscript bigint's be encoded
     * as normal CBOR integers if they fit, discarding type information?
     */
    collapseBigIntegers?: boolean;
    /**
     * Number of characters or bytes
     * for each chunk, if obj is a string or Buffer, when indefinite encoding.
     */
    chunkSize?: number;
    /**
     * When encoding
     * objects or Maps, do not include a key if its corresponding value is
     * `undefined`.
     */
    omitUndefinedProperties?: boolean;
};
/**
 * A mapping from constructor name to the encode function for that type.
 * (NOTE(review): the original comment said "tag number to a tag decoding
 * function", which describes the Decoder's map; the shape declared here —
 * string key to {@link EncodeFunction} — and its use in `SEMANTIC_TYPES`
 * make clear this is the encoder-side table.)
 */
type SemanticMap = {
    [x: string]: EncodeFunction;
};
/**
* @license Copyright © 2013 onwards, Andrew Whewell
* All rights reserved.
*
* Redistribution and use of this software in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* * Neither the name of the author nor the names of the program's contributors may be used to endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OF THE SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* @fileoverview Exposes the current location of the browser.
*/
namespace VRS
{
    /*
     * Global options. Each option only takes the default shown here when the
     * site has not already assigned a value to it before this file loads.
     */
    export var globalOptions: GlobalOptions = VRS.globalOptions || {};
    VRS.globalOptions.currentLocationFixed = VRS.globalOptions.currentLocationFixed || undefined;               // Set to an object of { lat: 1.234, lng: 5.678 } to force the default current location (when the user has not assigned a location) to a fixed point rather than the server-configured initial location.
    VRS.globalOptions.currentLocationConfigurable = VRS.globalOptions.currentLocationConfigurable !== undefined ? VRS.globalOptions.currentLocationConfigurable : true;  // True if the user is allowed to set their current location.
    VRS.globalOptions.currentLocationIconUrl = VRS.globalOptions.currentLocationIconUrl || null;                // The icon to display on the map for the set current location marker.
    VRS.globalOptions.currentLocationUseGeoLocation = VRS.globalOptions.currentLocationUseGeoLocation !== undefined ? VRS.globalOptions.currentLocationUseGeoLocation : true;    // True if the option to use the browser's current location should be shown.
    VRS.globalOptions.currentLocationUseBrowserLocation = VRS.globalOptions.currentLocationUseBrowserLocation !== undefined ? VRS.globalOptions.currentLocationUseBrowserLocation : VRS.globalOptions.isMobile;  // True if the browser location should be used as the current location. This overrides the map centre / user-supplied location options.
    VRS.globalOptions.currentLocationShowOnMap = VRS.globalOptions.currentLocationShowOnMap !== undefined ? VRS.globalOptions.currentLocationShowOnMap : true;   // True if the current location should be shown on the map
    VRS.globalOptions.currentLocationImageUrl = VRS.globalOptions.currentLocationImageUrl || 'images/location.png'; // The URL of the current location marker.
    VRS.globalOptions.currentLocationImageSize = VRS.globalOptions.currentLocationImageSize || { width: 10, height: 10 }; // The size of the current location marker.
    VRS.globalOptions.currentLocationUseMapCentreForFirstVisit = VRS.globalOptions.currentLocationUseMapCentreForFirstVisit != undefined ? VRS.globalOptions.currentLocationUseMapCentreForFirstVisit : true;    // If true then on the first visit the user-supplied current location is set to the map centre. If false then the user must always choose a current location (i.e. the same behaviour as version 1 of the site).
    /**
     * The settings to use when creating a new CurrentLocation object.
     */
    export interface CurrentLocation_Settings
    {
        name?: string;                          // The name to use when saving and loading state. Defaults to 'default' in the constructor.
        mapForApproximateLocation?: JQuery;     // The jQuery element containing the map to take the approximate location from.
    }
    /**
     * The settings that records a CurrentLocation object's state.
     * This is the shape persisted via VRS.configStorage.
     */
    export interface CurrentLocation_SaveState
    {
        userSuppliedLocation?: ILatLng;     // The location the user chose by dragging the "set current location" marker.
        useBrowserLocation?: boolean;       // True if the browser's geolocation should supply the current location.
        showCurrentLocation?: boolean;      // True if the current location marker should be shown on the map.
    }
    /**
     * An object that keeps track of the user's current location.
     *
     * The current location can come from three sources, in priority order:
     * 1. the browser's geolocation (when enabled and a position has been reported),
     * 2. a location the user supplied by dragging a marker,
     * 3. the centre of the "approximate location" map.
     * Whenever the effective location changes the currentLocationChanged event is raised.
     */
    export class CurrentLocation implements ISelfPersist<CurrentLocation_SaveState>
    {
        private _Dispatcher = new VRS.EventHandler({
            name: 'VRS.CurrentLocation'
        });
        private _Events = {
            currentLocationChanged: 'currentLocationChanged'
        };

        private _SetCurrentLocationMarker: IMapMarker = null;       // The map marker used to manually set the user's current location.
        private _CurrentLocationMarker: IMapMarker = null;          // The map marker used to display the current location.
        private _PlottedCurrentLocation: ILatLng = null;            // The position at which the current location marker has been plotted.
        private _MapForDisplay: JQuery = undefined;                 // The map to display the location pin on. This might not be the same as the map for approximate location.
        private _MapForApproximateLocationPlugin: IMap = null;      // A direct reference to the map plugin for the approximate location map.
        private _MapForApproximateLocationCentreChangedHookResult: IEventHandleJQueryUI = null;  // The hook result from the "map for approximate location"'s centre changed event.
        private _MapMarkerDraggedHookResult: IEventHandleJQueryUI = null;   // The hook result from the map marker dragged event.

        private _Name: string;                                      // The name used to build the persistence key for saved state.
        private _CurrentLocation: ILatLng = VRS.globalOptions.currentLocationFixed;     // The location currently in force; starts as the optional fixed location.
        private _GeoLocationAvailable: boolean;                     // True if navigator exposes a geolocation object.
        private _LastBrowserLocation: ILatLng = null;               // The last position reported by the browser, null if none (or on watch error).
        private _GeoLocationHandlersInstalled = false;              // True once getCurrentPosition/watchPosition handlers have been installed; they are never uninstalled.
        private _UseBrowserLocation: boolean = VRS.globalOptions.currentLocationUseBrowserLocation;     // True if the browser should supply the current location.
        private _UserSuppliedCurrentLocation: ILatLng = null;       // The location the user chose manually, null if they never chose one.
        private _MapCentreLocation: ILatLng = null;                 // The last known centre of the approximate-location map.
        private _MapForApproximateLocation: JQuery = null;          // The jQuery element holding the approximate-location map.
        private _ShowCurrentLocationOnMap: boolean = VRS.globalOptions.currentLocationShowOnMap;        // True if the current location marker should be displayed.

        constructor(settings?: CurrentLocation_Settings)
        {
            settings = $.extend({
                name: 'default',
                mapForApproximateLocation: null
            }, settings);

            this._Name = settings.name;
            this._GeoLocationAvailable = 'geolocation' in navigator;
            this.setMapForApproximateLocation(settings.mapForApproximateLocation);
        }

        /**
         * Gets the name of the object.
         */
        getName = () : string =>
        {
            return this._Name;
        }

        /**
         * Gets the current location.
         */
        getCurrentLocation = () : ILatLng =>
        {
            return this._CurrentLocation;
        }
        /**
         * Sets the current location, refreshes the marker on the map and raises
         * currentLocationChanged. Null/undefined values are ignored.
         */
        setCurrentLocation = (value: ILatLng) =>
        {
            if(value && this._CurrentLocation !== value) {
                this._CurrentLocation = value;
                this.showCurrentLocationOnMap();
                this._Dispatcher.raise(this._Events.currentLocationChanged);
            }
        }

        /**
         * Gets a value indicating whether we're allowed to use the browser's current location.
         */
        getGeoLocationAvailable = () : boolean =>
        {
            return VRS.globalOptions.currentLocationUseGeoLocation && this._GeoLocationAvailable;
        }

        /**
         * Gets the last reported browser location. This can be null if the browser has never reported a position.
         */
        getLastBrowserLocation = () : ILatLng =>
        {
            return this._LastBrowserLocation;
        }

        /**
         * Gets a value indicating that the user wants the current location to be supplied by the browser. This overrides
         * all other options.
         */
        getUseBrowserLocation = () : boolean =>
        {
            return this._UseBrowserLocation;
        }
        /**
         * Sets whether the browser should supply the current location. Turning it off
         * falls back to the user-supplied location (or the map centre). Turning it on
         * installs the geolocation handlers on first use; they remain installed even
         * if the option is later switched off again.
         */
        setUseBrowserLocation = (value: boolean) =>
        {
            this._UseBrowserLocation = value;
            if(!this._UseBrowserLocation || !this.getGeoLocationAvailable()) {
                if(this.getUserHasAssignedCurrentLocation()) this.setCurrentLocation(this._UserSuppliedCurrentLocation);
                else                                         this.setCurrentLocation(this._MapCentreLocation);
            } else {
                if(this._GeoLocationHandlersInstalled) {
                    if(this.getBrowserIsSupplyingLocation()) {
                        this.setCurrentLocation(this._LastBrowserLocation);
                    }
                } else {
                    this._GeoLocationHandlersInstalled = true;
                    // Take one immediate fix, then keep watching. A watch error clears
                    // the last browser location so the fallback sources take over.
                    // NOTE(review): 'Position' is the legacy geolocation type (now
                    // GeolocationPosition) — relies on the project's lib/typings.
                    navigator.geolocation.getCurrentPosition((position: Position) => {
                        this.useBrowserPosition(position);
                        navigator.geolocation.watchPosition(
                            this.useBrowserPosition,
                            () => {
                                this._LastBrowserLocation = null
                            }
                        );
                    });
                }
            }
        }
        /**
         * Records a position reported by the browser and, if the browser is the
         * active source, makes it the current location.
         */
        private useBrowserPosition = (position: Position) =>
        {
            this._LastBrowserLocation = { lat: position.coords.latitude, lng: position.coords.longitude };
            if(this.getBrowserIsSupplyingLocation()) {
                this.setCurrentLocation(this._LastBrowserLocation);
            }
        }

        /**
         * Gets a value indicating that the browser is supplying the current location.
         */
        getBrowserIsSupplyingLocation = () : boolean =>
        {
            return !!(this._UseBrowserLocation && this._LastBrowserLocation);
        }

        /**
         * Gets the current location supplied by the user.
         */
        getUserSuppliedCurrentLocation = () : ILatLng =>
        {
            return this._UserSuppliedCurrentLocation;
        }
        /**
         * Sets the user-supplied location. Applied as the current location unless the
         * browser is currently supplying it.
         * NOTE(review): the trailing '&& value' is redundant — value is already
         * checked by the outer if.
         */
        setUserSuppliedCurrentLocation = (value: ILatLng) =>
        {
            if(value && value !== this._UserSuppliedCurrentLocation) {
                this._UserSuppliedCurrentLocation = value;
                if(!this.getBrowserIsSupplyingLocation() && value) this.setCurrentLocation(value);
            }
        }

        /**
         * Gets a value indicating that the user has supplied a current location.
         */
        getUserHasAssignedCurrentLocation = () : boolean =>
        {
            return this._UserSuppliedCurrentLocation !== null;
        }

        /**
         * Gets the centre location from the map that is currently supplying approximate locations.
         */
        getMapCentreLocation = () : ILatLng =>
        {
            return this._MapCentreLocation;
        }
        /**
         * Records the map centre and, when the map is the active source, makes it
         * the current location.
         */
        setMapCentreLocation = (value: ILatLng) =>
        {
            if(value && value !== this._MapCentreLocation) {
                this._MapCentreLocation = value;
                if(this.getMapIsSupplyingLocation()) {
                    this.setCurrentLocation(value);
                }
            }
        }

        /**
         * Returns true if the current location is the map centre.
         */
        getMapIsSupplyingLocation = () : boolean =>
        {
            return !this.getUserHasAssignedCurrentLocation() && !this.getBrowserIsSupplyingLocation();
        }

        /**
         * Gets the JQuery element that holds the map that is being used for approximate locations.
         */
        getMapForApproximateLocation = () : JQuery =>
        {
            return this._MapForApproximateLocation;
        }
        /**
         * Sets the approximate-location map, rewiring the centre-changed hook from any
         * previous map. On a first visit (no user-supplied location yet) the map centre
         * is adopted and persisted as the user-supplied location.
         */
        setMapForApproximateLocation = (value: JQuery) =>
        {
            if(this._MapForApproximateLocationCentreChangedHookResult) {
                this._MapForApproximateLocationPlugin.unhook(this._MapForApproximateLocationCentreChangedHookResult);
                this._MapForApproximateLocationCentreChangedHookResult = null;
            }

            this._MapForApproximateLocation = value;
            if(this._MapForApproximateLocation != null) {
                this._MapForApproximateLocationPlugin = VRS.jQueryUIHelper.getMapPlugin(this._MapForApproximateLocation);
                this._MapForApproximateLocationCentreChangedHookResult = this._MapForApproximateLocationPlugin.hookCenterChanged(this.mapForApproximateLocationCentreChanged, this);

                if(VRS.globalOptions.currentLocationUseMapCentreForFirstVisit && !this._UserSuppliedCurrentLocation) {
                    var centre = this._MapForApproximateLocationPlugin.getCenter();
                    this.setUserSuppliedCurrentLocation(centre);

                    // Load the settings. If there were no settings then it'll return the current centre, which is the
                    // map centre. We can then safely save the settings with the map centre and from then on it'll use
                    // them, regardless of where the user drags the map. If the user sets their own location then this
                    // will also stop us from overwriting it.
                    var settings = this.loadState();
                    if(settings.userSuppliedLocation.lat === centre.lat && settings.userSuppliedLocation.lng === centre.lng) {
                        this.applyState(settings);
                        this.saveState();
                    }
                }
            }

            this.showCurrentLocationOnMap();
            this.determineLocationFromMap();
        }

        /**
         * Gets a value indicating that the current location marker is currently being displayed on the map.
         */
        getIsSetCurrentLocationMarkerDisplayed = () : boolean =>
        {
            return !!(this._SetCurrentLocationMarker);
        }
        /**
         * Shows or hides the draggable "set current location" marker.
         */
        setIsSetCurrentLocationMarkerDisplayed = (value: boolean) =>
        {
            if(value !== this.getIsSetCurrentLocationMarkerDisplayed()) {
                this.showOrHideSetCurrentLocationMarker(value);
            }
        }

        /**
         * Gets a value indicating whether the current location marker should be shown on the map.
         */
        getShowCurrentLocationOnMap = () : boolean =>
        {
            return this._ShowCurrentLocationOnMap;
        }
        /**
         * Sets whether the current location marker should be shown and refreshes the map.
         */
        setShowCurrentLocationOnMap = (value: boolean) =>
        {
            if(this._ShowCurrentLocationOnMap !== value) {
                this._ShowCurrentLocationOnMap = value;
                this.showCurrentLocationOnMap();
            }
        }

        /**
         * Hooks an event that is raised when the current location is changed.
         */
        hookCurrentLocationChanged = (callback: () => void, forceThis?: Object) : IEventHandle =>
        {
            return this._Dispatcher.hook(this._Events.currentLocationChanged, callback, forceThis);
        }

        /**
         * Unhooks an event from the object.
         */
        unhook = (hookResult: IEventHandle) =>
        {
            this._Dispatcher.unhook(hookResult);
        }

        /**
         * Releases the resources held by the object.
         */
        dispose = () =>
        {
            this.destroyCurrentLocationMarker();
            this.showOrHideSetCurrentLocationMarker(false);

            if(this._MapForApproximateLocationCentreChangedHookResult) {
                this._MapForApproximateLocationPlugin.unhook(this._MapForApproximateLocationCentreChangedHookResult);
                this._MapForApproximateLocationCentreChangedHookResult = null;
            }
            this._MapForApproximateLocation = null;
        }

        /**
         * Saves the current state of the object.
         */
        saveState = () =>
        {
            VRS.configStorage.save(this.persistenceKey(), this.createSettings());
        }

        /**
         * Returns the previously saved state or the current state if no state has been saved.
         */
        loadState = () : CurrentLocation_SaveState =>
        {
            var savedSettings = VRS.configStorage.load(this.persistenceKey(), {});
            return $.extend(this.createSettings(), savedSettings);
        }

        /**
         * Applies the previously saved state to the object.
         */
        applyState = (settings: CurrentLocation_SaveState) =>
        {
            this.setUserSuppliedCurrentLocation(settings.userSuppliedLocation);
            this.setUseBrowserLocation(settings.useBrowserLocation);
            this.setShowCurrentLocationOnMap(settings.showCurrentLocation);
        }

        /**
         * Loads and applies the previously saved state to the object.
         */
        loadAndApplyState = () =>
        {
            this.applyState(this.loadState());
        }

        /**
         * Returns the key to save the state against.
         */
        private persistenceKey() : string
        {
            return 'vrsCurrentLocation-' + this.getName();
        }

        /**
         * Returns the current state of the object.
         */
        private createSettings() : CurrentLocation_SaveState
        {
            return {
                userSuppliedLocation:   this.getUserSuppliedCurrentLocation(),
                useBrowserLocation:     this.getUseBrowserLocation(),
                showCurrentLocation:    this.getShowCurrentLocationOnMap()
            };
        }

        /**
         * Returns the option pane that allows the object to be configured.
         */
        createOptionPane = (displayOrder: number, mapForLocationDisplay: JQuery) : OptionPane =>
        {
            var pane = new VRS.OptionPane({
                name:           'vrsCurrentLocation' + this.getName(),
                titleKey:       'PaneCurrentLocation',
                displayOrder:   displayOrder
            });

            // The "set location by dragging a marker" field only appears when there is a map to drag it on.
            if(!mapForLocationDisplay) mapForLocationDisplay = this.getMapForApproximateLocation();
            if(mapForLocationDisplay && VRS.globalOptions.currentLocationConfigurable) {
                this._MapForDisplay = mapForLocationDisplay;

                pane.addField(new VRS.OptionFieldLabel({
                    name:           'instructions',
                    labelKey:       'CurrentLocationInstruction'
                }));
                pane.addField(new VRS.OptionFieldCheckBox({
                    name:           'setCurrentLocation',
                    labelKey:       'SetCurrentLocation',
                    getValue:       this.getIsSetCurrentLocationMarkerDisplayed,
                    setValue:       this.setIsSetCurrentLocationMarkerDisplayed
                }));
            }

            if(VRS.globalOptions.currentLocationUseGeoLocation && this.getGeoLocationAvailable()) {
                pane.addField(new VRS.OptionFieldCheckBox({
                    name:           'useBrowserLocation',
                    labelKey:       'UseBrowserLocation',
                    getValue:       this.getUseBrowserLocation,
                    setValue:       this.setUseBrowserLocation,
                    saveState:      this.saveState
                }));
            }

            pane.addField(new VRS.OptionFieldCheckBox({
                name:           'showCurrentLocation',
                labelKey:       'ShowCurrentLocation',
                getValue:       this.getShowCurrentLocationOnMap,
                setValue:       this.setShowCurrentLocationOnMap,
                saveState:      this.saveState,
                keepWithNext:   true
            }));
            pane.addField(new VRS.OptionFieldLabel({
                name:           'currentLocationValue',
                // Displays the current lat/lng to 5 decimal places alongside the checkbox.
                labelKey:       () => {
                    var location = this.getCurrentLocation();
                    var lat = location && location.lat !== undefined ? VRS.stringUtility.formatNumber(location.lat, 'N5') : '';
                    var lng = location && location.lng !== undefined ? VRS.stringUtility.formatNumber(location.lng, 'N5') : '';
                    return lat && lng ? ' (' + lat + ' / ' + lng + ')' : '';
                }
            }));

            return pane;
        }

        /**
         * Sets the current location to the map centre.
         */
        private determineLocationFromMap = () =>
        {
            if(this._MapForApproximateLocationPlugin) {
                var centre = this._MapForApproximateLocationPlugin.getCenter();
                if(centre) this.setMapCentreLocation(centre);
            }
        }

        /**
         * Hides or shows the current location based on the setting of isMarkerDisplayed.
         */
        private showOrHideSetCurrentLocationMarker = (showMarker: boolean) =>
        {
            if(this._MapForDisplay) {
                var plugin: IMap = VRS.jQueryUIHelper.getMapPlugin(this._MapForDisplay);

                if(!showMarker) {
                    if(this._MapMarkerDraggedHookResult)    plugin.unhook(this._MapMarkerDraggedHookResult);
                    if(this._SetCurrentLocationMarker)      plugin.destroyMarker(this._SetCurrentLocationMarker);
                    this._MapMarkerDraggedHookResult = null;
                    this._SetCurrentLocationMarker = null;
                } else {
                    var markerOptions: IMapMarkerSettings = {
                        animateAdd:     true,
                        clickable:      false,
                        draggable:      true,
                        flat:           true,
                        optimized:      false,
                        raiseOnDrag:    true,
                        visible:        true,
                        zIndex:         200
                    };
                    // Start the marker at the user's own location if they have one, else the effective current location.
                    var currentLocation = this.getUserSuppliedCurrentLocation() || this.getCurrentLocation();
                    if(currentLocation) {
                        markerOptions.position = currentLocation;
                    }
                    if(VRS.globalOptions.currentLocationIconUrl) {
                        markerOptions.icon = VRS.globalOptions.currentLocationIconUrl;
                    }

                    this._SetCurrentLocationMarker = plugin.addMarker('setCurrentLocation', markerOptions);
                    this._MapMarkerDraggedHookResult = plugin.hookMarkerDragged(this.setCurrentLocationMarkerDragged);

                    if(currentLocation) plugin.panTo(currentLocation);
                }
            }
        }

        /**
         * Called when the user has finished dragging the marker representing the current location on the map.
         */
        private setCurrentLocationMarkerDragged = (event: Event, data: IMapMarkerEventArgs) =>
        {
            if(this._SetCurrentLocationMarker && data.id === this._SetCurrentLocationMarker.id) {
                // NOTE(review): 'plugin' is unused here — candidate for removal.
                var plugin: IMap = VRS.jQueryUIHelper.getMapPlugin(this._MapForDisplay);

                this.setUserSuppliedCurrentLocation(this._SetCurrentLocationMarker.getPosition());
                this.saveState();
            }
        }

        /**
         * Called when the user drags the approximate location map around.
         */
        private mapForApproximateLocationCentreChanged = () =>
        {
            this.determineLocationFromMap();
        }

        /**
         * Shows, hides or moves the current location marker on the map.
         */
        private showCurrentLocationOnMap = () =>
        {
            if(this._MapForApproximateLocation) {
                var plugin = this._MapForApproximateLocationPlugin;
                var currentLocation = this.getCurrentLocation();
                var showCurrentLocation = this.getShowCurrentLocationOnMap();

                if(!currentLocation || !showCurrentLocation) {
                    // Nothing to plot (or plotting disabled) - remove any existing marker.
                    this.destroyCurrentLocationMarker();
                } else {
                    if(this._CurrentLocationMarker) {
                        // Marker already exists - just move it if the location changed.
                        if(this._PlottedCurrentLocation.lat !== currentLocation.lat || this._PlottedCurrentLocation.lng !== currentLocation.lng) {
                            this._PlottedCurrentLocation = currentLocation;
                            this._CurrentLocationMarker.setPosition(this._PlottedCurrentLocation);
                        }
                    } else {
                        this._PlottedCurrentLocation = currentLocation;
                        this._CurrentLocationMarker = plugin.addMarker('staticCurrentLocationMarker', {
                            clickable: false,
                            draggable: false,
                            flat: true,
                            optimized: false,
                            visible: true,
                            position: this._PlottedCurrentLocation,
                            icon: new VRS.MapIcon(
                                VRS.globalOptions.currentLocationImageUrl,
                                VRS.globalOptions.currentLocationImageSize,
                                null,
                                null,
                                VRS.globalOptions.currentLocationImageSize,
                                null
                            ),
                            zIndex: 0
                        });
                    }
                }
            }
        }

        /**
         * Destroys the current location marker.
         */
        private destroyCurrentLocationMarker = () =>
        {
            if(this._CurrentLocationMarker) {
                var plugin = this._MapForApproximateLocationPlugin;
                plugin.destroyMarker(this._CurrentLocationMarker);
                this._CurrentLocationMarker = null;
                this._PlottedCurrentLocation = null;
            }
        }
    }
    /*
     * Pre-builts - the shared CurrentLocation singleton (uses the 'default' state name).
     */
    export var currentLocation = new VRS.CurrentLocation();
} | the_stack |
'use strict'
// **Github:** https://github.com/fidm/quic
//
// **License:** MIT
import { BufferVisitor } from './common'
// https://github.com/google/proto-quic/blob/master/src/net/quic/core/quic_error_codes.h
// Sentinels returned when a name or numeric code is not found in the tables
// below. 0xffffffff is QUIC's reserved "invalid" code.
const INVALID_ERROR = { name: 'INVALID_ERROR_CODE', code: 0xffffffff }
const INVALID_RST_STREAM_ERROR = { name: 'INVALID_RST_STREAM_ERROR_CODE', code: 0xffffffff }
// Stream-level (RST_STREAM) error table, keyed by canonical name. Codes and
// messages mirror QuicRstStreamErrorCode from Chromium's quic_error_codes.h.
// Built on a null-prototype object so numeric indexing (added below) can
// never collide with Object.prototype members.
const streamErrors = Object.assign(Object.create(null) as object, {
  QUIC_STREAM_NO_ERROR: {
    code: 0,
    message: '',
  },
  QUIC_ERROR_PROCESSING_STREAM: {
    code: 1,
    message: 'There was some error which halted stream processing.',
  },
  QUIC_MULTIPLE_TERMINATION_OFFSETS: {
    code: 2,
    message: 'We got two fin or reset offsets which did not match.',
  },
  QUIC_BAD_APPLICATION_PAYLOAD: {
    code: 3,
    message: 'We got bad payload and can not respond to it at the protocol level.',
  },
  QUIC_STREAM_CONNECTION_ERROR: {
    code: 4,
    message: 'Stream closed due to connection error. No reset frame is sent when this happens.',
  },
  QUIC_STREAM_PEER_GOING_AWAY: {
    code: 5,
    message: 'GoAway frame sent. No more stream can be created.',
  },
  QUIC_STREAM_CANCELLED: {
    code: 6,
    message: 'The stream has been cancelled.',
  },
  QUIC_RST_ACKNOWLEDGEMENT: {
    code: 7,
    // Fixed: the two fragments previously concatenated to "flow controlaccounting".
    message: 'Closing stream locally, sending a RST to allow for proper flow control ' +
      'accounting. Sent in response to a RST from the peer.',
  },
  QUIC_REFUSED_STREAM: {
    code: 8,
    message: 'Receiver refused to create the stream (because its limit on open streams' +
      ' has been reached). The sender should retry the request later (using another stream).',
  },
  QUIC_INVALID_PROMISE_URL: {
    code: 9,
    message: 'Invalid URL in PUSH_PROMISE request header.',
  },
  QUIC_UNAUTHORIZED_PROMISE_URL: {
    code: 10,
    message: 'Server is not authoritative for this URL.',
  },
  QUIC_DUPLICATE_PROMISE_URL: {
    code: 11,
    message: 'Can\'t have more than one active PUSH_PROMISE per URL.',
  },
  QUIC_PROMISE_VARY_MISMATCH: {
    code: 12,
    message: 'Vary check failed.',
  },
  QUIC_INVALID_PROMISE_METHOD: {
    code: 13,
    message: 'Only GET and HEAD methods allowed.',
  },
  QUIC_PUSH_STREAM_TIMED_OUT: {
    code: 14,
    message: 'The push stream is unclaimed and timed out.',
  },
  QUIC_HEADERS_TOO_LARGE: {
    code: 15,
    message: 'Received headers were too large.',
  },
  QUIC_STREAM_LAST_ERROR: {
    code: 16,
    message: 'No error', // No error. Used as bound while iterating.
  },
})
// Connection-level error table, keyed by canonical name. Codes and messages
// mirror QuicErrorCode from Chromium's quic_error_codes.h (codes are not
// sequential here because they follow the upstream assignments). Built on a
// null-prototype object so the numeric indexing added below cannot collide
// with Object.prototype members.
const errors = Object.assign(Object.create(null) as object, {
  QUIC_NO_ERROR: {
    code: 0,
    message: '',
  },
  QUIC_INTERNAL_ERROR: {
    code: 1,
    message: 'Connection has reached an invalid state.',
  },
  QUIC_STREAM_DATA_AFTER_TERMINATION: {
    code: 2,
    // Fixed wording: was "after the a fin or reset".
    message: 'There were data frames after a fin or reset.',
  },
  QUIC_INVALID_PACKET_HEADER: {
    code: 3,
    message: 'Control frame is malformed.',
  },
  QUIC_INVALID_FRAME_DATA: {
    code: 4,
    message: 'Frame data is malformed.',
  },
  QUIC_MISSING_PAYLOAD: {
    code: 48,
    message: 'The packet contained no payload.',
  },
  QUIC_INVALID_FEC_DATA: {
    code: 5,
    message: 'FEC data is malformed.',
  },
  QUIC_INVALID_STREAM_DATA: {
    code: 46,
    message: 'STREAM frame data is malformed.',
  },
  QUIC_OVERLAPPING_STREAM_DATA: {
    code: 87,
    message: 'STREAM frame data overlaps with buffered data.',
  },
  QUIC_UNENCRYPTED_STREAM_DATA: {
    code: 61,
    message: 'Received STREAM frame data is not encrypted.',
  },
  QUIC_ATTEMPT_TO_SEND_UNENCRYPTED_STREAM_DATA: {
    code: 88,
    message: 'Attempt to send unencrypted STREAM frame.',
  },
  QUIC_MAYBE_CORRUPTED_MEMORY: {
    code: 89,
    message: 'Received a frame which is likely the result of memory corruption.',
  },
  QUIC_UNENCRYPTED_FEC_DATA: {
    code: 77,
    message: 'FEC frame data is not encrypted.',
  },
  QUIC_INVALID_RST_STREAM_DATA: {
    code: 6,
    message: 'RST_STREAM frame data is malformed.',
  },
  QUIC_INVALID_CONNECTION_CLOSE_DATA: {
    code: 7,
    message: 'CONNECTION_CLOSE frame data is malformed.',
  },
  QUIC_INVALID_GOAWAY_DATA: {
    code: 8,
    message: 'GOAWAY frame data is malformed.',
  },
  QUIC_INVALID_WINDOW_UPDATE_DATA: {
    code: 57,
    message: 'WINDOW_UPDATE frame data is malformed.',
  },
  QUIC_INVALID_BLOCKED_DATA: {
    code: 58,
    message: 'BLOCKED frame data is malformed.',
  },
  QUIC_INVALID_STOP_WAITING_DATA: {
    code: 60,
    message: 'STOP_WAITING frame data is malformed.',
  },
  QUIC_INVALID_PATH_CLOSE_DATA: {
    code: 78,
    message: 'PATH_CLOSE frame data is malformed.',
  },
  QUIC_INVALID_ACK_DATA: {
    code: 9,
    message: 'ACK frame data is malformed.',
  },
  QUIC_INVALID_VERSION_NEGOTIATION_PACKET: {
    code: 10,
    message: 'Version negotiation packet is malformed.',
  },
  QUIC_INVALID_PUBLIC_RST_PACKET: {
    code: 11,
    message: 'Public RST packet is malformed.',
  },
  QUIC_DECRYPTION_FAILURE: {
    code: 12,
    message: 'There was an error decrypting.',
  },
  QUIC_ENCRYPTION_FAILURE: {
    code: 13,
    message: 'There was an error encrypting.',
  },
  QUIC_PACKET_TOO_LARGE: {
    code: 14,
    message: 'The packet exceeded kMaxPacketSize.',
  },
  QUIC_PEER_GOING_AWAY: {
    code: 16,
    message: 'The peer is going away. May be a client or server.',
  },
  QUIC_INVALID_STREAM_ID: {
    code: 17,
    message: 'A stream ID was invalid.',
  },
  QUIC_INVALID_PRIORITY: {
    code: 49,
    message: 'A priority was invalid.',
  },
  QUIC_TOO_MANY_OPEN_STREAMS: {
    code: 18,
    message: 'Too many streams already open.',
  },
  QUIC_TOO_MANY_AVAILABLE_STREAMS: {
    code: 76,
    message: 'The peer created too many available streams.',
  },
  QUIC_PUBLIC_RESET: {
    code: 19,
    message: 'Received public reset for this connection.',
  },
  QUIC_INVALID_VERSION: {
    code: 20,
    message: 'Invalid protocol version.',
  },
  QUIC_INVALID_HEADER_ID: {
    code: 22,
    message: 'The Header ID for a stream was too far from the previous.',
  },
  QUIC_INVALID_NEGOTIATED_VALUE: {
    code: 23,
    message: 'Negotiable parameter received during handshake had invalid value.',
  },
  QUIC_DECOMPRESSION_FAILURE: {
    code: 24,
    message: 'There was an error decompressing data.',
  },
  QUIC_NETWORK_IDLE_TIMEOUT: {
    code: 25,
    message: 'The connection timed out due to no network activity.',
  },
  QUIC_HANDSHAKE_TIMEOUT: {
    code: 67,
    message: 'The connection timed out waiting for the handshake to complete.',
  },
  QUIC_ERROR_MIGRATING_ADDRESS: {
    code: 26,
    message: 'There was an error encountered migrating addresses.',
  },
  QUIC_ERROR_MIGRATING_PORT: {
    code: 86,
    message: 'There was an error encountered migrating port only.',
  },
  QUIC_PACKET_WRITE_ERROR: {
    code: 27,
    message: 'There was an error while writing to the socket.',
  },
  QUIC_PACKET_READ_ERROR: {
    code: 51,
    message: 'There was an error while reading from the socket.',
  },
  QUIC_EMPTY_STREAM_FRAME_NO_FIN: {
    code: 50,
    message: 'We received a STREAM_FRAME with no data and no fin flag set.',
  },
  QUIC_INVALID_HEADERS_STREAM_DATA: {
    code: 56,
    message: 'We received invalid data on the headers stream.',
  },
  QUIC_HEADERS_STREAM_DATA_DECOMPRESS_FAILURE: {
    code: 97,
    message: 'Invalid data on the headers stream received because of decompression failure.',
  },
  QUIC_FLOW_CONTROL_RECEIVED_TOO_MUCH_DATA: {
    code: 59,
    message: 'The peer received too much data, violating flow control.',
  },
  QUIC_FLOW_CONTROL_SENT_TOO_MUCH_DATA: {
    code: 63,
    message: 'The peer sent too much data, violating flow control.',
  },
  QUIC_FLOW_CONTROL_INVALID_WINDOW: {
    code: 64,
    message: 'The peer received an invalid flow control window.',
  },
  QUIC_CONNECTION_IP_POOLED: {
    code: 62,
    message: 'The connection has been IP pooled into an existing connection.',
  },
  QUIC_TOO_MANY_OUTSTANDING_SENT_PACKETS: {
    code: 68,
    message: 'The connection has too many outstanding sent packets.',
  },
  QUIC_TOO_MANY_OUTSTANDING_RECEIVED_PACKETS: {
    code: 69,
    message: 'The connection has too many outstanding received packets.',
  },
  QUIC_CONNECTION_CANCELLED: {
    code: 70,
    message: 'The quic connection has been cancelled.',
  },
  QUIC_BAD_PACKET_LOSS_RATE: {
    code: 71,
    message: 'Disabled QUIC because of high packet loss rate.',
  },
  QUIC_PUBLIC_RESETS_POST_HANDSHAKE: {
    code: 73,
    message: 'Disabled QUIC because of too many PUBLIC_RESETs post handshake.',
  },
  QUIC_FAILED_TO_SERIALIZE_PACKET: {
    code: 75,
    message: 'Closed because we failed to serialize a packet.',
  },
  QUIC_TOO_MANY_RTOS: {
    code: 85,
    message: 'QUIC timed out after too many RTOs.',
  },
  QUIC_HANDSHAKE_FAILED: {
    code: 28,
    // Fixed typo: was "Hanshake failed."
    message: 'Handshake failed.',
  },
  QUIC_CRYPTO_TAGS_OUT_OF_ORDER: {
    code: 29,
    message: 'Handshake message contained out of order tags.',
  },
  QUIC_CRYPTO_TOO_MANY_ENTRIES: {
    code: 30,
    message: 'Handshake message contained too many entries.',
  },
  QUIC_CRYPTO_INVALID_VALUE_LENGTH: {
    code: 31,
    message: 'Handshake message contained an invalid value length.',
  },
  QUIC_CRYPTO_MESSAGE_AFTER_HANDSHAKE_COMPLETE: {
    code: 32,
    message: 'A crypto message was received after the handshake was complete.',
  },
  QUIC_INVALID_CRYPTO_MESSAGE_TYPE: {
    code: 33,
    message: 'A crypto message was received with an illegal message tag.',
  },
  QUIC_INVALID_CRYPTO_MESSAGE_PARAMETER: {
    code: 34,
    message: 'A crypto message was received with an illegal parameter.',
  },
  QUIC_INVALID_CHANNEL_ID_SIGNATURE: {
    code: 52,
    message: 'An invalid channel id signature was supplied.',
  },
  QUIC_CRYPTO_MESSAGE_PARAMETER_NOT_FOUND: {
    code: 35,
    message: 'A crypto message was received with a mandatory parameter missing.',
  },
  QUIC_CRYPTO_MESSAGE_PARAMETER_NO_OVERLAP: {
    code: 36,
    message: 'A crypto message was received with a parameter that has no overlap with the local parameter.',
  },
  QUIC_CRYPTO_MESSAGE_INDEX_NOT_FOUND: {
    code: 37,
    message: 'A crypto message was received that contained a parameter with too few values.',
  },
  QUIC_UNSUPPORTED_PROOF_DEMAND: {
    code: 94,
    // Fixed typo: was "unsupport proof type".
    message: 'A demand for an unsupported proof type was received.',
  },
  QUIC_CRYPTO_INTERNAL_ERROR: {
    code: 38,
    // Fixed typo: was "occured".
    message: 'An internal error occurred in crypto processing.',
  },
  QUIC_CRYPTO_VERSION_NOT_SUPPORTED: {
    code: 39,
    message: 'A crypto handshake message specified an unsupported version.',
  },
  QUIC_CRYPTO_HANDSHAKE_STATELESS_REJECT: {
    code: 72,
    message: 'A crypto handshake message resulted in a stateless reject.',
  },
  QUIC_CRYPTO_NO_SUPPORT: {
    code: 40,
    message: 'There was no intersection between the crypto primitives supported by the peer and ourselves.',
  },
  QUIC_CRYPTO_TOO_MANY_REJECTS: {
    code: 41,
    message: 'The server rejected our client hello messages too many times.',
  },
  QUIC_PROOF_INVALID: {
    code: 42,
    message: 'The client rejected the server\'s certificate chain or signature.',
  },
  QUIC_CRYPTO_DUPLICATE_TAG: {
    code: 43,
    message: 'A crypto message was received with a duplicate tag.',
  },
  QUIC_CRYPTO_ENCRYPTION_LEVEL_INCORRECT: {
    code: 44,
    // i.e. it should have been encrypted but was not.
    message: 'A crypto message was received with the wrong encryption level.',
  },
  QUIC_CRYPTO_SERVER_CONFIG_EXPIRED: {
    code: 45,
    message: 'The server config for a server has expired.',
  },
  QUIC_CRYPTO_SYMMETRIC_KEY_SETUP_FAILED: {
    code: 53,
    message: 'We failed to setup the symmetric keys for a connection.',
  },
  QUIC_CRYPTO_MESSAGE_WHILE_VALIDATING_CLIENT_HELLO: {
    code: 54,
    message: 'A handshake message arrived, but we are still validating the previous handshake message.',
  },
  QUIC_CRYPTO_UPDATE_BEFORE_HANDSHAKE_COMPLETE: {
    code: 65,
    message: 'A server config update arrived before the handshake is complete.',
  },
  QUIC_CRYPTO_CHLO_TOO_LARGE: {
    code: 90,
    message: 'CHLO cannot fit in one packet.',
  },
  QUIC_VERSION_NEGOTIATION_MISMATCH: {
    code: 55,
    message: 'This connection involved a version negotiation which appears to have been tampered with.',
  },
  QUIC_BAD_MULTIPATH_FLAG: {
    code: 79,
    message: 'Multipath is not enabled, but a packet with multipath flag on is received.',
  },
  QUIC_MULTIPATH_PATH_DOES_NOT_EXIST: {
    code: 91,
    message: 'A path is supposed to exist but does not.',
  },
  QUIC_MULTIPATH_PATH_NOT_ACTIVE: {
    code: 92,
    message: 'A path is supposed to be active but is not.',
  },
  QUIC_IP_ADDRESS_CHANGED: {
    code: 80,
    message: 'IP address changed causing connection close.',
  },
  QUIC_CONNECTION_MIGRATION_NO_MIGRATABLE_STREAMS: {
    code: 81,
    message: 'Network changed, but connection had no migratable streams.',
  },
  QUIC_CONNECTION_MIGRATION_TOO_MANY_CHANGES: {
    code: 82,
    message: 'Connection changed networks too many times.',
  },
  QUIC_CONNECTION_MIGRATION_NO_NEW_NETWORK: {
    code: 83,
    message: 'Connection migration was attempted, but there was no new network to migrate to.',
  },
  QUIC_CONNECTION_MIGRATION_NON_MIGRATABLE_STREAM: {
    code: 84,
    message: 'Network changed, but connection had one or more non-migratable streams.',
  },
  QUIC_TOO_MANY_FRAME_GAPS: {
    code: 93,
    message: 'Stream frames arrived too discontiguously so that stream sequencer buffer maintains too many gaps.',
  },
  QUIC_STREAM_SEQUENCER_INVALID_STATE: {
    code: 95,
    message: 'Sequencer buffer get into weird state where continuing read/write will lead to crash.',
  },
  QUIC_TOO_MANY_SESSIONS_ON_SERVER: {
    code: 96,
    message: 'Connection closed because of server hits max number of sessions allowed.',
  },
  QUIC_LAST_ERROR: {
    code: 98,
    // No error. Used as bound while iterating.
    message: 'No error',
  },
})
// Index both tables by numeric code as well as by name: every entry learns
// its own canonical `name`, and table[code] resolves to the same object as
// table[NAME]. The `any` views are what the constructors below look up in.
function indexErrorTable (table: any): void {
  Object.keys(table).forEach((name) => {
    const entry = table[name]
    entry.name = name
    table[entry.code] = entry
  })
}
const _errors = errors as any
const _streamErrors = streamErrors as any
indexErrorTable(_errors)
indexErrorTable(_streamErrors)
/**
 * QUICError represents a QUIC connection-level error.
 *
 * Instances carry the canonical `name` and numeric `code` from the QUIC
 * error table; names/codes not present in the table resolve to
 * INVALID_ERROR_CODE (0xffffffff).
 */
export class QUICError extends Error {
  /**
   * Coerces an arbitrary error-like value into a QUICError: an existing
   * QUICError is returned unchanged, a string is looked up as a name, a
   * number as a code, and any other object is mapped via its `code`
   * (when non-negative) or its `message`.
   */
  static fromError (err: any = errors.QUIC_NO_ERROR): QUICError {
    if (err instanceof QUICError) {
      return err
    }
    switch (typeof err) {
      case 'string':
        return new QUICError(err)
      case 'number':
        return new QUICError(err)
    }
    return new QUICError(err.code >= 0 ? err.code : err.message)
  }

  /**
   * Reads a 4-byte big-endian error code from the buffer.
   * @throws {QUICError} INVALID_ERROR_CODE when fewer than 4 bytes remain.
   */
  static fromBuffer (bufv: BufferVisitor): QUICError {
    bufv.walk(4)
    if (bufv.isOutside()) {
      throw new QUICError('INVALID_ERROR_CODE')
    }
    const code = bufv.buf.readUInt32BE(bufv.start)
    return new QUICError(code)
  }

  /**
   * Returns null for nullish input, a QUICStreamError unchanged, and
   * anything else coerced to a QUICError.
   */
  static checkAny (err?: any): QUICError | QUICStreamError | null {
    if (err == null) {
      return null
    }
    if (err instanceof QUICStreamError) {
      return err
    }
    return QUICError.fromError(err)
  }

  name: string
  code: number
  constructor (nameOrCode: string | number) {
    const error = _errors[nameOrCode] == null ? INVALID_ERROR : _errors[nameOrCode]
    // Error's message parameter must be a string; String() matches the
    // ToString coercion the Error constructor applies at runtime anyway,
    // so behavior is unchanged for numeric codes.
    super(error !== INVALID_ERROR ? error.message : String(nameOrCode))
    this.name = error.name
    this.code = error.code
    Error.captureStackTrace(this, QUICError)
  }

  /** True when this represents QUIC_NO_ERROR (code 0). */
  get isNoError () {
    return this.code === errors.QUIC_NO_ERROR.code
  }

  /** Plain-object view used by logging/serialization. */
  valueOf () {
    return {
      name: this.name,
      code: this.code,
      message: this.message,
    }
  }

  /** Wire size of the error code: always 4 bytes. */
  byteLen (): number {
    return 4
  }

  /**
   * Writes the 4-byte big-endian error code into the buffer.
   * @throws {QUICError} INVALID_ERROR_CODE when fewer than 4 bytes remain.
   */
  writeTo (bufv: BufferVisitor): BufferVisitor {
    bufv.walk(4)
    if (bufv.isOutside()) {
      throw new QUICError('INVALID_ERROR_CODE')
    }
    bufv.buf.writeUInt32BE(this.code, bufv.start)
    return bufv
  }
}
/**
 * QUICError representing a QUIC Stream Error.
 * Mirrors QUICError, but resolves names/codes against the stream-error table
 * and falls back to INVALID_RST_STREAM_ERROR for unknown inputs.
 */
export class QUICStreamError extends Error {
  /**
   * Coerces an arbitrary thrown value into a QUICStreamError.
   * Existing instances pass through; strings/numbers are treated as an error
   * name/code; other objects convert via a non-negative `code` or `message`.
   */
  static fromError (err: any = streamErrors.QUIC_STREAM_NO_ERROR): QUICStreamError {
    if (err instanceof QUICStreamError) {
      return err
    }
    switch (typeof err) {
      case 'string':
        return new QUICStreamError(err)
      case 'number':
        return new QUICStreamError(err)
    }
    return new QUICStreamError(err.code >= 0 ? err.code : err.message)
  }
  /** Decodes a 4-byte big-endian stream error code from the buffer. */
  static fromBuffer (bufv: BufferVisitor): QUICStreamError {
    bufv.walk(4)
    // NOTE(review): a truncated buffer throws a connection-level QUICError,
    // not a QUICStreamError — this mirrors QUICError.fromBuffer; confirm
    // callers that catch by instanceof expect this.
    if (bufv.isOutside()) {
      throw new QUICError('INVALID_ERROR_CODE')
    }
    const code = bufv.buf.readUInt32BE(bufv.start)
    return new QUICStreamError(code)
  }
  /**
   * Normalizes a possibly-null error: null/undefined stay null, connection
   * errors (QUICError) pass through, everything else becomes a stream error.
   */
  static checkAny (err?: any): QUICError | QUICStreamError | null {
    if (err == null) {
      return null
    }
    if (err instanceof QUICError) {
      return err
    }
    return QUICStreamError.fromError(err)
  }
  name: string
  code: number
  constructor (nameOrCode: string | number) {
    // Unknown names/codes fall back to INVALID_RST_STREAM_ERROR, in which
    // case the raw input itself becomes the message.
    const error = _streamErrors[nameOrCode] == null ? INVALID_RST_STREAM_ERROR : _streamErrors[nameOrCode]
    super(error !== INVALID_RST_STREAM_ERROR ? error.message : nameOrCode)
    this.name = error.name
    this.code = error.code
    Error.captureStackTrace(this, QUICStreamError)
  }
  /** True when this is the QUIC_STREAM_NO_ERROR sentinel. */
  get isNoError () {
    return this.code === streamErrors.QUIC_STREAM_NO_ERROR.code
  }
  /** Plain-object view for logging/serialization. */
  valueOf () {
    return {
      name: this.name,
      code: this.code,
      message: this.message,
    }
  }
  /** Encoded size on the wire: a fixed 4 bytes. */
  byteLen (): number {
    return 4
  }
  /** Writes the 4-byte big-endian code into the buffer. */
  writeTo (bufv: BufferVisitor): BufferVisitor {
    bufv.walk(4)
    if (bufv.isOutside()) {
      throw new QUICError('INVALID_ERROR_CODE')
    }
    bufv.buf.writeUInt32BE(this.code, bufv.start)
    return bufv
  }
}
// Public aliases: each class merged with its error table, so callers can
// write e.g. `QuicError.QUIC_INTERNAL_ERROR` or `StreamError.fromError(...)`.
export const QuicError = Object.assign(QUICError, errors)
export const StreamError = Object.assign(QUICStreamError, streamErrors)
'use strict';
// Bring key classes into scope, most importantly Fabric SDK network class
import * as x509 from '@ampretia/x509';
import { Block, Channel } from 'fabric-client';
import { Contract, FileSystemCheckpointer, FileSystemWallet, Gateway, Network } from 'fabric-network';
import * as fs from 'fs';
import * as path from 'path';
import { GenesisCheckpointer } from './checkpointer';
import { IFabricConfig } from './interfaces/config';
import { IChaincodeMetadata } from './interfaces/metadata';
import { IUser } from './interfaces/users';
import Utils from './utils';
/** A single chaincode invocation decoded from a committed Fabric block. */
export interface ITransaction {
    /** Channel-header timestamp, as epoch milliseconds. */
    timestamp: number;
    /** Name of the contract (namespace) that was invoked. */
    contract: string;
    /** Name of the invoked transaction function. */
    name: string;
    /** Invocation arguments, keyed by parameter names from the chaincode metadata. */
    parameters: {
        [s: string]: any;
    };
    /** Fabric transaction ID from the channel header. */
    txId: string;
    /** Identity that submitted the transaction. */
    caller: {
        /** Common name (CN) from the creator's X.509 certificate. */
        identity: string;
        /** MSP ID of the creator's organization. */
        msp: string;
    };
}
/** A block reduced to its number and the valid transactions it contains. */
export interface IBlock {
    /** Block number (Fabric reports it as a string; converted with Number()). */
    number: number;
    /** Decoded, validity-filtered transactions for this block. */
    transactions: any[];
}
export default class FabricProxy {
public readonly ccp: any;
public readonly wallet: FileSystemWallet;
private config: IFabricConfig;
constructor(config: IFabricConfig) {
const walletpath = path.resolve(process.cwd(), config.walletPath);
this.wallet = new FileSystemWallet(walletpath);
this.config = config;
const ccpPath = path.resolve(process.cwd(), config.connectionProfilePath);
const ccpJSON = fs.readFileSync(ccpPath, 'utf8');
this.ccp = JSON.parse(ccpJSON);
}
public async getUser(username: string): Promise<IUser> {
if (!(await this.wallet.exists(username))) {
throw new Error('could not find identity in wallet: ' + username);
}
const identity = await this.wallet.export(username) as any;
return Utils.certToUser(identity.certificate);
}
public async getMetadata(user: string = 'admin'): Promise<IChaincodeMetadata> {
const metadataBuff = await this.evaluateTransaction(user, 'org.hyperledger.fabric:GetMetadata');
return JSON.parse(metadataBuff.toString()) as IChaincodeMetadata;
}
public async evaluateTransaction(user: string, functionName: string, ...args: string[]): Promise<Buffer> {
return this.handleTransaction('evaluateTransaction', user, functionName, ...args);
}
public async submitTransaction(user: string, functionName: string, ...args: string[]): Promise<Buffer> {
return this.handleTransaction('submitTransaction', user, functionName, ...args);
}
public async addContractListener(
user: string, listenerName: string, eventName: string, callback: any, options: any,
) {
const gateway: Gateway = await this.setupGateway(user);
const contract = await this.getContract(gateway);
options = Object.assign(this.getCheckpointerOptions(), options);
await contract.addContractListener(listenerName, eventName, callback, options);
}
public async addBlockListener(
user: string, listenerName: string, callback: any, options: any,
) {
const gateway: Gateway = await this.setupGateway(user);
const network = await this.getNetwork(gateway);
options = Object.assign(this.getCheckpointerOptions(), options);
await network.addBlockListener(listenerName, (async (err, block: Block) => {
if (err) {
callback(err, null);
return;
}
callback(null, this.formatBlock(block, await this.getMetadata(user)));
}) as any, options);
}
public async getHistory(user: string, options: any): Promise<IBlock[]> {
const gateway: Gateway = await this.setupGateway(user);
const network: Network = await this.getNetwork(gateway);
const channel: Channel = network.getChannel();
const metadata = await this.getMetadata();
const info = await channel.queryInfo();
const height = (info.height as Long).toInt();
options = Object.assign({
checkpointer: {
factory: () => {
return new GenesisCheckpointer();
},
options: {},
},
}, options);
let list: any;
const blocks: IBlock[] = [];
await new Promise(async (resolve , reject) => {
const callback = async (err, block: Block): Promise<any> => {
if (err) {
return reject(err);
}
blocks.push(this.formatBlock(block, metadata));
if (Number(block.header.number) === height - 1) { // *1 as actually a string
resolve(true);
}
};
list = await network.addBlockListener('block-history-' + new Date().getTime(), callback as any, options);
});
list.unregister();
return blocks;
}
private async handleTransaction(
type: 'evaluateTransaction'| 'submitTransaction', user: string, functionName: string, ...args: string[]
): Promise<Buffer> {
try {
const gateway: Gateway = await this.setupGateway(user);
const contract: Contract = await this.getContract(gateway);
const buff: Buffer = await contract[type](`${functionName}`, ...args);
gateway.disconnect();
return buff;
} catch (error) {
throw error;
}
}
private async getContract(gateway: Gateway): Promise<Contract> {
try {
const network = await this.getNetwork(gateway);
return await network.getContract(this.config.contractName);
} catch (err) {
throw new Error('Error connecting to channel. Does channel name exist? ERROR:' + err.message);
}
}
private async getNetwork(gateway: Gateway): Promise<Network> {
try {
return await gateway.getNetwork(this.config.channelName);
} catch (err) {
throw new Error('Error connecting to channel. Does channel name exist? ERROR:' + err.message);
}
}
private async setupGateway(user: string): Promise<Gateway> {
try {
const gateway = new Gateway();
// Set connection options; use 'admin' identity from application wallet
const connectionOptions = {
clientTlsIdentity: user,
discovery: {enabled: false},
identity: user,
wallet: this.wallet,
};
// console.log('CONNECTION', connectionOptions);
// Connect to gateway using application specified parameters
await gateway.connect(this.ccp, connectionOptions);
return gateway;
} catch (error) {
throw error;
}
}
private getCheckpointerOptions(): any {
return {
checkpointer: {
factory: (channelName, fListenerName, fOptions) => {
return new FileSystemCheckpointer(channelName, fListenerName, fOptions);
},
options: {basePath: `checkpointers/${this.config.org}-checkpointer`},
},
};
}
private formatBlock(block: Block, metadata: IChaincodeMetadata): IBlock {
let blockTransactions: ITransaction[] = [];
block.data.data.forEach((data) => {
if (data.payload.data.hasOwnProperty('actions')) {
const date = new Date(data.payload.header.channel_header.timestamp);
const timestamp = date.getTime();
const txId = data.payload.header.channel_header.tx_id;
data.payload.data.actions.forEach((action) => {
const callArgs =
action.payload.chaincode_proposal_payload.input.chaincode_spec.input.args.map(
(arg) => {
return arg.toString();
},
);
const nsFcn = /([^:]*)(?::|^)(.*)/g.exec(callArgs[0]);
const params = callArgs.slice(1);
let contractName = nsFcn[1];
const functionName = nsFcn[2];
if (!contractName || contractName.trim() === '') {
for (const key in metadata.contracts) {
if (metadata.contracts[key].contractInstance.default) {
contractName = key;
}
}
}
const contract = metadata.contracts[contractName];
const transactionMetadata = contract.transactions.find((transaction) => {
return transaction.name === functionName;
});
const formattedParams = {};
params.forEach((param, index) => {
let value = param;
if (transactionMetadata.parameters[index].schema.type !== 'string') {
value = JSON.parse(param); // MAKING ASSUMPTION THAT CONTRACT
// IS USING STANDARD JSON PARSER OF CONTRACT API
}
formattedParams[transactionMetadata.parameters[index].name] = value;
});
const x509Cert = x509.parseCert(action.header.creator.IdBytes.toString());
blockTransactions.push({
caller: {
identity: x509Cert.subject.commonName,
msp: action.header.creator.Mspid,
},
contract: contractName,
name: functionName,
parameters: formattedParams,
timestamp,
txId,
});
});
}
});
// REMOVE INVALID TRANSACTIONS
blockTransactions = blockTransactions.filter((_, index) => {
return block.metadata.metadata[2][index] === 0;
});
return {
number: Number(block.header.number), // *1 as actually a string
transactions: blockTransactions,
};
}
} | the_stack |
import * as msRest from "@azure/ms-rest-js";
import * as msRestAzure from "@azure/ms-rest-azure-js";
import * as Models from "../models";
import * as Mappers from "../models/signalRPrivateEndpointConnectionsMappers";
import * as Parameters from "../models/parameters";
import { SignalRManagementClientContext } from "../signalRManagementClientContext";
/**
 * Class representing a SignalRPrivateEndpointConnections.
 *
 * Operations on the private endpoint connections of an Azure SignalR
 * resource. Each public operation exposes the usual ms-rest-js overload set
 * (promise / callback / options+callback) dispatching to one implementation
 * that forwards to the client with a static operation spec (defined at the
 * bottom of this file). NOTE(review): this file looks auto-generated —
 * regenerate rather than hand-edit if the API version changes.
 */
export class SignalRPrivateEndpointConnections {
  private readonly client: SignalRManagementClientContext;
  /**
   * Create a SignalRPrivateEndpointConnections.
   * @param {SignalRManagementClientContext} client Reference to the service client.
   */
  constructor(client: SignalRManagementClientContext) {
    this.client = client;
  }
  /**
   * List private endpoint connections
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param [options] The optional parameters
   * @returns Promise<Models.SignalRPrivateEndpointConnectionsListResponse>
   */
  list(
    resourceGroupName: string,
    resourceName: string,
    options?: msRest.RequestOptionsBase
  ): Promise<Models.SignalRPrivateEndpointConnectionsListResponse>;
  /**
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param callback The callback
   */
  list(
    resourceGroupName: string,
    resourceName: string,
    callback: msRest.ServiceCallback<Models.PrivateEndpointConnectionList>
  ): void;
  /**
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param options The optional parameters
   * @param callback The callback
   */
  list(
    resourceGroupName: string,
    resourceName: string,
    options: msRest.RequestOptionsBase,
    callback: msRest.ServiceCallback<Models.PrivateEndpointConnectionList>
  ): void;
  // Implementation signature: dispatches either the promise or callback form.
  list(
    resourceGroupName: string,
    resourceName: string,
    options?:
      | msRest.RequestOptionsBase
      | msRest.ServiceCallback<Models.PrivateEndpointConnectionList>,
    callback?: msRest.ServiceCallback<Models.PrivateEndpointConnectionList>
  ): Promise<Models.SignalRPrivateEndpointConnectionsListResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        resourceName,
        options
      },
      listOperationSpec,
      callback
    ) as Promise<Models.SignalRPrivateEndpointConnectionsListResponse>;
  }
  /**
   * Get the specified private endpoint connection
   * @param privateEndpointConnectionName The name of the private endpoint connection
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param [options] The optional parameters
   * @returns Promise<Models.SignalRPrivateEndpointConnectionsGetResponse>
   */
  get(
    privateEndpointConnectionName: string,
    resourceGroupName: string,
    resourceName: string,
    options?: msRest.RequestOptionsBase
  ): Promise<Models.SignalRPrivateEndpointConnectionsGetResponse>;
  /**
   * @param privateEndpointConnectionName The name of the private endpoint connection
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param callback The callback
   */
  get(
    privateEndpointConnectionName: string,
    resourceGroupName: string,
    resourceName: string,
    callback: msRest.ServiceCallback<Models.PrivateEndpointConnection>
  ): void;
  /**
   * @param privateEndpointConnectionName The name of the private endpoint connection
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param options The optional parameters
   * @param callback The callback
   */
  get(
    privateEndpointConnectionName: string,
    resourceGroupName: string,
    resourceName: string,
    options: msRest.RequestOptionsBase,
    callback: msRest.ServiceCallback<Models.PrivateEndpointConnection>
  ): void;
  // Implementation signature: dispatches either the promise or callback form.
  get(
    privateEndpointConnectionName: string,
    resourceGroupName: string,
    resourceName: string,
    options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.PrivateEndpointConnection>,
    callback?: msRest.ServiceCallback<Models.PrivateEndpointConnection>
  ): Promise<Models.SignalRPrivateEndpointConnectionsGetResponse> {
    return this.client.sendOperationRequest(
      {
        privateEndpointConnectionName,
        resourceGroupName,
        resourceName,
        options
      },
      getOperationSpec,
      callback
    ) as Promise<Models.SignalRPrivateEndpointConnectionsGetResponse>;
  }
  /**
   * Update the state of specified private endpoint connection
   * @param privateEndpointConnectionName The name of the private endpoint connection
   * @param parameters The resource of private endpoint and its properties
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param [options] The optional parameters
   * @returns Promise<Models.SignalRPrivateEndpointConnectionsUpdateResponse>
   */
  update(
    privateEndpointConnectionName: string,
    parameters: Models.PrivateEndpointConnection,
    resourceGroupName: string,
    resourceName: string,
    options?: msRest.RequestOptionsBase
  ): Promise<Models.SignalRPrivateEndpointConnectionsUpdateResponse>;
  /**
   * @param privateEndpointConnectionName The name of the private endpoint connection
   * @param parameters The resource of private endpoint and its properties
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param callback The callback
   */
  update(
    privateEndpointConnectionName: string,
    parameters: Models.PrivateEndpointConnection,
    resourceGroupName: string,
    resourceName: string,
    callback: msRest.ServiceCallback<Models.PrivateEndpointConnection>
  ): void;
  /**
   * @param privateEndpointConnectionName The name of the private endpoint connection
   * @param parameters The resource of private endpoint and its properties
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param options The optional parameters
   * @param callback The callback
   */
  update(
    privateEndpointConnectionName: string,
    parameters: Models.PrivateEndpointConnection,
    resourceGroupName: string,
    resourceName: string,
    options: msRest.RequestOptionsBase,
    callback: msRest.ServiceCallback<Models.PrivateEndpointConnection>
  ): void;
  // Implementation signature: dispatches either the promise or callback form.
  update(
    privateEndpointConnectionName: string,
    parameters: Models.PrivateEndpointConnection,
    resourceGroupName: string,
    resourceName: string,
    options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.PrivateEndpointConnection>,
    callback?: msRest.ServiceCallback<Models.PrivateEndpointConnection>
  ): Promise<Models.SignalRPrivateEndpointConnectionsUpdateResponse> {
    return this.client.sendOperationRequest(
      {
        privateEndpointConnectionName,
        parameters,
        resourceGroupName,
        resourceName,
        options
      },
      updateOperationSpec,
      callback
    ) as Promise<Models.SignalRPrivateEndpointConnectionsUpdateResponse>;
  }
  /**
   * Delete the specified private endpoint connection.
   * Starts the long-running delete and polls it to completion.
   * @param privateEndpointConnectionName The name of the private endpoint connection
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param [options] The optional parameters
   * @returns Promise<msRest.RestResponse>
   */
  deleteMethod(
    privateEndpointConnectionName: string,
    resourceGroupName: string,
    resourceName: string,
    options?: msRest.RequestOptionsBase
  ): Promise<msRest.RestResponse> {
    return this.beginDeleteMethod(
      privateEndpointConnectionName,
      resourceGroupName,
      resourceName,
      options
    ).then((lroPoller) => lroPoller.pollUntilFinished());
  }
  /**
   * Delete the specified private endpoint connection.
   * Returns the LRO poller without waiting for completion.
   * @param privateEndpointConnectionName The name of the private endpoint connection
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param [options] The optional parameters
   * @returns Promise<msRestAzure.LROPoller>
   */
  beginDeleteMethod(
    privateEndpointConnectionName: string,
    resourceGroupName: string,
    resourceName: string,
    options?: msRest.RequestOptionsBase
  ): Promise<msRestAzure.LROPoller> {
    return this.client.sendLRORequest(
      {
        privateEndpointConnectionName,
        resourceGroupName,
        resourceName,
        options
      },
      beginDeleteMethodOperationSpec,
      options
    );
  }
  /**
   * List private endpoint connections (follow-up page).
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param [options] The optional parameters
   * @returns Promise<Models.SignalRPrivateEndpointConnectionsListNextResponse>
   */
  listNext(
    nextPageLink: string,
    options?: msRest.RequestOptionsBase
  ): Promise<Models.SignalRPrivateEndpointConnectionsListNextResponse>;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param callback The callback
   */
  listNext(
    nextPageLink: string,
    callback: msRest.ServiceCallback<Models.PrivateEndpointConnectionList>
  ): void;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param options The optional parameters
   * @param callback The callback
   */
  listNext(
    nextPageLink: string,
    options: msRest.RequestOptionsBase,
    callback: msRest.ServiceCallback<Models.PrivateEndpointConnectionList>
  ): void;
  // Implementation signature: dispatches either the promise or callback form.
  listNext(
    nextPageLink: string,
    options?:
      | msRest.RequestOptionsBase
      | msRest.ServiceCallback<Models.PrivateEndpointConnectionList>,
    callback?: msRest.ServiceCallback<Models.PrivateEndpointConnectionList>
  ): Promise<Models.SignalRPrivateEndpointConnectionsListNextResponse> {
    return this.client.sendOperationRequest(
      {
        nextPageLink,
        options
      },
      listNextOperationSpec,
      callback
    ) as Promise<Models.SignalRPrivateEndpointConnectionsListNextResponse>;
  }
}
// Operation Specifications
// Shared serializer over the generated mappers; referenced by every spec below.
const serializer = new msRest.Serializer(Mappers);
// GET .../privateEndpointConnections — list all connections on a resource.
const listOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path:
    "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SignalRService/signalR/{resourceName}/privateEndpointConnections",
  urlParameters: [Parameters.subscriptionId, Parameters.resourceGroupName, Parameters.resourceName],
  queryParameters: [Parameters.apiVersion],
  headerParameters: [Parameters.acceptLanguage],
  responses: {
    200: {
      bodyMapper: Mappers.PrivateEndpointConnectionList
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
// GET .../privateEndpointConnections/{name} — fetch one connection.
const getOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path:
    "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SignalRService/signalR/{resourceName}/privateEndpointConnections/{privateEndpointConnectionName}",
  urlParameters: [
    Parameters.privateEndpointConnectionName,
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.resourceName
  ],
  queryParameters: [Parameters.apiVersion],
  headerParameters: [Parameters.acceptLanguage],
  responses: {
    200: {
      bodyMapper: Mappers.PrivateEndpointConnection
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
// PUT .../privateEndpointConnections/{name} — update a connection's state;
// the request body is the PrivateEndpointConnection resource (required).
const updateOperationSpec: msRest.OperationSpec = {
  httpMethod: "PUT",
  path:
    "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SignalRService/signalR/{resourceName}/privateEndpointConnections/{privateEndpointConnectionName}",
  urlParameters: [
    Parameters.privateEndpointConnectionName,
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.resourceName
  ],
  queryParameters: [Parameters.apiVersion],
  headerParameters: [Parameters.acceptLanguage],
  requestBody: {
    parameterPath: "parameters",
    mapper: {
      ...Mappers.PrivateEndpointConnection,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.PrivateEndpointConnection
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
// DELETE .../privateEndpointConnections/{name} — long-running operation;
// 200/202/204 are all accepted as in-progress/completed responses.
const beginDeleteMethodOperationSpec: msRest.OperationSpec = {
  httpMethod: "DELETE",
  path:
    "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SignalRService/signalR/{resourceName}/privateEndpointConnections/{privateEndpointConnectionName}",
  urlParameters: [
    Parameters.privateEndpointConnectionName,
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.resourceName
  ],
  queryParameters: [Parameters.apiVersion],
  headerParameters: [Parameters.acceptLanguage],
  responses: {
    200: {},
    202: {},
    204: {},
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
// GET {nextLink} — follow an absolute paging link returned by list.
const listNextOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  baseUrl: "https://management.azure.com",
  path: "{nextLink}",
  urlParameters: [Parameters.nextPageLink],
  queryParameters: [Parameters.apiVersion],
  headerParameters: [Parameters.acceptLanguage],
  responses: {
    200: {
      bodyMapper: Mappers.PrivateEndpointConnectionList
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
module WinJSTests {
"use strict";
var testRootEl;
var ITEMS_COUNT = 10;
var _oldMaxTimePerCreateContainers;
// Builds one test data item. The first letter of `pattern` picks the tile
// size class ('b' = big, 'm' = medium, anything else = small); items are
// spread across 5 groups via `counter % 5`, and the title defaults to
// "Tile<counter>" when none is supplied.
function getDataObject(pattern, counter, title?) {
    var sizeClasses = { b: "multisizeBigTile", m: "multisizeMediumTile" };
    return {
        enableCellSpanning: true,
        group: counter % 5,
        title: (title ? title : "Tile" + counter),
        className: sizeClasses[pattern.charAt(0)] || "multisizeSmallTile"
    };
}
// Wraps `items` in a test data source whose adapter runs fully synchronously:
// every method executes immediately, emits change notifications, fetches no
// extra items around edit deltas, and applies no count overrides.
function createDataSource(items) {
    var syncController = {
        directivesForMethod: function (method, args) {
            // A fresh directives object per call, matching the adapter contract.
            return {
                callMethodSynchronously: true,
                sendChangeNotifications: true,
                countBeforeDelta: 0,
                countAfterDelta: 0,
                countBeforeOverride: -1,
                countAfterOverride: -1
            };
        }
    };
    // No data-adapter abilities are overridden.
    return Helper.ItemsManager.createTestDataSource(items, syncController, null);
}
// Builds a grouped ListView inside `element`. Items are grouped by their
// numeric `group` property; the data can be backed either by a grouped
// Binding.List or by a grouped test data source. In `multisizeMode` the
// layout gets cell-spanning groupInfo/itemInfo callbacks.
function setupListView(element, layout, multisizeMode, items?, bindingList?) {
    // Both accept raw items and IItem wrappers (with a `.data` property).
    function groupKey(item) {
        return (item.data ? item.data.group.toString() : item.group.toString());
    }
    function groupData(item) {
        return {
            title: (item.data ? item.data.group.toString() : item.group.toString())
        };
    }
    // Default data: ITEMS_COUNT tiles cycling medium/small/big in multisize
    // mode (all medium otherwise), pre-sorted by group.
    if (!items) {
        var patternOptions = ['m', 's', 'b'];
        items = [];
        for (var i = 0; i < ITEMS_COUNT; ++i) {
            var pattern = (multisizeMode ? patternOptions[i % 3] : patternOptions[0]);
            items[i] = getDataObject(pattern, i);
        }
        items.sort(function (a, b) { return a.group - b.group; });
    }
    var testDataSrc,
        testDataSrcGroups;
    if (bindingList) {
        var testDataList = (new WinJS.Binding.List(items.slice(0))).createGrouped(groupKey, groupData);
        testDataSrc = testDataList.dataSource;
        testDataSrcGroups = testDataList.groups.dataSource;
    } else {
        testDataSrc = WinJS.UI.computeDataSourceGroups(createDataSource(items.slice(0)), groupKey, groupData);
        testDataSrcGroups = testDataSrc.groups;
    }
    var layoutOptions: any = {};
    // Cell-spanning layout: 100x100 cells, with per-item pixel sizes resolved
    // from the item's tile class (matching the sizes checkTile asserts).
    if (multisizeMode) {
        layoutOptions.groupInfo = {
            enableCellSpanning: true,
            cellWidth: 100,
            cellHeight: 100
        };
        layoutOptions.itemInfo = function (itemIndex) {
            return testDataSrc.itemFromIndex(itemIndex).then(function (item) {
                switch (item.data.className) {
                    case "multisizeBigTile":
                        return { width: 400, height: 200 };
                        break;
                    case "multisizeMediumTile":
                        return { width: 200, height: 200 };
                        break;
                    case "multisizeSmallTile":
                        return { width: 300, height: 100 };
                        break;
                }
            });
        };
    }
    return new WinJS.UI.ListView(element, {
        itemDataSource: testDataSrc,
        groupDataSource: testDataSrcGroups,
        selectionMode: "multi",
        itemTemplate: Helper.ListView.createRenderer("simpleTemplate"),
        groupHeaderTemplate: Helper.ListView.createRenderer("simpleHeaderTemplate", "groupListEditorTest_groupheader_"),
        layout: new WinJS.UI[layout](layoutOptions)
    });
}
// Asserts one tile's caption and container position; `left` is normalized by
// subtracting the surface's msScrollLimitXMin offset. Container width/height
// are only verified when the layout is in multisize (cell-spanning) mode.
function checkTile(listview, index, left, top, tileType, title) {
    // Expected container sizes (px) per multisize tile type.
    var expectedSizes = {
        b: { width: 400, height: 200 },
        m: { width: 200, height: 200 },
        s: { width: 300, height: 100 }
    };
    var tile = listview.elementFromIndex(index);
    var container = Helper.ListView.containerFrom(tile);
    var viewBoundsOffset = parseInt(listview._viewport.style["msScrollLimitXMin"]) || 0;
    LiveUnit.Assert.areEqual((title ? title : "Tile" + index), tile.textContent.trim());
    LiveUnit.Assert.areEqual(left, Helper.ListView.offsetLeftFromSurface(listview, container) - viewBoundsOffset, "Error in tile " + index);
    LiveUnit.Assert.areEqual(top, Helper.ListView.offsetTopFromSurface(listview, container), "Error in tile " + index);
    if (listview.layout.groupInfo) {
        var expected = expectedSizes[tileType];
        if (expected) {
            LiveUnit.Assert.areEqual(expected.width, container.offsetWidth, "Error in tile " + index);
            LiveUnit.Assert.areEqual(expected.height, container.offsetHeight, "Error in tile " + index);
        }
    }
}
// Asserts a group header's caption and position. When no caption is given,
// the expected caption is the alphabet letter at `groupIndex` ('A', 'B', ...).
// `left` is normalized by subtracting the surface's msScrollLimitXMin offset.
function checkHeader(listView, groupIndex, left, top, id, caption) {
    var header = document.getElementById(id + groupIndex);
    var headerContainer = Helper.ListView.headerContainerFrom(listView, header);
    var boundsOffset = parseInt(listView._viewport.style["msScrollLimitXMin"]) || 0;
    var expectedCaption = caption ? caption : String.fromCharCode("A".charCodeAt(0) + groupIndex);
    LiveUnit.Assert.areEqual(expectedCaption, header.textContent.trim());
    LiveUnit.Assert.areEqual(left, Helper.ListView.offsetLeftFromSurface(listView, headerContainer) - boundsOffset);
    LiveUnit.Assert.areEqual(top, Helper.ListView.offsetTopFromSurface(listView, headerContainer));
}
/**
 * Test fixture whose test methods are attached dynamically by generate()
 * below; this class only provides per-test DOM setup and teardown.
 */
export class GroupListEditorTest {
    // This is the setup function that will be called at the beginning of each test function.
    // Builds the test DOM (ListView host plus hidden header/item templates)
    // and disables WinJS's time-sliced container creation so containers are
    // created without yielding (WinBlue: 298587).
    setUp() {
        LiveUnit.LoggingCore.logComment("In setup");
        testRootEl = document.createElement("div");
        testRootEl.className = "file-listview-css";
        var newNode = document.createElement("div");
        newNode.id = "groupListEditorTest";
        newNode.style.height = "200px";
        newNode.style.width = "1024px";
        newNode.innerHTML =
            "<div id='listEditorTest'></div>" +
            "<div id='simpleHeaderTemplate' class='listEditorTestClass' style='display: none; width:100px; height:100px'>" +
            "    <div>{{title}}</div>" +
            "</div>" +
            "<div id='simpleTemplate' class='{{className}}' style='display: none;'>" +
            "    <div>{{title}}</div>" +
            "</div>";
        testRootEl.appendChild(newNode);
        document.body.appendChild(testRootEl);
        //WinBlue: 298587
        _oldMaxTimePerCreateContainers = WinJS.UI._VirtualizeContentsView._maxTimePerCreateContainers;
        WinJS.UI._VirtualizeContentsView._maxTimePerCreateContainers = Number.MAX_VALUE;
    }
    // Restores the container-creation setting and removes the test DOM,
    // disposing any WinJS controls inside it first.
    tearDown() {
        LiveUnit.LoggingCore.logComment("In tearDown");
        WinJS.UI._VirtualizeContentsView._maxTimePerCreateContainers = _oldMaxTimePerCreateContainers;
        WinJS.Utilities.disposeSubTree(testRootEl);
        document.body.removeChild(testRootEl);
    }
}
// Registers one test method on GroupListEditorTest.prototype per layout
// variation: normal vs multisize (cell-spanning) and test data source vs
// Binding.List. Each generated test builds a fresh ListView and passes it to
// `testFunction(listview, complete)`.
function generate(name, testFunction, items?) {
    function generateTest(layout, multisize, bindingList) {
        // Compose a descriptive test name encoding every variation.
        var fullName = name + "_" + (multisize ? "multisize_grouped_grid" : "normal_grouped_grid")
            + (bindingList ? "_BindingList" : "_TestDataSource") + (layout == "GridLayout" ? "" : "_" + layout);
        GroupListEditorTest.prototype[fullName] = function (complete) {
            LiveUnit.LoggingCore.logComment("in " + fullName);
            var element = document.getElementById("listEditorTest");
            var listview = setupListView(element, layout, multisize, items, bindingList);
            testFunction(listview, complete);
        };
    }
    //normal grouped with test data source
    generateTest("GridLayout", false, false);
    //normal grouped with Binding.List
    generateTest("GridLayout", false, true);
    // Cell-spanning layouts need CSS grid support in the host browser.
    if (Helper.Browser.supportsCSSGrid) {
        //grouped multisize with test data source
        generateTest("GridLayout", true, false);
        //grouped multisize with Binding.List
        generateTest("GridLayout", true, true);
    }
}
generate("testInsertBefore", function (listView, complete) {
LiveUnit.LoggingCore.logComment("in testInsertBefore");
Helper.ListView.waitForReady(listView, -1)().
then(function () {
listView.selection.set(0);
return Helper.ListView.getDataObjects(listView.itemDataSource, [0]);
}).
then(function (dataObjects) {
return listView.itemDataSource.insertBefore(null, getDataObject('m', 0, "NewTile"), dataObjects[0].key).then(function () {
return listView.itemDataSource.insertBefore(null, getDataObject('s', 0, "NewTile"), dataObjects[0].key);
});
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
return listView.itemDataSource.getCount();
}).
then(function (count) {
LiveUnit.Assert.areEqual(ITEMS_COUNT + 2, count);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
Helper.ListView.elementsEqual([2], listView.selection.getIndices());
LiveUnit.Assert.areEqual(ITEMS_COUNT + 2, document.querySelectorAll(".win-container").length);
checkHeader(listView, 0, 70, 0, "groupListEditorTest_groupheader_", "0");
checkTile(listView, 0, 70, 120, 'm', "NewTile");
checkTile(listView, 1, 270, 120, 's', "NewTile");
}).
then(complete);
});
generate("testInsertAtStart", function (listView, complete) {
LiveUnit.LoggingCore.logComment("in testInsertAtStart");
Helper.ListView.waitForReady(listView, -1)().
then(function () {
listView.selection.set(0);
return Helper.ListView.getDataObjects(listView.itemDataSource, [0]);
}).
then(function (dataObjects) {
return listView.itemDataSource.insertAtStart(null, getDataObject('s', 0, "NewTile")).
then(function () {
return listView.itemDataSource.insertAtStart(null, getDataObject('m', 0, "NewTile"));
});
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
return listView.itemDataSource.getCount();
}).
then(function (count) {
LiveUnit.Assert.areEqual(ITEMS_COUNT + 2, count);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
Helper.ListView.elementsEqual([2], listView.selection.getIndices());
LiveUnit.Assert.areEqual(ITEMS_COUNT + 2, document.querySelectorAll(".win-container").length);
checkHeader(listView, 0, 70, 0, "groupListEditorTest_groupheader_", "0");
checkTile(listView, 0, 70, 120, 'm', "NewTile");
checkTile(listView, 1, 270, 120, 's', "NewTile");
}).
then(complete);
});
// Inserts two items immediately after item 0 via insertAfter; the second
// insert goes directly after the same key, so it ends up before the first
// ('m' at index 1, 's' at index 2). Selection on item 0 must stay at 0.
generate("testInsertAfter", function (listView, complete) {
LiveUnit.LoggingCore.logComment("in testInsertAfter");
Helper.ListView.waitForReady(listView, -1)().
then(function () {
listView.selection.set(0);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
return Helper.ListView.getDataObjects(listView.itemDataSource, [0]);
}).
then(function (dataObjects) {
// throws an exception: Line: 6924 in ui.js
// Error: Unable to get value of the property 'count': object is null or undefined
return listView.itemDataSource.insertAfter(null, getDataObject('s', 0, "NewTile"), dataObjects[0].key).
then(function () {
return listView.itemDataSource.insertAfter(null, getDataObject('m', 0, "NewTile"), dataObjects[0].key);
});
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
return listView.itemDataSource.getCount();
}).
then(function (count) {
LiveUnit.Assert.areEqual(ITEMS_COUNT + 2, count);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
// Inserts happened after the selected item, so selection stays at index 0.
Helper.ListView.elementsEqual([0], listView.selection.getIndices());
LiveUnit.Assert.areEqual(ITEMS_COUNT + 2, document.querySelectorAll(".win-container").length);
checkHeader(listView, 0, 70, 0, "groupListEditorTest_groupheader_", "0");
checkTile(listView, 1, 270, 120, 'm', "NewTile");
checkTile(listView, 2, 470, 120, 's', "NewTile");
}).
then(complete);
});
// Appends two items with insertAtEnd ('m' then 's') and verifies the tail
// layout after scrolling it into view. Expected pixel offsets differ between
// multisize (grouped-size) and uniform layouts.
generate("testInsertAtEnd", function (listView, complete) {
LiveUnit.LoggingCore.logComment("in testInsertAtEnd");
// A groupInfo callback on the layout signals the multisize variant of the test.
var isMultisizeTest = !!listView.layout.groupInfo;
Helper.ListView.waitForReady(listView, -1)().
then(function () {
listView.selection.set(0);
return Helper.ListView.getDataObjects(listView.itemDataSource, [0]);
}).
then(function (dataObjects) {
return listView.itemDataSource.insertAtEnd(null, getDataObject('m', ITEMS_COUNT - 1, "NewTile")).
then(function () {
return listView.itemDataSource.insertAtEnd(null, getDataObject('s', ITEMS_COUNT - 1, "NewTile"));
});
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
return listView.itemDataSource.getCount();
}).
then(function (count) {
LiveUnit.Assert.areEqual(ITEMS_COUNT + 2, count);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
Helper.ListView.elementsEqual([0], listView.selection.getIndices());
LiveUnit.Assert.areEqual(ITEMS_COUNT + 2, document.querySelectorAll(".win-container").length);
checkHeader(listView, 0, 70, 0, "groupListEditorTest_groupheader_", "0");
}).
then(function () {
// Scroll to the last item so the appended tiles are realized before checking.
listView.ensureVisible(ITEMS_COUNT + 1);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
if (isMultisizeTest) {
checkTile(listView, (ITEMS_COUNT + 2) - 2, 3250, 120, 'm', "NewTile");
checkTile(listView, (ITEMS_COUNT + 2) - 1, 3450, 120, 's', "NewTile");
} else {
checkTile(listView, (ITEMS_COUNT + 2) - 2, 2350, 120, 'm', "NewTile");
checkTile(listView, (ITEMS_COUNT + 2) - 1, 2550, 120, 's', "NewTile");
}
}).
then(complete);
});
// 100 medium tiles sorted by group — a data set large enough that some items
// fall outside the ListView's realized range.
var bigDataSet = [];
for (var i = 0; i < 100; i++) {
    bigDataSet.push(getDataObject('m', i));
}
bigDataSet.sort(function (left, right) { return left.group - right.group; });
// Appends one item into a 100-item set and checks that both the data source
// count and the ListView's internal container array grow to 101.
generate("testInsertOutsideOfRealizedRange", function (listView, complete) {
Helper.ListView.waitForReady(listView, -1)().then(function () {
// NOTE(review): insertAtEnd's promise is not returned/awaited (ASI terminates
// the statement); the test relies on waitForReady instead — confirm intent.
listView.itemDataSource.insertAtEnd(null, getDataObject('m', 4, "NewTile"))
return Helper.ListView.waitForReady(listView, -1)();
}).then(function () {
return listView.itemDataSource.getCount();
}).then(function (count) {
LiveUnit.Assert.areEqual(101, count);
LiveUnit.Assert.areEqual(101, listView._view.containers.length);
complete();
});
}, bigDataSet);
// Starts from an empty data set ([] passed to generate) and inserts a single
// item, verifying one container and one group header appear.
generate("testInsertToEmpty", function (listView, complete) {
LiveUnit.LoggingCore.logComment("in testInsertToEmpty");
Helper.ListView.waitForReady(listView, -1)().
then(function () {
return listView.itemDataSource.insertAtStart(null, getDataObject('s', 0, "NewTile"));
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
return listView.itemDataSource.getCount();
}).
then(function (count) {
LiveUnit.Assert.areEqual(1, count);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
LiveUnit.Assert.areEqual(1, document.querySelectorAll(".win-container").length);
checkHeader(listView, 0, 70, 0, "groupListEditorTest_groupheader_", "0");
// NOTE(review): inserted size is 's' but the check passes '2' — confirm
// whether checkTile expects a size character here or this is a typo.
checkTile(listView, 0, 70, 120, '2', "NewTile");
}).
then(complete);
}, []);
/// removes first two items which eliminates group 0
// After both removals, group 0 is gone, the selection (on a removed item) is
// cleared, and the list should start at header "1" with no residual scroll.
generate("testRemoveFirstItem", function (listView, complete) {
LiveUnit.LoggingCore.logComment("in testRemoveFirstItem");
var isMultisizeTest = !!listView.layout.groupInfo;
Helper.ListView.waitForReady(listView, -1)().
then(function () {
listView.selection.set(0);
return Helper.ListView.getDataObjects(listView.itemDataSource, [0, 1]);
}).
then(function (dataObjects) {
return listView.itemDataSource.remove(dataObjects[0].key).
then(function () {
return listView.itemDataSource.remove(dataObjects[1].key);
});
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
return listView.itemDataSource.getCount();
}).
then(function (count) {
LiveUnit.Assert.areEqual(ITEMS_COUNT - 2, count);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
LiveUnit.Assert.areEqual(0, listView.scrollPosition);
// The selected item was removed, so the selection must now be empty.
Helper.ListView.elementsEqual([], listView.selection.getIndices());
LiveUnit.Assert.areEqual(ITEMS_COUNT - 2, document.querySelectorAll(".win-container").length);
if (isMultisizeTest) {
checkHeader(listView, 0, 70, 0, "groupListEditorTest_groupheader_", "1");
checkTile(listView, 0, 70, 120, 's', "Tile1");
checkTile(listView, 1, 370, 120, 'm', "Tile6");
checkHeader(listView, 1, 640, 0, "groupListEditorTest_groupheader_", "2");
checkTile(listView, 2, 640, 120, 'b', "Tile2");
} else {
checkHeader(listView, 0, 70, 0, "groupListEditorTest_groupheader_", "1");
checkTile(listView, 0, 70, 120, 'm', "Tile1");
checkTile(listView, 1, 270, 120, 'm', "Tile6");
checkHeader(listView, 1, 540, 0, "groupListEditorTest_groupheader_", "2");
checkTile(listView, 2, 540, 120, 'm', "Tile2");
}
}).
then(complete);
});
/// Removes from end of group 0 and start of group 1
// Item 0 stays selected; the layout collapses so "Tile6" leads group 1.
generate("testRemoveAtGroupBoundary", function (listView, complete) {
LiveUnit.LoggingCore.logComment("in testRemoveAtGroupBoundary");
Helper.ListView.waitForReady(listView, -1)().
then(function () {
listView.selection.set(0);
return Helper.ListView.getDataObjects(listView.itemDataSource, [1, 2]);
}).
then(function (dataObjects) {
return listView.itemDataSource.remove(dataObjects[0].key).then(function () {
return listView.itemDataSource.remove(dataObjects[1].key);
});
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
return listView.itemDataSource.getCount();
}).
then(function (count) {
LiveUnit.Assert.areEqual(ITEMS_COUNT - 2, count);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
// Removed items were after the selection, so item 0 remains selected.
Helper.ListView.elementsEqual([0], listView.selection.getIndices());
LiveUnit.Assert.areEqual(ITEMS_COUNT - 2, document.querySelectorAll(".win-container").length);
checkHeader(listView, 0, 70, 0, "groupListEditorTest_groupheader_", "0");
checkTile(listView, 0, 70, 120, 'm', "Tile0");
checkHeader(listView, 1, 340, 0, "groupListEditorTest_groupheader_", "1");
checkTile(listView, 1, 340, 120, 'm', "Tile6");
}).
then(complete);
});
/// Removes last two items in group 4, which eliminates it
// After removal the tail (group 3) is scrolled into view and its pixel
// layout is checked; offsets differ for multisize vs uniform layouts.
generate("testRemoveLastItem", function (listView, complete) {
LiveUnit.LoggingCore.logComment("in testRemoveLastItem");
var isMultisizeTest = !!listView.layout.groupInfo;
Helper.ListView.waitForReady(listView, -1)().
then(function () {
listView.selection.set(0);
return Helper.ListView.getDataObjects(listView.itemDataSource, [8, 9]);
}).
then(function (dataObjects) {
return listView.itemDataSource.remove(dataObjects[0].key).then(function () {
return listView.itemDataSource.remove(dataObjects[1].key);
});
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
return listView.itemDataSource.getCount();
}).
then(function (count) {
LiveUnit.Assert.areEqual(ITEMS_COUNT - 2, count);
}).
then(function () {
checkHeader(listView, 0, 70, 0, "groupListEditorTest_groupheader_", "0");
// Realize the end of the (now shorter) list before checking its tiles.
listView.ensureVisible((ITEMS_COUNT - 1) - 2);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
Helper.ListView.elementsEqual([0], listView.selection.getIndices());
LiveUnit.Assert.areEqual(ITEMS_COUNT - 2, document.querySelectorAll(".win-container").length);
if (isMultisizeTest) {
checkHeader(listView, 3, 2080, 0, "groupListEditorTest_groupheader_", "3");
checkTile(listView, 6, 2080, 120, 'm', "Tile3");
checkTile(listView, 7, 2280, 120, 'b', "Tile8");
} else {
checkHeader(listView, 3, 1480, 0, "groupListEditorTest_groupheader_", "3");
checkTile(listView, 6, 1480, 120, 'm', "Tile3");
checkTile(listView, 7, 1680, 120, 'm', "Tile8");
}
}).
then(complete);
});
/// Moves from end of group 0 to start
// Item at index 1 ("Tile5") moves to the front; the originally-selected
// item 0 ("Tile0") therefore ends up at index 1.
generate("testMoveToStart", function (listView, complete) {
LiveUnit.LoggingCore.logComment("in testMoveToStart");
var isMultisizeTest = !!listView.layout.groupInfo;
Helper.ListView.waitForReady(listView, -1)().
then(function () {
listView.selection.set(0);
return Helper.ListView.getDataObjects(listView.itemDataSource, [1]);
}).
then(function (dataObjects) {
return listView.itemDataSource.moveToStart(dataObjects[0].key);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
return listView.itemDataSource.getCount();
}).
then(function (count) {
// Moves never change the total item count.
LiveUnit.Assert.areEqual(ITEMS_COUNT, count);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
Helper.ListView.elementsEqual([1], listView.selection.getIndices());
LiveUnit.Assert.areEqual(ITEMS_COUNT, document.querySelectorAll(".win-container").length);
if (isMultisizeTest) {
checkHeader(listView, 0, 70, 0, "groupListEditorTest_groupheader_", "0");
checkTile(listView, 0, 70, 120, 'b', "Tile5");
checkTile(listView, 1, 470, 120, 'm', "Tile0");
checkHeader(listView, 1, 740, 0, "groupListEditorTest_groupheader_", "1");
checkTile(listView, 2, 740, 120, 's', "Tile1");
} else {
checkHeader(listView, 0, 70, 0, "groupListEditorTest_groupheader_", "0");
checkTile(listView, 0, 70, 120, 'm', "Tile5");
checkTile(listView, 1, 270, 120, 'm', "Tile0");
checkHeader(listView, 1, 540, 0, "groupListEditorTest_groupheader_", "1");
checkTile(listView, 2, 540, 120, 'm', "Tile1");
}
}).
then(complete);
});
/// Moves from end of group 0 to before item 0
// moveBefore(item1, item0) yields the same final order as testMoveToStart:
// "Tile5" first, selected "Tile0" at index 1.
generate("testMoveBefore", function (listView, complete) {
LiveUnit.LoggingCore.logComment("in testMoveBefore");
var isMultisizeTest = !!listView.layout.groupInfo;
Helper.ListView.waitForReady(listView, -1)().
then(function () {
listView.selection.set(0);
return Helper.ListView.getDataObjects(listView.itemDataSource, [0, 1]);
}).
then(function (dataObjects) {
return listView.itemDataSource.moveBefore(dataObjects[1].key, dataObjects[0].key);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
return listView.itemDataSource.getCount();
}).
then(function (count) {
LiveUnit.Assert.areEqual(ITEMS_COUNT, count);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
Helper.ListView.elementsEqual([1], listView.selection.getIndices());
LiveUnit.Assert.areEqual(ITEMS_COUNT, document.querySelectorAll(".win-container").length);
if (isMultisizeTest) {
checkHeader(listView, 0, 70, 0, "groupListEditorTest_groupheader_", "0");
checkTile(listView, 0, 70, 120, 'b', "Tile5");
checkTile(listView, 1, 470, 120, 'm', "Tile0");
checkHeader(listView, 1, 740, 0, "groupListEditorTest_groupheader_", "1");
checkTile(listView, 2, 740, 120, 's', "Tile1");
} else {
checkHeader(listView, 0, 70, 0, "groupListEditorTest_groupheader_", "0");
checkTile(listView, 0, 70, 120, 'm', "Tile5");
checkTile(listView, 1, 270, 120, 'm', "Tile0");
checkHeader(listView, 1, 540, 0, "groupListEditorTest_groupheader_", "1");
checkTile(listView, 2, 540, 120, 'm', "Tile1");
}
}).
then(complete);
});
/// Moves from start of group 0 to after item 1
// moveAfter(item0, item1) swaps the first two items; expected layout matches
// testMoveToStart/testMoveBefore.
generate("testMoveAfter", function (listView, complete) {
LiveUnit.LoggingCore.logComment("in testMoveAfter");
var isMultisizeTest = !!listView.layout.groupInfo;
Helper.ListView.waitForReady(listView, -1)().
then(function () {
listView.selection.set(0);
return Helper.ListView.getDataObjects(listView.itemDataSource, [0, 1]);
}).
then(function (dataObjects) {
return listView.itemDataSource.moveAfter(dataObjects[0].key, dataObjects[1].key);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
return listView.itemDataSource.getCount();
}).
then(function (count) {
LiveUnit.Assert.areEqual(ITEMS_COUNT, count);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
// Selection follows the moved item to index 1.
Helper.ListView.elementsEqual([1], listView.selection.getIndices());
LiveUnit.Assert.areEqual(ITEMS_COUNT, document.querySelectorAll(".win-container").length);
if (isMultisizeTest) {
checkHeader(listView, 0, 70, 0, "groupListEditorTest_groupheader_", "0");
checkTile(listView, 0, 70, 120, 'b', "Tile5");
checkTile(listView, 1, 470, 120, 'm', "Tile0");
checkHeader(listView, 1, 740, 0, "groupListEditorTest_groupheader_", "1");
checkTile(listView, 2, 740, 120, 's', "Tile1");
} else {
checkHeader(listView, 0, 70, 0, "groupListEditorTest_groupheader_", "0");
checkTile(listView, 0, 70, 120, 'm', "Tile5");
checkTile(listView, 1, 270, 120, 'm', "Tile0");
checkHeader(listView, 1, 540, 0, "groupListEditorTest_groupheader_", "1");
checkTile(listView, 2, 540, 120, 'm', "Tile1");
}
}).
then(complete);
});
/// Moves from start of group 4 to after item 9
// Item 8 ("Tile4") moves to the very end; selection on item 0 is unaffected.
// The tail is scrolled into view before checking its layout.
generate("testMoveToEnd", function (listView, complete) {
LiveUnit.LoggingCore.logComment("in testMoveToEnd");
var isMultisizeTest = !!listView.layout.groupInfo;
Helper.ListView.waitForReady(listView, -1)().
then(function () {
listView.selection.set(0);
return Helper.ListView.getDataObjects(listView.itemDataSource, [8]);
}).
then(function (dataObjects) {
return listView.itemDataSource.moveToEnd(dataObjects[0].key);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
return listView.itemDataSource.getCount();
}).
then(function (count) {
LiveUnit.Assert.areEqual(ITEMS_COUNT, count);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
Helper.ListView.elementsEqual([0], listView.selection.getIndices());
LiveUnit.Assert.areEqual(ITEMS_COUNT, document.querySelectorAll(".win-container").length);
checkHeader(listView, 0, 70, 0, "groupListEditorTest_groupheader_", "0");
}).
then(function () {
listView.ensureVisible(ITEMS_COUNT - 1);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
if (isMultisizeTest) {
checkHeader(listView, 4, 2750, 0, "groupListEditorTest_groupheader_", "4");
checkTile(listView, ITEMS_COUNT - 2, 2750, 120, 'm', "Tile9");
checkTile(listView, ITEMS_COUNT - 1, 2950, 120, 's', "Tile4");
} else {
checkHeader(listView, 4, 1950, 0, "groupListEditorTest_groupheader_", "4");
checkTile(listView, ITEMS_COUNT - 2, 1950, 120, 'm', "Tile9");
checkTile(listView, ITEMS_COUNT - 1, 2150, 120, 'm', "Tile4");
}
}).
then(complete);
});
/// Changes first item in group 0
// Replaces item 0 in place with a new 'm' tile; count, selection index and
// surrounding layout must be unchanged apart from the new tile itself.
generate("testChangeFirstItem", function (listView, complete) {
LiveUnit.LoggingCore.logComment("in testChangeFirstItem");
var isMultisizeTest = !!listView.layout.groupInfo;
Helper.ListView.waitForReady(listView, -1)().
then(function () {
listView.selection.set(0);
return Helper.ListView.getDataObjects(listView.itemDataSource, [0]);
}).
then(function (dataObjects) {
return listView.itemDataSource.change(dataObjects[0].key, getDataObject('m', 0, "NewTile"));
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
return listView.itemDataSource.getCount();
}).
then(function (count) {
LiveUnit.Assert.areEqual(ITEMS_COUNT, count);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
// Changing an item keeps its key, so the selection stays on index 0.
Helper.ListView.elementsEqual([0], listView.selection.getIndices());
LiveUnit.Assert.areEqual(ITEMS_COUNT, document.querySelectorAll(".win-container").length);
if (isMultisizeTest) {
checkHeader(listView, 0, 70, 0, "groupListEditorTest_groupheader_", "0");
checkTile(listView, 0, 70, 120, 'm', "NewTile");
checkTile(listView, 1, 270, 120, 'b', "Tile5");
checkHeader(listView, 1, 740, 0, "groupListEditorTest_groupheader_", "1");
checkTile(listView, 2, 740, 120, 's', "Tile1");
} else {
checkHeader(listView, 0, 70, 0, "groupListEditorTest_groupheader_", "0");
checkTile(listView, 0, 70, 120, 'm', "NewTile");
checkTile(listView, 1, 270, 120, 'm', "Tile5");
checkHeader(listView, 1, 540, 0, "groupListEditorTest_groupheader_", "1");
checkTile(listView, 2, 540, 120, 'm', "Tile1");
}
}).
then(complete);
});
/// Changes items at boundary
// Replaces the last item of group 0 and the first item of group 1 with 'm'
// tiles; both layout branches expect identical offsets since all tiles are 'm'.
generate("testChangeAtGroupBoundary", function (listView, complete) {
LiveUnit.LoggingCore.logComment("in testChangeAtGroupBoundary");
var isMultisizeTest = !!listView.layout.groupInfo;
Helper.ListView.waitForReady(listView, -1)().
then(function () {
listView.selection.set(0);
return Helper.ListView.getDataObjects(listView.itemDataSource, [1, 2]);
}).
then(function (dataObjects) {
return listView.itemDataSource.change(dataObjects[0].key, getDataObject('m', 0, "NewTile")).
then(function () {
return listView.itemDataSource.change(dataObjects[1].key, getDataObject('m', 1, "NewTile"));
});
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
return listView.itemDataSource.getCount();
}).
then(function (count) {
LiveUnit.Assert.areEqual(ITEMS_COUNT, count);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
Helper.ListView.elementsEqual([0], listView.selection.getIndices());
LiveUnit.Assert.areEqual(ITEMS_COUNT, document.querySelectorAll(".win-container").length);
if (isMultisizeTest) {
checkHeader(listView, 0, 70, 0, "groupListEditorTest_groupheader_", "0");
checkTile(listView, 0, 70, 120, 'm', "Tile0");
checkTile(listView, 1, 270, 120, 'm', "NewTile");
checkHeader(listView, 1, 540, 0, "groupListEditorTest_groupheader_", "1");
checkTile(listView, 2, 540, 120, 'm', "NewTile");
} else {
checkHeader(listView, 0, 70, 0, "groupListEditorTest_groupheader_", "0");
checkTile(listView, 0, 70, 120, 'm', "Tile0");
checkTile(listView, 1, 270, 120, 'm', "NewTile");
checkHeader(listView, 1, 540, 0, "groupListEditorTest_groupheader_", "1");
checkTile(listView, 2, 540, 120, 'm', "NewTile");
}
}).
then(complete);
});
/// Changes the last item in group 4
// Replaces item 9 with an 'm' tile, scrolls the tail into view and verifies
// the final group's layout for both layout variants.
generate("testChangeLastItem", function (listView, complete) {
LiveUnit.LoggingCore.logComment("in testChangeLastItem");
var isMultisizeTest = !!listView.layout.groupInfo;
Helper.ListView.waitForReady(listView, -1)().
then(function () {
listView.selection.set(0);
return Helper.ListView.getDataObjects(listView.itemDataSource, [9]);
}).
then(function (dataObjects) {
return listView.itemDataSource.change(dataObjects[0].key, getDataObject('m', 4, "NewTile"));
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
return listView.itemDataSource.getCount();
}).
then(function (count) {
LiveUnit.Assert.areEqual(ITEMS_COUNT, count);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
Helper.ListView.elementsEqual([0], listView.selection.getIndices());
LiveUnit.Assert.areEqual(ITEMS_COUNT, document.querySelectorAll(".win-container").length);
checkHeader(listView, 0, 70, 0, "groupListEditorTest_groupheader_", "0");
listView.ensureVisible(ITEMS_COUNT - 1);
}).
then(Helper.ListView.waitForReady(listView, -1)).
then(function () {
if (isMultisizeTest) {
checkHeader(listView, 4, 2750, 0, "groupListEditorTest_groupheader_", "4");
checkTile(listView, 8, 2750, 120, 's', "Tile4");
checkTile(listView, 9, 3050, 120, 'm', "NewTile");
} else {
checkHeader(listView, 4, 1950, 0, "groupListEditorTest_groupheader_", "4");
checkTile(listView, 8, 1950, 120, 'm', "Tile4");
checkTile(listView, 9, 2150, 120, 'm', "NewTile");
}
}).
then(complete);
});
// Skip the testInsertToEmpty grid variants on non-IE11 browsers.
// NOTE(review): the underlying incompatibility is not visible here — confirm.
if (!Helper.Browser.isIE11) {
Helper.disableTest(GroupListEditorTest, "testInsertToEmpty_normal_grouped_grid_BindingList");
Helper.disableTest(GroupListEditorTest, "testInsertToEmpty_normal_grouped_grid_TestDataSource");
}
}
// Register this suite with the LiveUnit harness under its fully-qualified name.
LiveUnit.registerTestClass("WinJSTests.GroupListEditorTest");
import { Lexer } from './lexer';
import { Token, TokenType, BinOp, UnaryOp, Num, Var, Assign, Update, Import, Load, JS, NoOp, Str, Block, PropDecl, StyleDecl, Program, Template, Console, Tuple, Params, List, Dict, Bool, None, Func, Lambda, Return, Yield, Raise, Continue, Break, DataType, If, While, With, Try, Apply, Attr, Del, Instance, Await, For } from './tokens';
import type { Operand, Module } from './tokens';
/* syntax
program : import_list? block
import_list : import SEMI import_list
block : statement_list style_list
statement_list : statement
| statement SEMI statement_list
statement : assignment_statement
| empty
assignment_statement : variable ASSIGN expr
style_list : style_declaration
| style_declaration style_list
style_declaration : ID LCURLY block RCURLY
| empty
empty :
expr: term ((PLUS | MINUS) term)*
term: factor ((MUL | DIV) factor)*
factor : PLUS factor
| MINUS factor
| INTEGER
| LPAREN expr RPAREN
| variable
variable: ID
*/
// Prefix operator token-type strings accepted by unary().
const UNARYOPS = ['+', '-', '!'];
// Binary operator precedence table, tightest-binding first; binop(i) parses
// level i, recursing into level i - 1 for its operands.
const BINOPS = [['*', '/', '%'], ['+', '-'], ['==', '!=', '>', '>=', '<', '<='], ['in', 'not in'], ['not'], ['and'], ['or']];
export class Parser {
lexer: Lexer;
current_token: Token;
// Prime the parser with the first token so every parse method can inspect
// `current_token` before consuming anything.
constructor(lexer: Lexer) {
this.lexer = lexer;
this.current_token = lexer.get_next_token();
}
// Abort parsing with a generic syntax error; typed `never` so callers can
// use it in expression positions without a return.
error(): never {
throw Error('Invalid syntax');
}
// Consume the current token, which must be of `token_type`, and return the
// token that FOLLOWS it. Many callers rely on this look-ahead return value
// (e.g. `this.eat(TokenType.AS).value` reads the identifier after `as`).
// Throws via error() on a type mismatch.
eat(token_type: string): Token {
if (this.current_token.type === token_type) {
const next_token = this.lexer.get_next_token();
this.current_token = next_token;
return next_token;
} else {
this.error();
}
}
// list : LSQUARE (expr COMMA?)* RSQUARE — trailing commas are tolerated.
list(): List {
  this.eat(TokenType.LSQUARE);
  const items = [];
  while (this.current_token.type !== TokenType.RSQUARE) {
    items.push(this.expr());
    if (this.current_token.type === TokenType.COMMA) {
      this.eat(TokenType.COMMA);
    }
  }
  this.eat(TokenType.RSQUARE);
  return new List(items);
}
// Parses one of three constructs that start with LPAREN:
//   (expr)            -> returns the inner expression unchanged
//   (a, b, ...)       -> Tuple
//   (a, b) => expr    -> Lambda; (a, b) => { block } -> Func
// Relies on eat() returning the token AFTER the one consumed to spot `=>`.
tuple(): DataType {
let node;
const values:DataType[] = [];
let next = this.eat(TokenType.LPAREN);
if (this.current_token.type !== TokenType.RPAREN) {
node = this.expr();
if (this.current_token.type === TokenType.COMMA) {
// tuple
values.push(node);
this.eat(TokenType.COMMA);
let current = this.current_token;
while(current.type !== TokenType.RPAREN) {
values.push(this.expr());
current = this.current_token;
if (current.type === TokenType.COMMA) this.eat(TokenType.COMMA);
}
} else {
// (expr)
this.eat(TokenType.RPAREN);
return node;
}
}
next = this.eat(TokenType.RPAREN);
if (next.type === TokenType.ARROW) {
next = this.eat(TokenType.ARROW);
// Arrow-function parameters must all be plain identifiers.
const params = values.map(i => {
if (i instanceof Var) {
return i.value;
}
this.error();
});
if (next.type === TokenType.LCURLY) {
this.eat(TokenType.LCURLY);
const func = new Func(params, this.block());
this.eat(TokenType.RCURLY);
return func;
}
return new Lambda(params, this.expr());
}
return new Tuple(values);
}
// dict : LCURLY ((NUMBER | STRING) COLON expr COMMA?)* RCURLY
// Keys are restricted to number and string literals; anything else errors.
dict(): Dict {
const pairs:[string|number, (Operand | Str | Template | Tuple | List | Dict)][] = [];
this.eat(TokenType.LCURLY);
while(this.current_token.type !== TokenType.RCURLY) {
const token = this.current_token;
if (token.value === undefined) this.error();
if (token.type === TokenType.NUMBER) {
this.eat(TokenType.NUMBER);
} else if (token.type === TokenType.STRING) {
this.eat(TokenType.STRING);
} else {
this.error();
}
this.eat(TokenType.COLON);
pairs.push([token.value, this.expr()]);
if (this.current_token.type === TokenType.COMMA) this.eat(TokenType.COMMA);
}
this.eat(TokenType.RCURLY);
return new Dict(pairs);
}
// instance : NEW ID (LPAREN (expr COMMA?)* RPAREN)?
// `new Name` yields Instance(name) with params undefined; `new Name(...)`
// yields Instance(name, params) — the two forms are kept distinct.
// Cleanup: the original declared `params` as possibly-undefined and guarded
// it with an always-true `if (!params)`; the array is now created where the
// argument list actually starts.
instance(): Instance {
  this.eat(TokenType.NEW);
  const name = this.current_token.value as string;
  this.eat(TokenType.ID);
  if (this.current_token.type === TokenType.LPAREN) {
    const params: DataType[] = [];
    let next = this.eat(TokenType.LPAREN);
    while (next.type !== TokenType.RPAREN) {
      params.push(this.expr());
      next = this.current_token;
      if (next.type === TokenType.COMMA) this.eat(TokenType.COMMA);
    }
    this.eat(TokenType.RPAREN);
    return new Instance(name, params);
  }
  return new Instance(name);
}
// Parses an atomic operand: literal, parenthesized/tuple form, collection,
// `new` instance, `await` expression, variable, or single-parameter arrow
// function (`a => expr`). Errors on any other token.
data(): DataType {
// data: NUMBER | TRUE | FALSE | NONE | LPAREN expr RPAREN | variable | arrow function | new object
const token = this.current_token;
switch (token.type) {
case TokenType.NUMBER:
this.eat(TokenType.NUMBER);
return new Num(token);
case TokenType.TRUE:
this.eat(TokenType.TRUE);
return new Bool(true);
case TokenType.FALSE:
this.eat(TokenType.FALSE);
return new Bool(false);
case TokenType.NONE:
this.eat(TokenType.NONE);
return new None();
case TokenType.LPAREN:
// tuple() disambiguates (expr) vs (a, b) vs (a, b) => ...
return this.tuple();
case TokenType.STRING:
this.eat(TokenType.STRING);
return new Str(token);
case TokenType.TEMPLATE:
this.eat(TokenType.TEMPLATE);
return new Template(token);
case TokenType.LSQUARE:
return this.list();
case TokenType.LCURLY:
return this.dict();
case TokenType.NEW:
return this.instance();
case TokenType.AWAIT:
this.eat(TokenType.AWAIT);
return new Await(this.expr());
case TokenType.ID:
this.eat(TokenType.ID);
// @var add3 = a => a + 3;
if (this.current_token.type === TokenType.ARROW) {
this.eat(TokenType.ARROW);
return new Lambda([token.value as string], this.expr());
}
return new Var(token);
default:
this.error();
}
}
// Parses an atom followed by any chain of postfix operators — indexing
// `a[i]`, calls `a(args)` (arguments wrapped in a Params node), and
// attribute access `a.b` — building a left-associative BinOp chain keyed
// by the opening token ('[', '(' or '.').
sub(): DataType {
let node = this.data();
while ([TokenType.LSQUARE, TokenType.DOT, TokenType.LPAREN].includes(this.current_token.type)) {
const token = this.current_token;
if (token.type === TokenType.LSQUARE) {
this.eat(TokenType.LSQUARE);
node = new BinOp(node, token, this.expr());
this.eat(TokenType.RSQUARE);
} else if (token.type === TokenType.LPAREN) {
this.eat(TokenType.LPAREN);
const params = [];
let next = this.current_token;
while(next.type !== TokenType.RPAREN) {
params.push(this.expr());
next = this.current_token;
if (next.type === TokenType.COMMA) this.eat(TokenType.COMMA);
}
node = new BinOp(node, token, new Params(params));
this.eat(TokenType.RPAREN);
} else {
this.eat(TokenType.DOT);
node = new BinOp(node, token, new Var(this.current_token));
this.eat(TokenType.ID);
}
}
return node;
}
// exp : sub (EXP sub)* — exponentiation, folded left-associatively.
exp(): DataType {
  let left = this.sub();
  for (;;) {
    const op = this.current_token;
    if (op.type !== TokenType.EXP) break;
    this.eat(op.type);
    left = new BinOp(left, op, this.sub());
  }
  return left;
}
// unary : (PLUS | MINUS | NOT)* exp
// Bug fix: the original used `while (...) { node = new UnaryOp(token, this.exp()) }`,
// which (a) discarded the previously built UnaryOp on each iteration, and
// (b) — because '+'/'-' are both unary and binary token types — kept looping
// on a BINARY '+'/'-' left after parsing the operand, so `-x + y` parsed as
// `+y`, silently dropping `-x`. The standard recursive production below
// nests stacked prefixes correctly (`!!x` => UnaryOp('!', UnaryOp('!', x)))
// and returns to the caller as soon as the operand is parsed, leaving binary
// operators for binop() to handle.
unary(): DataType {
  if (UNARYOPS.includes(this.current_token.type)) {
    const token = this.current_token;
    this.eat(token.type);
    return new UnaryOp(token, this.unary());
  }
  return this.exp();
}
// Precedence-climbing over the BINOPS table: level `index` parses its
// operands at level `index - 1` (level 0 operands come from unary()),
// folding same-level operators left-associatively.
binop(index: number): Operand {
// term: factor ((MUL | DIV) factor)*
if (index === 4 && this.current_token.type === TokenType.NOT) {
// 'not' is a special case: a prefix operator sitting at its own
// precedence level, applied to a level-3 (comparison) operand.
const token = this.current_token;
this.eat(token.type);
return new UnaryOp(token, this.binop(3));
}
let node = index === 0 ? this.unary() : this.binop(index - 1);
while (BINOPS[index].includes(this.current_token.type)) {
const token = this.current_token;
this.eat(token.type);
node = new BinOp(node, token, index === 0 ? this.unary(): this.binop(index - 1));
}
return node;
}
// expr : a full expression, entered at the loosest precedence level ('or').
// Collection literals are reached through the operand chain (binop -> unary
// -> exp -> sub -> data), so no special-casing is needed here.
expr(): DataType {
  return this.binop(BINOPS.length - 1);
}
// exprs : expr (COMMA exprs)? — comma chains fold rightward into nested
// BinOp nodes keyed by the comma token.
exprs(): DataType {
  const first = this.expr();
  if (this.current_token.type !== TokenType.COMMA) return first;
  const comma = this.current_token;
  this.eat(TokenType.COMMA);
  return new BinOp(first, comma, this.exprs());
}
// Produce a placeholder node for the grammar's `empty` production.
empty(): NoOp {
return new NoOp();
}
// @log / @warn / @error / @assert expr[, expr...]
// The leading keyword token doubles as the Console node's channel.
console_statement(type: TokenType.LOG | TokenType.WARN | TokenType.ERROR | TokenType.ASSERT): Console {
  this.eat(type);
  return new Console(type, this.exprs());
}
// @var name = exprs — declare-and-assign statement.
assignment_statement(): Assign {
  this.eat(TokenType.VAR);
  const target = new Var(this.current_token);
  this.eat(TokenType.ID);
  const assign_token = this.current_token;
  this.eat(TokenType.ASSIGN);
  return new Assign(target, assign_token, this.exprs());
}
// name = exprs — reassignment of an already-declared variable,
// e.g. `borderWidth = borderWidth + 4rem;`.
update_statement(): Update {
  const name_token = this.current_token;
  this.eat(TokenType.ID);
  const assign_token = this.current_token;
  this.eat(TokenType.ASSIGN);
  return new Update(new Var(name_token), assign_token, this.exprs());
}
// @js { ... } — the lexer captures the raw JavaScript body as the JS
// token's value; it is passed through verbatim.
javascript_statement(): JS {
  const token = this.current_token;
  this.eat(TokenType.JS);
  return new JS(token.value as string);
}
// Parses the four function forms (optionally async):
//   @func name(a, b) { block }   -> Func
//   @func (a, b) { block }       -> Func (anonymous)
//   @func name(a, b) => expr     -> Lambda
//   @func (a, b) => expr         -> Lambda (anonymous)
// Errors if neither `=>` nor `{` follows the parameter list.
function_statement(async = false): Func | Lambda {
// @func name(id1, id2) {...}
// @func (id1, id2) {...}
// @func name(id1, id2) => ...
// @func (id1, id2) => ...
let name: string | undefined;
const params: string[] = [];
// The caller has already decided async vs plain; consume the matching keyword.
async ? this.eat(TokenType.ASYNC): this.eat(TokenType.FUNC);
if (this.current_token.type === TokenType.ID) {
name = this.current_token.value as string;
this.eat(TokenType.ID);
}
// @func name(id1, id2, ...)
// eat() returns the FOLLOWING token, so `next_token` is always one ahead.
let next_token = this.eat(TokenType.LPAREN);
while(next_token.type !== TokenType.RPAREN) {
params.push(next_token.value as string);
next_token = this.eat(TokenType.ID);
if (next_token.type === TokenType.COMMA) next_token = this.eat(TokenType.COMMA);
}
this.eat(TokenType.RPAREN);
if (this.current_token.type === TokenType.ARROW) {
this.eat(TokenType.ARROW);
return new Lambda(params, this.expr(), name, async);
}
if (this.current_token.type === TokenType.LCURLY) {
this.eat(TokenType.LCURLY);
const block = this.block();
this.eat(TokenType.RCURLY);
return new Func(params, block, name, async);
}
this.error();
}
// @if expr { block } (@elif expr { block })* (@else { block })?
// Builds a single If node; elif/else branches are attached via add_elif /
// add_else rather than nesting.
if_statement(): If {
this.eat(TokenType.IF);
const expr = this.expr();
this.eat(TokenType.LCURLY);
const state = new If(expr, this.block());
this.eat(TokenType.RCURLY);
while (this.current_token.type === TokenType.ELIF) {
this.eat(TokenType.ELIF);
const expr = this.expr();
this.eat(TokenType.LCURLY);
state.add_elif(expr, this.block());
this.eat(TokenType.RCURLY);
}
if (this.current_token.type === TokenType.ELSE) {
this.eat(TokenType.ELSE);
this.eat(TokenType.LCURLY);
state.add_else(this.block());
this.eat(TokenType.RCURLY);
}
return state;
}
// @try { block } (@except [Name [as alias]] { block })* (@else { block })?
// (@finally { block })?
// A bare `@except {` (no error name) is treated as a catch-all and ends the
// except chain. Relies on eat() returning the token AFTER the one consumed.
try_statement(): Try {
this.eat(TokenType.TRY);
this.eat(TokenType.LCURLY);
const state = new Try(this.block());
this.eat(TokenType.RCURLY);
while (this.current_token.type === TokenType.EXCEPT) {
// `error` is the token following EXCEPT: either '{' (catch-all) or the
// error name identifier.
const error = this.eat(TokenType.EXCEPT);
if (error.type === TokenType.LCURLY) {
this.eat(TokenType.LCURLY);
state.add_finally_except(this.block());
this.eat(TokenType.RCURLY);
break;
}
const next = this.eat(TokenType.ID);
let alias;
if (next.type === TokenType.AS) {
// eat(AS) yields the alias identifier token.
alias = this.eat(TokenType.AS).value as string;
this.eat(TokenType.ID);
}
this.eat(TokenType.LCURLY);
state.add_except(error.value as string, this.block(), alias);
this.eat(TokenType.RCURLY);
}
if (this.current_token.type === TokenType.ELSE) {
this.eat(TokenType.ELSE);
this.eat(TokenType.LCURLY);
state.add_else(this.block());
this.eat(TokenType.RCURLY);
}
if (this.current_token.type === TokenType.FINALLY) {
this.eat(TokenType.FINALLY);
this.eat(TokenType.LCURLY);
state.add_finally(this.block());
this.eat(TokenType.RCURLY);
}
return state;
}
// @while expr { block } (@else { block })?
while_statement(): While {
  this.eat(TokenType.WHILE);
  const condition = this.expr();
  this.eat(TokenType.LCURLY);
  const node = new While(condition, this.block());
  this.eat(TokenType.RCURLY);
  if (this.current_token.type !== TokenType.ELSE) return node;
  this.eat(TokenType.ELSE);
  this.eat(TokenType.LCURLY);
  node.add_else(this.block());
  this.eat(TokenType.RCURLY);
  return node;
}
// @for id (, id)* in expr { block } (@else { block })?
for_statement(): For {
  this.eat(TokenType.FOR);
  const names: string[] = [];
  while (this.current_token.type === TokenType.ID) {
    names.push(this.current_token.value as string);
    // eat() returns the token after the identifier; skip a separating comma.
    if (this.eat(TokenType.ID).type === TokenType.COMMA) this.eat(TokenType.COMMA);
  }
  this.eat(TokenType.IN);
  const iterable = this.expr();
  this.eat(TokenType.LCURLY);
  const node = new For(names, iterable, this.block());
  this.eat(TokenType.RCURLY);
  if (this.current_token.type !== TokenType.ELSE) return node;
  this.eat(TokenType.ELSE);
  this.eat(TokenType.LCURLY);
  node.add_else(this.block());
  this.eat(TokenType.RCURLY);
  return node;
}
// @with expr as name { block }
with_statement(): With {
  this.eat(TokenType.WITH);
  const context = this.expr();
  // eat(AS) returns the token after `as`, i.e. the binding identifier.
  const alias = this.eat(TokenType.AS).value as string;
  this.eat(TokenType.ID);
  this.eat(TokenType.LCURLY);
  const body = this.block();
  this.eat(TokenType.RCURLY);
  return new With(context, alias, body);
}
import_statement(): Import {
// @import 'a.windi', 'b.windi', 'c.css'
this.eat(TokenType.IMPORT);
const urls:string[] = [];
while (this.current_token.type === TokenType.STRING) {
urls.push(this.current_token.value as string);
const next_token = this.eat(TokenType.STRING);
if (next_token.type === TokenType.COMMA) {
this.eat(TokenType.COMMA);
}
}
return new Import(urls);
}
import_path(): string {
// from 'module'
const next_token = this.eat(TokenType.FROM);
if (next_token.type !== TokenType.STRING) this.error();
this.eat(TokenType.STRING);
return next_token.value as string;
}
import_all(): Module {
// * from "module"
// * as name from "module"
this.eat(TokenType.MUL);
if (this.current_token.type === TokenType.FROM) return { url: this.import_path(), exports: { '*': '*' } };
if (this.current_token.type === TokenType.AS) {
const next_token = this.eat(TokenType.AS);
this.eat(TokenType.ID);
return { url: this.import_path(), exports: { [next_token.value as string]: '*' } };
}
this.error();
}
  import_exports(): Module {
    // { export1 , export2 as alias2 , export3 as alias3 } from 'module-name;
    // Parses a braced export list; returns { url, exports } where `exports`
    // maps the local binding name to the exported name.
    const exports: {[key:string]:string} = {};
    this.eat(TokenType.LCURLY);
    while (this.current_token.type !== TokenType.RCURLY) {
      const value = this.current_token.value as string;
      const next_token = this.eat(TokenType.ID);
      if (next_token.type === TokenType.COMMA) {
        // plain `name,` — imported under its own name
        exports[value] = value;
        this.eat(TokenType.COMMA);
      } else if (next_token.type === TokenType.RCURLY) {
        // last plain entry before the closing brace
        exports[value] = value;
      } else if (next_token.type === TokenType.AS) {
        // `name as alias` — keyed by the alias, value is the exported name
        this.eat(TokenType.AS);
        exports[this.current_token.value as string] = value;
        this.eat(TokenType.ID);
        if (this.current_token.type === TokenType.COMMA) this.eat(TokenType.COMMA);
      } else {
        this.error();
      }
    }
    this.eat(TokenType.RCURLY);
    return { url: this.import_path(), exports };
  }
  import_default(): Module {
    // defaultExport from "module";
    // defaultExport, { export1, export2 } from "module-name";
    // defaultExport, * as name from 'module-name';
    const _default = this.current_token.value as string;
    let next_token = this.eat(TokenType.ID);
    // Default-only import: `default from 'url'`.
    if (next_token.type === TokenType.FROM) return { url: this.import_path(), default: _default };
    if (next_token.type === TokenType.COMMA) {
      // Default combined with either a named list or a namespace import.
      next_token = this.eat(TokenType.COMMA);
      if (next_token.type === TokenType.LCURLY) return { default: _default, ...this.import_exports() };
      if (next_token.type === TokenType.MUL) return { default: _default, ...this.import_all() };
    }
    this.error();
  }
  load_statement(): Load {
    /*
      @load 'module1', 'module2', 'module3';
      @load { export1 } from "module-name";
      @load { export1 as alias1 } from "module-name";
      @load { export1 , export2 } from "module-name";
      @load { export1 , export2 as alias2 , export3 as alias3 } from "module-name";
      @load * from "module";
      @load * as name from "module";
      @load defaultExport from "module";
      @load defaultExport, { export1, export2 } from "module-name";
      @load defaultExport, * as name from 'module-name';
    */
    const modules: Module[] = [];
    this.eat(TokenType.LOAD);
    // The token right after @load selects which import form to parse.
    switch (this.current_token.type) {
      case TokenType.STRING:
        // bare comma-separated url list
        while (this.current_token.type === TokenType.STRING) {
          modules.push({ url: this.current_token.value as string });
          const next_token = this.eat(TokenType.STRING);
          if (next_token.type === TokenType.COMMA) {
            this.eat(TokenType.COMMA);
          }
        }
        break;
      case TokenType.LCURLY:
        // named export list: { a, b as c } from '...'
        modules.push(this.import_exports());
        break;
      case TokenType.MUL:
        // namespace import: * [as name] from '...'
        modules.push(this.import_all());
        break;
      case TokenType.ID:
        // default import, possibly followed by named/namespace imports
        modules.push(this.import_default());
        break;
      default:
        this.error();
    }
    return new Load(modules);
  }
  statement(): Assign | NoOp {
    /*
      statement : assignment_statement
                | update_statement
                | expression_statement
                | console_statement
                | empty
    */
    // Dispatches on the current token to the dedicated statement parser.
    let node;
    let next_type;
    switch (this.current_token.type) {
      case TokenType.VAR:
        node = this.assignment_statement();
        break;
      case TokenType.FUNC:
        node = this.function_statement(false);
        break;
      case TokenType.ASYNC:
        node = this.function_statement(true);
        break;
      case TokenType.RETURN:
        this.eat(TokenType.RETURN);
        node = new Return(this.expr());
        break;
      case TokenType.APPLY:
        // The APPLY token carries its payload in `value`; read before eating.
        node = new Apply(this.current_token.value as string);
        this.eat(TokenType.APPLY);
        break;
      case TokenType.ATTR:
        node = new Attr(this.current_token.meta as string, this.current_token.value as string);
        this.eat(TokenType.ATTR);
        break;
      case TokenType.YIELD:
        this.eat(TokenType.YIELD);
        node = new Yield(this.expr());
        break;
      case TokenType.RAISE:
        this.eat(TokenType.RAISE);
        node = new Raise(this.expr());
        break;
      case TokenType.DEL:
        this.eat(TokenType.DEL);
        node = new Del(this.expr());
        break;
      case TokenType.CONTINUE:
        this.eat(TokenType.CONTINUE);
        node = new Continue();
        break;
      case TokenType.BREAK:
        this.eat(TokenType.BREAK);
        node = new Break();
        break;
      case TokenType.IMPORT:
        node = this.import_statement();
        break;
      case TokenType.LOAD:
        node = this.load_statement();
        break;
      case TokenType.LOG:
        node = this.console_statement(TokenType.LOG);
        break;
      case TokenType.WARN:
        node = this.console_statement(TokenType.WARN);
        break;
      case TokenType.ERROR:
        node = this.console_statement(TokenType.ERROR);
        break;
      case TokenType.ASSERT:
        node = this.console_statement(TokenType.ASSERT);
        break;
      case TokenType.IF:
        node = this.if_statement();
        break;
      case TokenType.WHILE:
        node = this.while_statement();
        break;
      case TokenType.FOR:
        node = this.for_statement();
        break;
      case TokenType.WITH:
        node = this.with_statement();
        break;
      case TokenType.TRY:
        node = this.try_statement();
        break;
      case TokenType.JS:
        node = this.javascript_statement();
        break;
      case TokenType.LPAREN:
      case TokenType.LCURLY:
      case TokenType.LSQUARE:
        node = this.exprs();
        break;
      case TokenType.ID:
        // An ID is ambiguous: peek one token ahead to decide whether this is
        // an assignment update, a style declaration, or a plain expression.
        next_type = this.lexer.peek_next_token().type;
        if (next_type === TokenType.ASSIGN) {
          // update statement
          node = this.update_statement();
        } else if ([TokenType.LCURLY, TokenType.STRING, TokenType.TEMPLATE].includes(next_type)) {
          // style — handled later by style_list(), so emit a no-op here
          node = this.empty();
        } else {
          // expression statement
          node = this.exprs();
        }
        break;
      default:
        node = this.empty();
    }
    return node;
  }
statement_list(): ( Assign | NoOp )[] {
/*
statement_list : statement
| statement SEMI statement_list
*/
const node = this.statement();
const results = [ node ];
while (this.current_token.type === TokenType.SEMI) {
this.eat(TokenType.SEMI);
results.push(this.statement());
}
if (this.current_token.type === TokenType.VAR) this.error();
return results;
}
style_declaration(): StyleDecl | PropDecl | NoOp {
/*
style_declaration : ID LCURLY block RCURLY
| ID Template | Str
| prop_list
| empty
*/
let node;
const name = this.current_token.value?.toString() ?? '';
if (this.current_token.type === TokenType.ID) {
const next_token = this.eat(TokenType.ID);
if (next_token.type === TokenType.LCURLY) {
// style
this.eat(TokenType.LCURLY);
node = new StyleDecl(name, this.block());
this.eat(TokenType.RCURLY);
} else if (next_token.type === TokenType.STRING) {
// prop
this.eat(TokenType.STRING);
node = new PropDecl(name, new Str(next_token));
this.eat(TokenType.SEMI);
} else if (next_token.type === TokenType.TEMPLATE) {
// prop
this.eat(TokenType.TEMPLATE);
node = new PropDecl(name, new Template(next_token));
this.eat(TokenType.SEMI);
} else {
this.error();
}
} else {
node = this.empty();
}
return node;
}
style_list(): (StyleDecl | PropDecl | NoOp)[] {
/*
style_list : style_declaration
| prop_declaration
| style_declaration style_list
*/
const node = this.style_declaration();
const results = [ node ];
while (this.current_token.type === TokenType.ID) {
results.push(this.style_declaration());
}
return results;
}
block(): Block {
return new Block(this.statement_list(), this.style_list());
}
program(): Program {
return new Program(this.block());
}
parse(): Program {
const node = this.program();
if (this.current_token.type !== TokenType.EOF) this.error();
return node;
}
} | the_stack |
import { BasePluginComponent } from 'src/interfaces/base-plugin-component';
import ace from 'ace-builds';
import 'ace-builds/webpack-resolver';
import md from 'markdown-it';
import emoji from 'markdown-it-emoji';
import sub from 'markdown-it-sub';
import sup from 'markdown-it-sup';
import footnote from 'markdown-it-footnote';
import mark from 'markdown-it-mark';
import ins from 'markdown-it-ins';
import container from 'markdown-it-container';
import abbr from 'markdown-it-abbr';
import deflist from 'markdown-it-deflist';
import taskLists from 'markdown-it-task-lists';
import namedCodeBlocks from 'markdown-it-named-code-blocks';
import highlightjs from 'markdown-it-highlightjs';
import toc from 'markdown-it-table-of-contents';
import hljs from 'highlight.js/lib/core';
declare let $: any;
export class AddMarkDownComponent implements BasePluginComponent {
private editors: Map<string, ace.Ace.Editor>;
// public mode = 'editor';
private markdown;
private emoji;
constructor() {
hljs.registerLanguage(
'actionscript',
require('highlight.js/lib/languages/actionscript')
);
hljs.registerLanguage(
'apache',
require('highlight.js/lib/languages/apache')
);
hljs.registerLanguage(
'armasm',
require('highlight.js/lib/languages/armasm')
);
hljs.registerLanguage('xml', require('highlight.js/lib/languages/xml'));
hljs.registerLanguage(
'asciidoc',
require('highlight.js/lib//languages/asciidoc')
);
hljs.registerLanguage(
'avrasm',
require('highlight.js/lib/languages/avrasm')
);
hljs.registerLanguage(
'bash',
require('highlight.js/lib/languages/bash')
);
hljs.registerLanguage(
'clojure',
require('highlight.js/lib/languages/clojure')
);
hljs.registerLanguage(
'cmake',
require('highlight.js/lib/languages/cmake')
);
hljs.registerLanguage(
'coffeescript',
require('highlight.js/lib/languages/coffeescript')
);
hljs.registerLanguage(
'c-like',
require('highlight.js/lib/languages/c-like')
);
hljs.registerLanguage('c', require('highlight.js/lib/languages/c'));
hljs.registerLanguage('cpp', require('highlight.js/lib/languages/cpp'));
hljs.registerLanguage(
'arduino',
require('highlight.js/lib/languages/arduino')
);
hljs.registerLanguage('css', require('highlight.js/lib/languages/css'));
hljs.registerLanguage(
'diff',
require('highlight.js/lib/languages/diff')
);
hljs.registerLanguage(
'django',
require('highlight.js/lib/languages/django')
);
hljs.registerLanguage(
'dockerfile',
require('highlight.js/lib/languages/dockerfile')
);
hljs.registerLanguage(
'ruby',
require('highlight.js/lib/languages/ruby')
);
hljs.registerLanguage(
'fortran',
require('highlight.js/lib/languages/fortran')
);
hljs.registerLanguage(
'glsl',
require('highlight.js/lib/languages/glsl')
);
hljs.registerLanguage('go', require('highlight.js/lib/languages/go'));
hljs.registerLanguage(
'groovy',
require('highlight.js/lib/languages/groovy')
);
hljs.registerLanguage(
'handlebars',
require('highlight.js/lib/languages/handlebars')
);
hljs.registerLanguage(
'haskell',
require('highlight.js/lib/languages/haskell')
);
hljs.registerLanguage('ini', require('highlight.js/lib/languages/ini'));
hljs.registerLanguage(
'java',
require('highlight.js/lib/languages/java')
);
hljs.registerLanguage(
'javascript',
require('highlight.js/lib/languages/javascript')
);
hljs.registerLanguage(
'json',
require('highlight.js/lib/languages/json')
);
hljs.registerLanguage(
'latex',
require('highlight.js/lib/languages/latex')
);
hljs.registerLanguage(
'less',
require('highlight.js/lib/languages/less')
);
hljs.registerLanguage(
'lisp',
require('highlight.js/lib/languages/lisp')
);
hljs.registerLanguage(
'livescript',
require('highlight.js/lib/languages/livescript')
);
hljs.registerLanguage('lua', require('highlight.js/lib/languages/lua'));
hljs.registerLanguage(
'makefile',
require('highlight.js/lib/languages/makefile')
);
hljs.registerLanguage(
'matlab',
require('highlight.js/lib/languages/matlab')
);
hljs.registerLanguage(
'mipsasm',
require('highlight.js/lib/languages/mipsasm')
);
hljs.registerLanguage(
'perl',
require('highlight.js/lib/languages/perl')
);
hljs.registerLanguage(
'nginx',
require('highlight.js/lib/languages/nginx')
);
hljs.registerLanguage(
'objectivec',
require('highlight.js/lib/languages/objectivec')
);
hljs.registerLanguage('php', require('highlight.js/lib/languages/php'));
hljs.registerLanguage(
'python',
require('highlight.js/lib/languages/python')
);
hljs.registerLanguage(
'rust',
require('highlight.js/lib/languages/rust')
);
hljs.registerLanguage(
'scala',
require('highlight.js/lib/languages/scala')
);
hljs.registerLanguage(
'scheme',
require('highlight.js/lib/languages/scheme')
);
hljs.registerLanguage(
'scss',
require('highlight.js/lib/languages/scss')
);
hljs.registerLanguage(
'smalltalk',
require('highlight.js/lib/languages/smalltalk')
);
hljs.registerLanguage(
'stylus',
require('highlight.js/lib/languages/stylus')
);
hljs.registerLanguage(
'swift',
require('highlight.js/lib/languages/swift')
);
hljs.registerLanguage('tcl', require('highlight.js/lib/languages/tcl'));
hljs.registerLanguage(
'typescript',
require('highlight.js/lib/languages/typescript')
);
hljs.registerLanguage(
'verilog',
require('highlight.js/lib/languages/verilog')
);
hljs.registerLanguage(
'vhdl',
require('highlight.js/lib/languages/vhdl')
);
hljs.registerLanguage(
'yaml',
require('highlight.js/lib/languages/yaml')
);
hljs.initHighlightingOnLoad();
this.markdown = md({
html: true, // Enable HTML tags in source
xhtmlOut: true, // Use '/' to close single tags (<br />).
// This is only for full CommonMark compatibility.
breaks: false, // Convert '\n' in paragraphs into <br>
langPrefix: 'language-', // CSS language prefix for fenced blocks. Can be
// useful for external highlighters.
linkify: true, // Autoconvert URL-like text to links
// Enable some language-neutral replacement + quotes beautification
// For the full list of replacements, see https://github.com/markdown-it/markdown-it/blob/master/lib/rules_core/replacements.js
typographer: true,
// Double + single quotes replacement pairs, when typographer enabled,
// and smartquotes on. Could be either a String or an Array.
//
// For example, you can use '«»„“' for Russian, '„“‚‘' for German,
// and ['«\xA0', '\xA0»', '‹\xA0', '\xA0›'] for French (including nbsp).
quotes: '“”‘’',
// Highlighter function. Should return escaped HTML,
// or '' if the source string is not changed and should be escaped externally.
// If result starts with <pre... internal wrapper is skipped.
highlight: this.highlightOption,
});
// this.emoji = emoji();
this.markdown.use(emoji);
this.markdown.use(sub);
this.markdown.use(sup);
this.markdown.use(footnote);
this.markdown.use(mark);
this.markdown.use(ins);
this.markdown.use(container);
this.markdown.use(abbr);
this.markdown.use(deflist);
this.markdown.use(taskLists, { label: true, labelAfter: true });
this.markdown.use(highlightjs, { hljs, inline: true });
this.markdown.use(namedCodeBlocks);
this.markdown.use(toc);
this.editors = new Map();
}
addToolBox = (uid) => {
$(`#original-${uid}`).attr('contenteditable', false);
$(`#original-${uid}`).append(`
<nav>
<div class="nav nav-tabs" id="md-nav-tab-${uid}" role="tablist">
<a class="nav-item nav-link active" id="md-nav-home-tab-${uid}" data-toggle="tab" href="#md-nav-edit-${uid}" role="tab" aria-controls="nav-edit" aria-selected="true">Edit</a>
<a class="nav-item nav-link" id="md-nav-profile-tab-${uid}" data-toggle="tab" href="#md-nav-preview-${uid}" role="tab" aria-controls="nav-preview" aria-selected="false">Preview</a>
</div>
</nav>
<div class="tab-content" id="md-nav-tabContent-${uid}">
<div class="tab-pane fade show active" id="md-nav-edit-${uid}" role="tabpanel" aria-labelledby="md-edit-tab"></div>
<div class="tab-pane fade markdown-body" id="md-nav-preview-${uid}" role="tabpanel" aria-labelledby="md-preview-tab" style="padding-top: 20px;"></div>
</div>
`);
ace.config.set('fontSize', '14px');
$(`#md-nav-edit-${uid}`).css({
position: 'relative',
width: $(`#original-${uid}`).width(),
height: '390px',
resize: 'vertical',
});
// ace.config.set('basePath', srcNoconflict);
this.editors.set(uid, ace.edit(`md-nav-edit-${uid}`));
const editor = this.editors.get(uid);
editor.session.setMode('ace/mode/markdown');
// editor.setTheme('ace/mode/github');
editor.session.setValue('<!--- Enter your markdown code here --->\n');
editor.focus();
editor.navigateFileEnd();
// Changing Markdown Preview to match code in the Editor Tab
$(`#md-nav-profile-tab-${uid}`).click(() => {
const outHtml = this.markdown.render(editor.session.getValue());
$(`#md-nav-preview-${uid}`).empty().append(outHtml);
});
// To set focus to editor
$(`#md-nav-home-tab-${uid}`).click(() => {
editor.focus();
});
};
highlightOption(str, lang) {
if (lang && hljs.getLanguage(lang)) {
try {
return (
'<pre class="hljs"><code>' +
hljs.highlight(lang, str, true).value +
'</code></pre>'
);
} catch (__) {}
}
return (
'<pre class="hljs"><code>' +
this.markdown.utils.escapeHtml(str) +
'</code></pre>'
);
}
getContent(uid): string {
if (this.editors.has(uid)) {
return this.editors.get(uid).session.getValue();
} else {
return '';
}
}
setContent(uid, content: string) {
if (this.editors.has(uid)) {
this.editors.get(uid).session.setValue(content);
// To Show Preview by default
$(`#md-nav-profile-tab-${uid}`).trigger('click');
} else {
this.addToolBox(uid);
this.editors.get(uid).session.setValue(content);
// To Show Preview by default
$(`#md-nav-profile-tab-${uid}`).trigger('click');
}
}
} | the_stack |
// clang-format off
import 'chrome://resources/cr_elements/cr_lottie/cr_lottie.m.js';
import {CrLottieElement, LOTTIE_JS_URL} from 'chrome://resources/cr_elements/cr_lottie/cr_lottie.m.js';
import {flush} from 'chrome://resources/polymer/v3_0/polymer/polymer_bundled.min.js';
import {assertDeepEquals, assertEquals, assertFalse, assertTrue} from 'chrome://webui-test/chai_assert.js';
import {MockController, MockMethod} from 'chrome://webui-test/mock_controller.js';
import {eventToPromise} from 'chrome://webui-test/test_util.js';
// clang-format on
/** @fileoverview Suite of tests for cr-lottie. */
suite('cr_lottie_test', function() {
  /**
   * A data url that produces a sample solid green json lottie animation.
   */
  const SAMPLE_LOTTIE_GREEN: string =
      'data:application/json;base64,eyJ2IjoiNC42LjkiLCJmciI6NjAsImlwIjowLCJvc' +
      'CI6MjAwLCJ3Ijo4MDAsImgiOjYwMCwiZGRkIjowLCJhc3NldHMiOltdLCJsYXllcnMiOlt' +
      '7ImluZCI6MSwidHkiOjEsInNjIjoiIzAwZmYwMCIsImFvIjowLCJpcCI6MCwib3AiOjIwM' +
      'Cwic3QiOjAsInNyIjoxLCJzdyI6ODAwLCJzaCI6NjAwLCJibSI6MCwia3MiOnsibyI6eyJ' +
      'hIjowLCJrIjoxMDB9LCJyIjp7ImEiOjAsImsiOlswLDAsMF19LCJwIjp7ImEiOjAsImsiO' +
      'lszMDAsMjAwLDBdfSwiYSI6eyJhIjowLCJrIjpbMzAwLDIwMCwwXX0sInMiOnsiYSI6MCw' +
      'iayI6WzEwMCwxMDAsMTAwXX19fV19';
  /**
   * A data url that produces a sample solid blue json lottie animation.
   */
  const SAMPLE_LOTTIE_BLUE: string =
      'data:application/json;base64,eyJhc3NldHMiOltdLCJkZGQiOjAsImZyIjo2MCwia' +
      'CI6NjAwLCJpcCI6MCwibGF5ZXJzIjpbeyJhbyI6MCwiYm0iOjAsImluZCI6MSwiaXAiOjA' +
      'sImtzIjp7ImEiOnsiYSI6MCwiayI6WzMwMCwyMDAsMF19LCJvIjp7ImEiOjAsImsiOjEwM' +
      'H0sInAiOnsiYSI6MCwiayI6WzMwMCwyMDAsMF19LCJyIjp7ImEiOjAsImsiOlswLDAsMF1' +
      '9LCJzIjp7ImEiOjAsImsiOlsxMDAsMTAwLDEwMF19fSwib3AiOjIwMCwic2MiOiIjMDAwM' +
      'GZmIiwic2giOjYwMCwic3IiOjEsInN0IjowLCJzdyI6ODAwLCJ0eSI6MX1dLCJvcCI6MjA' +
      'wLCJ2IjoiNC42LjkiLCJ3Ijo4MDB9';
  /**
   * A green pixel as returned by samplePixel.
   */
  const GREEN_PIXEL: number[] = [0, 255, 0, 255];
  /**
   * A blue pixel as returned by samplePixel.
   */
  const BLUE_PIXEL: number[] = [0, 0, 255, 255];
  let mockController: MockController;
  let crLottieElement: CrLottieElement;
  let container: HTMLElement;
  let canvas: HTMLCanvasElement;
  // The real lottie worker script, fetched once in setup() so tests that mock
  // XMLHttpRequest can hand it back to the element.
  let lottieWorkerJs: Blob;
  let waitForInitializeEvent: Promise<void>;
  let waitForPlayingEvent: Promise<void>;
  setup(function(done) {
    mockController = new MockController();
    // Download the worker script with a plain XHR; done() gates the tests
    // until it has arrived.
    const xhr = new XMLHttpRequest();
    xhr.open('GET', LOTTIE_JS_URL, true);
    xhr.responseType = 'blob';
    xhr.send();
    xhr.onreadystatechange = function() {
      if (xhr.readyState === 4) {
        assertEquals(200, xhr.status);
        lottieWorkerJs = /** @type {Blob} */ (xhr.response);
        done();
      }
    };
  });
  teardown(function() {
    // Restore any functions replaced by createFunctionMock in the tests.
    mockController.reset();
  });
  // Replaces the document body with a fresh 300x200 container hosting an
  // autoplaying cr-lottie element (green sample animation) and primes the
  // `waitForInitializeEvent` / `waitForPlayingEvent` promises.
  function createLottieElement() {
    document.body.innerHTML = '';
    crLottieElement =
        /** @type {!CrLottieElement} */ (document.createElement('cr-lottie'));
    crLottieElement.animationUrl = SAMPLE_LOTTIE_GREEN;
    crLottieElement.autoplay = true;
    // Listeners must be attached before the element is connected so the
    // initial events are not missed.
    waitForInitializeEvent =
        eventToPromise('cr-lottie-initialized', crLottieElement);
    waitForPlayingEvent = eventToPromise('cr-lottie-playing', crLottieElement);
    container = /** @type {!HTMLDivElement} */ (document.createElement('div'));
    container.style.width = '300px';
    container.style.height = '200px';
    document.body.appendChild(container);
    container.appendChild(crLottieElement);
    canvas = crLottieElement.$.canvas;
    flush();
  }
  /**
   * Samples a pixel from the lottie canvas.
   *
   * @return {!Promise<!Array<number>>} the pixel color as a [red, green, blue,
   *     transparency] tuple with values 0-255.
   */
  async function samplePixel() {
    // It's not possible to get the context from a canvas that had its control
    // transferred to an OffscreenCanvas, or from a detached OffscreenCanvas.
    // Instead, copy the rendered canvas into a new canvas and sample a pixel
    // from it.
    const img = document.createElement('img');
    const waitForLoad = eventToPromise('load', img);
    const canvas = crLottieElement.$.canvas;
    img.setAttribute('src', canvas.toDataURL());
    await waitForLoad;
    const imgCanvas = document.createElement('canvas');
    imgCanvas.width = canvas.width;
    imgCanvas.height = canvas.height;
    const context = imgCanvas.getContext('2d')!;
    context.drawImage(img, 0, 0);
    // Sample the center pixel of the copied frame.
    return Array.from(
        context.getImageData(canvas.width / 2, canvas.height / 2, 1, 1).data);
  }
  // Resizing the parent container should propagate to the canvas and fire a
  // cr-lottie-resized event with the new dimensions.
  test('TestResize', async () => {
    createLottieElement();
    await waitForInitializeEvent;
    await waitForPlayingEvent;
    const newHeight = 300;
    const newWidth = 400;
    const waitForResizeEvent =
        /** @type {!Promise<!CustomEvent<{width: number, height: number}>>} */ (
            eventToPromise('cr-lottie-resized', crLottieElement));
    // Update size of parent div container to see if the canvas is resized.
    container.style.width = newWidth + 'px';
    container.style.height = newHeight + 'px';
    const resizeEvent = await waitForResizeEvent;
    assertEquals(resizeEvent.detail.height, newHeight);
    assertEquals(resizeEvent.detail.width, newWidth);
  });
  // setPlay(false) should fire cr-lottie-paused and setPlay(true) should
  // resume and fire cr-lottie-playing again.
  test('TestPlayPause', async () => {
    createLottieElement();
    await waitForInitializeEvent;
    await waitForPlayingEvent;
    const waitForPauseEvent =
        eventToPromise('cr-lottie-paused', crLottieElement);
    crLottieElement.setPlay(false);
    await waitForPauseEvent;
    waitForPlayingEvent = eventToPromise('cr-lottie-playing', crLottieElement);
    crLottieElement.setPlay(true);
    await waitForPlayingEvent;
  });
  // setPlay() calls issued before initialization finishes should just toggle
  // the autoplay flag, and the animation should still start afterwards.
  test('TestPlayBeforeInit', async () => {
    createLottieElement();
    assertTrue(crLottieElement.autoplay);
    crLottieElement.setPlay(false);
    assertFalse(crLottieElement.autoplay);
    crLottieElement.setPlay(true);
    assertTrue(crLottieElement.autoplay);
    await waitForInitializeEvent;
    await waitForPlayingEvent;
  });
  // The green sample animation should actually render: the center pixel of a
  // captured frame must be solid green.
  test('TestRenderFrame', async () => {
    // TODO(crbug.com/1108915): Offscreen canvas has a race issue when used in
    // this test framework. To ensure that we capture a frame from the animation
    // and not an empty frame, we delay the capture by 2 seconds.
    // Note: This issue is only observed in tests.
    const kRaceTimeout = 2000;
    createLottieElement();
    await waitForInitializeEvent;
    await waitForPlayingEvent;
    const waitForFrameRender = new Promise<void>(function(resolve) {
      window.setTimeout(resolve, kRaceTimeout);
    });
    await waitForFrameRender;
    assertDeepEquals(GREEN_PIXEL, await samplePixel());
  });
  // Swapping animationUrl should stop the old animation, re-initialize, and
  // render the new (blue) animation.
  test('TestChangeAnimationUrl', async () => {
    // TODO(crbug.com/1108915): Offscreen canvas has a race issue when used in
    // this test framework. To ensure that we capture a frame from the animation
    // and not an empty frame, we delay the capture by 2 seconds.
    // Note: This issue is only observed in tests.
    const kRaceTimeout = 2000;
    createLottieElement();
    await waitForInitializeEvent;
    await waitForPlayingEvent;
    const waitForStoppedEvent =
        eventToPromise('cr-lottie-stopped', crLottieElement);
    // Re-arm the init/playing promises before triggering the reload.
    waitForInitializeEvent =
        eventToPromise('cr-lottie-initialized', crLottieElement);
    waitForPlayingEvent = eventToPromise('cr-lottie-playing', crLottieElement);
    crLottieElement.animationUrl = SAMPLE_LOTTIE_BLUE;
    // The previous animation should be cleared and stopped between loading.
    // Unfortunately since the offscreen canvas is rendered asynchronously,
    // there is no way to grab a frame in between events and have it guaranteed
    // to be the empty frame. At least wait for the `cr-lottie-stopped` event.
    await waitForStoppedEvent;
    await waitForInitializeEvent;
    await waitForPlayingEvent;
    const waitForFrameRender = new Promise<void>(function(resolve) {
      setTimeout(resolve, kRaceTimeout);
    });
    await waitForFrameRender;
    assertDeepEquals(BLUE_PIXEL, await samplePixel());
  });
  // The element's `hidden` attribute should be mirrored onto the canvas.
  // NOTE(review): this test does not call createLottieElement(), so it relies
  // on `crLottieElement`/`canvas`/`waitForPlayingEvent` left over from a
  // previously-run test — confirm this ordering dependency is intended.
  test('TestHidden', async () => {
    await waitForPlayingEvent;
    assertFalse(canvas.hidden);
    crLottieElement.hidden = true;
    assertTrue(canvas.hidden);
  });
  // Removing the element while the animation image request is in flight must
  // abort that XHR. All XHR traffic is mocked so the expectations below fully
  // script the element's network activity.
  test('TestDetachBeforeImageLoaded', async () => {
    const mockXhr = {
      onreadystatechange: () => {},
    } as unknown as XMLHttpRequest;
    mockXhr.open = mockController.createFunctionMock(mockXhr, 'open') as any;
    mockXhr.send = mockController.createFunctionMock(mockXhr, 'send') as any;
    mockXhr.abort = mockController.createFunctionMock(mockXhr, 'abort') as any;
    const mockXhrConstructor =
        mockController.createFunctionMock(window, 'XMLHttpRequest');
    // Expectations for loading the worker.
    mockXhrConstructor.addExpectation();
    (mockXhr.open as unknown as MockMethod)
        .addExpectation(
            'GET', 'chrome://resources/lottie/lottie_worker.min.js', true);
    (mockXhr.send as unknown as MockMethod).addExpectation();
    // Expectations for loading the image and aborting it.
    mockXhrConstructor.addExpectation();
    (mockXhr.open as unknown as MockMethod)
        .addExpectation('GET', SAMPLE_LOTTIE_GREEN, true);
    (mockXhr.send as unknown as MockMethod).addExpectation();
    (mockXhr.abort as unknown as MockMethod).addExpectation();
    mockXhrConstructor.returnValue = mockXhr;
    createLottieElement();
    // Return the lottie worker.
    Object.defineProperty(mockXhr, 'response', {value: lottieWorkerJs});
    Object.defineProperty(mockXhr, 'readyState', {value: 4});
    Object.defineProperty(mockXhr, 'status', {value: 200});
    // NOTE(review): 'readystatchange' is misspelled, but the Event argument is
    // unused by the handler, so it has no effect here.
    mockXhr.onreadystatechange!(new Event('readystatchange'));
    // Detaching the element before the image has loaded should abort the
    // request.
    crLottieElement.remove();
    mockController.verifyMocks();
  });
  // Setting a new animationUrl while the previous image request is still in
  // flight must abort the old request and issue a new one.
  test('TestLoadNewImageWhileOldImageIsStillLoading', async () => {
    const mockXhr = {
      onreadystatechange: () => {},
    } as unknown as XMLHttpRequest;
    mockXhr.open = mockController.createFunctionMock(mockXhr, 'open') as any;
    mockXhr.send = mockController.createFunctionMock(mockXhr, 'send') as any;
    mockXhr.abort = mockController.createFunctionMock(mockXhr, 'abort') as any;
    const mockXhrConstructor =
        mockController.createFunctionMock(window, 'XMLHttpRequest');
    // Expectations for loading the worker.
    mockXhrConstructor.addExpectation();
    (mockXhr.open as unknown as MockMethod)
        .addExpectation(
            'GET', 'chrome://resources/lottie/lottie_worker.min.js', true);
    (mockXhr.send as unknown as MockMethod).addExpectation();
    // Expectations for loading the first image and aborting it.
    mockXhrConstructor.addExpectation();
    (mockXhr.open as unknown as MockMethod)
        .addExpectation('GET', SAMPLE_LOTTIE_GREEN, true);
    (mockXhr.send as unknown as MockMethod).addExpectation();
    (mockXhr.abort as unknown as MockMethod).addExpectation();
    // Expectations for loading the second image.
    mockXhrConstructor.addExpectation();
    (mockXhr.open as unknown as MockMethod)
        .addExpectation('GET', SAMPLE_LOTTIE_BLUE, true);
    (mockXhr.send as unknown as MockMethod).addExpectation();
    mockXhrConstructor.returnValue = mockXhr;
    createLottieElement();
    // Return the lottie worker.
    Object.defineProperty(mockXhr, 'response', {value: lottieWorkerJs});
    Object.defineProperty(mockXhr, 'readyState', {value: 4});
    Object.defineProperty(mockXhr, 'status', {value: 200});
    // NOTE(review): 'readystatchange' is misspelled, but the Event argument is
    // unused by the handler, so it has no effect here.
    mockXhr.onreadystatechange!(new Event('readystatchange'));
    // Attempting to load a new image should abort the first request and start a
    // new one.
    crLottieElement.animationUrl = SAMPLE_LOTTIE_BLUE;
    mockController.verifyMocks();
  });
}); | the_stack |
import { Handle } from './Destroyable';
import { DNode, RenderResult } from './interfaces';
import { isWNode, isVNode } from './vdom';
// Cached prototype methods, so lookups stay correct even when objects shadow
// these names with own properties.
const slice = Array.prototype.slice;
const hasOwnProperty = Object.prototype.hasOwnProperty;
/**
 * Callback invoked for each matched node; calling `breaker` halts traversal.
 */
export interface Modifier<T extends DNode> {
	(dNode: T, breaker: () => void): void;
}
/**
 * Type-guard used to select which nodes a modifier should be applied to.
 */
export interface Predicate<T extends DNode> {
	(dNode: DNode): dNode is T;
}
/**
 * Options for decorating a DNode tree: the modifier to run, an optional
 * predicate to filter nodes, and whether to stop descending after a match.
 */
export interface DecorateOptions<T extends DNode> {
	modifier: Modifier<T>;
	predicate?: Predicate<T>;
	shallow?: boolean;
}
/**
 * Type guard deciding whether a value should be deep-copied.
 * Only values whose default `toString` tag is exactly '[object Object]'
 * (i.e. plain objects, not arrays, dates, host objects, ...) qualify.
 * Note: Symbol.toStringTag can spoof this tag in ES6; that rare edge case is
 * deliberately not handled here.
 * @param value The value to check
 * @return If the value is coercible into an Object
 */
function shouldDeepCopyObject(value: any): value is Object {
	const tag = Object.prototype.toString.call(value);
	return tag === '[object Object]';
}
/**
 * Returns a deep copy of `array`: nested arrays are copied recursively,
 * plain objects are deep-copied via `_mixin`, and every other value is
 * shared by reference.
 */
function copyArray<T>(array: T[], inherited: boolean): T[] {
	return array.map((item: T): T => {
		if (Array.isArray(item)) {
			return copyArray(item, inherited) as any;
		}
		if (!shouldDeepCopyObject(item)) {
			return item;
		}
		return _mixin({
			deep: true,
			inherited: inherited,
			sources: <Array<T>>[item],
			target: <T>{}
		});
	});
}
/**
 * Internal argument bag for `_mixin`.
 * `deep` requests recursive copying, `inherited` includes prototype
 * properties, and `copied` tracks already-visited objects to avoid cycles.
 */
interface MixinArgs<T extends {}, U extends {}> {
	deep: boolean;
	inherited: boolean;
	sources: (U | null | undefined)[];
	target: T;
	copied?: any[];
}
// Core copy routine behind deepAssign/deepMixin: copies enumerable properties
// from each source onto `target`, optionally recursing into arrays and plain
// objects, and skipping values already seen to guard against cycles.
function _mixin<T extends {}, U extends {}>(kwArgs: MixinArgs<T, U>): T & U {
	const deep = kwArgs.deep;
	const inherited = kwArgs.inherited;
	const target: any = kwArgs.target;
	const copied = kwArgs.copied || [];
	// Snapshot of the visited list; checks use the snapshot while new entries
	// accumulate in `copied` for deeper recursion levels.
	const copiedClone = [...copied];
	for (let i = 0; i < kwArgs.sources.length; i++) {
		const source = kwArgs.sources[i];
		if (source === null || source === undefined) {
			continue;
		}
		for (let key in source) {
			// With `inherited` set, prototype-chain properties are copied too.
			if (inherited || hasOwnProperty.call(source, key)) {
				let value: any = source[key];
				// Skip values already visited on this path (cycle guard).
				if (copiedClone.indexOf(value) !== -1) {
					continue;
				}
				if (deep) {
					if (Array.isArray(value)) {
						value = copyArray(value, inherited);
					} else if (shouldDeepCopyObject(value)) {
						const targetValue: any = target[key] || {};
						// NOTE(review): the *source* container (not `value`) is recorded
						// here, so back-references to the parent are skipped on the next
						// level — confirm this asymmetry is the intended cycle handling.
						copied.push(source);
						value = _mixin({
							deep: true,
							inherited: inherited,
							sources: [value],
							target: targetValue,
							copied
						});
					}
				}
				target[key] = value;
			}
		}
	}
	return <T & U>target;
}
/**
 * Copies the values of all enumerable own properties of one or more source objects to the target object,
 * recursively copying all nested objects and arrays as well.
 *
 * @param target The target object to receive values from source objects
 * @param sources Any number of objects whose enumerable own properties will be copied to the target object
 * @return The modified target object
 */
// Overloads for up to six sources preserve the precise intersection type of
// the result; the variadic implementation handles any count at runtime.
export function deepAssign<
	T extends {},
	U extends {},
	V extends {},
	W extends {},
	X extends {},
	Y extends {},
	Z extends {}
>(target: T, source1: U, source2: V, source3: W, source4: X, source5: Y, source6: Z): T & U & V & W & X & Y & Z;
export function deepAssign<T extends {}, U extends {}, V extends {}, W extends {}, X extends {}, Y extends {}>(
	target: T,
	source1: U,
	source2: V,
	source3: W,
	source4: X,
	source5: Y
): T & U & V & W & X & Y;
export function deepAssign<T extends {}, U extends {}, V extends {}, W extends {}, X extends {}>(
	target: T,
	source1: U,
	source2: V,
	source3: W,
	source4: X
): T & U & V & W & X;
export function deepAssign<T extends {}, U extends {}, V extends {}, W extends {}>(
	target: T,
	source1: U,
	source2: V,
	source3: W
): T & U & V & W;
export function deepAssign<T extends {}, U extends {}, V extends {}>(target: T, source1: U, source2: V): T & U & V;
export function deepAssign<T extends {}, U extends {}>(target: T, source: U): T & U;
export function deepAssign(target: any, ...sources: any[]): any {
	// Own properties only (inherited: false), deep copy enabled.
	return _mixin({
		deep: true,
		inherited: false,
		sources: sources,
		target: target
	});
}
/**
 * Copies the values of all enumerable (own or inherited) properties of one or more source objects to the
 * target object, recursively copying all nested objects and arrays as well.
 *
 * @param target The target object to receive values from source objects
 * @param sources Any number of objects whose enumerable properties will be copied to the target object
 * @return The modified target object
 */
export function deepMixin<
    T extends {},
    U extends {},
    V extends {},
    W extends {},
    X extends {},
    Y extends {},
    Z extends {}
>(target: T, source1: U, source2: V, source3: W, source4: X, source5: Y, source6: Z): T & U & V & W & X & Y & Z;
export function deepMixin<T extends {}, U extends {}, V extends {}, W extends {}, X extends {}, Y extends {}>(
    target: T,
    source1: U,
    source2: V,
    source3: W,
    source4: X,
    source5: Y
): T & U & V & W & X & Y;
export function deepMixin<T extends {}, U extends {}, V extends {}, W extends {}, X extends {}>(
    target: T,
    source1: U,
    source2: V,
    source3: W,
    source4: X
): T & U & V & W & X;
export function deepMixin<T extends {}, U extends {}, V extends {}, W extends {}>(
    target: T,
    source1: U,
    source2: V,
    source3: W
): T & U & V & W;
export function deepMixin<T extends {}, U extends {}, V extends {}>(target: T, source1: U, source2: V): T & U & V;
export function deepMixin<T extends {}, U extends {}>(target: T, source: U): T & U;
export function deepMixin(target: any, ...sources: any[]): any {
    // Deep copy that also walks the prototype chain (`inherited: true`).
    return _mixin({ deep: true, inherited: true, sources, target });
}
/**
 * Copies the values of all enumerable (own or inherited) properties of one or more source objects to the
 * target object. Nested objects and arrays are copied by reference (shallow copy).
 *
 * @param target The target object to receive values from source objects
 * @param sources Any number of objects whose enumerable properties will be copied to the target object
 * @return The modified target object
 */
export function mixin<T extends {}, U extends {}, V extends {}, W extends {}, X extends {}, Y extends {}, Z extends {}>(
    target: T,
    source1: U,
    source2: V,
    source3: W,
    source4: X,
    source5: Y,
    source6: Z
): T & U & V & W & X & Y & Z;
export function mixin<T extends {}, U extends {}, V extends {}, W extends {}, X extends {}, Y extends {}>(
    target: T,
    source1: U,
    source2: V,
    source3: W,
    source4: X,
    source5: Y
): T & U & V & W & X & Y;
export function mixin<T extends {}, U extends {}, V extends {}, W extends {}, X extends {}>(
    target: T,
    source1: U,
    source2: V,
    source3: W,
    source4: X
): T & U & V & W & X;
export function mixin<T extends {}, U extends {}, V extends {}, W extends {}>(
    target: T,
    source1: U,
    source2: V,
    source3: W
): T & U & V & W;
export function mixin<T extends {}, U extends {}, V extends {}>(target: T, source1: U, source2: V): T & U & V;
export function mixin<T extends {}, U extends {}>(target: T, source: U): T & U;
export function mixin(target: any, ...sources: any[]): any {
    // Shallow copy including inherited enumerable properties.
    return _mixin({ deep: false, inherited: true, sources, target });
}
/**
 * Returns a function which invokes the given function with the given arguments prepended to its argument list.
 * Like `Function.prototype.bind`, but does not alter execution context: the returned wrapper forwards
 * whatever `this` it is called with.
 *
 * @param targetFunction The function that needs to be bound
 * @param suppliedArgs An optional array of arguments to prepend to the `targetFunction` arguments list
 * @return The bound function
 */
export function partial(targetFunction: (...args: any[]) => any, ...suppliedArgs: any[]): (...args: any[]) => any {
    return function(this: any, ...callArgs: any[]) {
        // When no extra arguments are supplied, reuse the prepended list as-is.
        const combined: any[] = callArgs.length ? suppliedArgs.concat(callArgs) : suppliedArgs;
        return targetFunction.apply(this, combined);
    };
}
/**
 * Schedules `callback` to run no sooner than `delay` milliseconds from now, re-arming the timer
 * if the host fires it early (some environments fire timers slightly before the requested delay).
 * When `delay` is omitted, the callback runs on the next timer tick.
 *
 * @param callback The function to invoke once the minimum delay has elapsed
 * @param delay Minimum delay in milliseconds; `undefined` means "as soon as possible"
 * @return A handle whose `destroy()` cancels the pending callback; safe to call more than once
 */
export function guaranteeMinimumTimeout(callback: (...args: any[]) => void, delay?: number): Handle {
    const scheduledAt = Date.now();
    let timerId: any;
    const check = () => {
        const elapsed = Date.now() - scheduledAt;
        if (delay == null || elapsed >= delay) {
            callback();
        } else {
            // Fired early — re-arm for the remaining time.
            timerId = setTimeout(check, delay - elapsed);
        }
    };
    timerId = setTimeout(check, delay);
    return {
        destroy() {
            if (timerId != null) {
                clearTimeout(timerId);
                timerId = null;
            }
        }
    };
}
/**
 * Returns a debounced wrapper around `callback`: each call restarts the countdown, and the
 * callback only runs once `delay` milliseconds have passed with no further calls. The last
 * call's `this` and arguments are the ones forwarded.
 */
export function debounce<T extends (this: any, ...args: any[]) => void>(callback: T, delay: number): T {
    let pending: Handle | null;
    return <T>function() {
        // Any call within the window cancels the previously scheduled invocation.
        pending?.destroy();
        let self = this;
        let callArgs: any | null = arguments;
        pending = guaranteeMinimumTimeout(function() {
            callback.apply(self, callArgs);
            // Release captured references so they can be garbage-collected.
            callArgs = self = pending = null;
        }, delay);
    };
}
/**
 * Returns a throttled wrapper around `callback`: the first call runs immediately, and any
 * further calls are dropped until `delay` milliseconds have elapsed.
 */
export function throttle<T extends (this: any, ...args: any[]) => void>(callback: T, delay: number): T {
    let suppressed: boolean | null;
    return <T>function() {
        if (suppressed) {
            // Still inside the throttle window — drop this call entirely.
            return;
        }
        suppressed = true;
        callback.apply(this, <any>arguments);
        guaranteeMinimumTimeout(function() {
            suppressed = null;
        }, delay);
    };
}
/**
 * Generates a random RFC 4122 version-4 UUID string using `Math.random`.
 * Note: not cryptographically secure — suitable for non-security identifiers only.
 */
export function uuid(): string {
    return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (placeholder) => {
        const random = (Math.random() * 16) | 0;
        // 'y' positions encode the UUID variant: top two bits must be 10 (8, 9, a, or b).
        const digit = placeholder === 'x' ? random : (random & 0x3) | 0x8;
        return digit.toString(16);
    });
}
/**
 * Generic decorate function for DNodes. The nodes are modified in place based on the provided predicate
 * and modifier functions.
 *
 * The children of each node are flattened and added to the array for decoration (breadth-first order).
 *
 * If no predicate is supplied then the modifier will be executed on all nodes. A `breaker` function is passed to the
 * modifier which will drain the nodes array and exit the decoration.
 *
 * When the `shallow` options is set to `true` the only the top node or nodes will be decorated (only supported using
 * `DecorateOptions`).
 *
 * @param dNodes A single node or an array of nodes to decorate
 * @param optionsOrModifier Either a `DecorateOptions` bundle or a bare modifier function
 * @param predicate Optional filter; the modifier only runs on nodes for which it returns true
 * @return The same `dNodes` value that was passed in (nodes are mutated in place)
 */
export function decorate<T extends DNode>(dNodes: DNode, options: DecorateOptions<T>): DNode;
export function decorate<T extends DNode>(dNodes: DNode[], options: DecorateOptions<T>): DNode[];
export function decorate<T extends DNode>(dNodes: DNode | DNode[], options: DecorateOptions<T>): DNode | DNode[];
export function decorate<T extends DNode>(dNodes: DNode, modifier: Modifier<T>, predicate: Predicate<T>): DNode;
export function decorate<T extends DNode>(dNodes: DNode[], modifier: Modifier<T>, predicate: Predicate<T>): DNode[];
export function decorate<T extends DNode>(
    dNodes: RenderResult,
    modifier: Modifier<T>,
    predicate: Predicate<T>
): RenderResult;
export function decorate(dNodes: DNode, modifier: Modifier<DNode>): DNode;
export function decorate(dNodes: DNode[], modifier: Modifier<DNode>): DNode[];
export function decorate(dNodes: RenderResult, modifier: Modifier<DNode>): RenderResult;
export function decorate(
    dNodes: DNode | DNode[],
    optionsOrModifier: Modifier<DNode> | DecorateOptions<DNode>,
    predicate?: Predicate<DNode>
): DNode | DNode[] {
    let shallow = false;
    let modifier;
    // Normalize the two call shapes: a bare modifier function, or an options object.
    if (typeof optionsOrModifier === 'function') {
        modifier = optionsOrModifier;
    } else {
        modifier = optionsOrModifier.modifier;
        predicate = optionsOrModifier.predicate;
        shallow = optionsOrModifier.shallow || false;
    }
    // Work on a copy of the top-level list so the caller's array is not consumed.
    let nodes = Array.isArray(dNodes) ? [...dNodes] : [dNodes];
    // `breaker` rebinds `nodes` to an empty array, so the while loop below
    // terminates after the current iteration finishes.
    function breaker() {
        nodes = [];
    }
    while (nodes.length) {
        const node = nodes.shift();
        // DNode may be a falsy value or `true`; only real nodes are processed.
        if (node && node !== true) {
            // Children are queued BEFORE the modifier runs, so a modifier that calls
            // `breaker` also discards the current node's children.
            if (!shallow && (isWNode(node) || isVNode(node)) && node.children) {
                nodes = [...nodes, ...node.children];
            }
            if (!predicate || predicate(node)) {
                modifier(node, breaker);
            }
        }
    }
    return dNodes;
}
/**
 * Ambient (compile-time only) declarations for the board's hardware pins.
 *
 * The `//%` attribute comments are PXT pragmas: each one binds the following
 * constant to a native pin instance via the `pxt::getPin(...)` shim. The PXT
 * toolchain parses the pragma text verbatim — do not reword or reformat it.
 */
declare namespace pins {
    //% fixedInstance shim=pxt::getPin(PIN_LED)
    const LED: PwmOnlyPin;
    //% fixedInstance shim=pxt::getPin(PIN_A0)
    const A0: PwmPin;
    //% fixedInstance shim=pxt::getPin(PIN_A1)
    const A1: PwmPin;
    //% fixedInstance shim=pxt::getPin(PIN_A2)
    const A2: PwmPin;
    //% fixedInstance shim=pxt::getPin(PIN_A3)
    const A3: PwmPin;
    //% fixedInstance shim=pxt::getPin(PIN_A4)
    const A4: PwmPin;
    //% fixedInstance shim=pxt::getPin(PIN_A5)
    const A5: PwmPin;
    //% fixedInstance shim=pxt::getPin(PIN_A6)
    const A6: PwmPin;
    //% fixedInstance shim=pxt::getPin(PIN_A7)
    const A7: PwmPin;
    //% fixedInstance shim=pxt::getPin(PIN_A8)
    const A8: PwmPin;
    //% fixedInstance shim=pxt::getPin(PIN_A9)
    const A9: PwmPin;
    //% fixedInstance shim=pxt::getPin(PIN_A10)
    const A10: PwmPin;
    //% fixedInstance shim=pxt::getPin(PIN_A11)
    const A11: PwmPin;
    //% fixedInstance shim=pxt::getPin(PIN_A12)
    const A12: PwmPin;
    //% fixedInstance shim=pxt::getPin(PIN_A13)
    const A13: PwmPin;
    //% fixedInstance shim=pxt::getPin(PIN_A14)
    const A14: PwmPin;
    //% fixedInstance shim=pxt::getPin(PIN_A15)
    const A15: PwmPin;
    //% fixedInstance shim=pxt::getPin(PIN_D0)
    const D0: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D1)
    const D1: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D2)
    const D2: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D3)
    const D3: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D4)
    const D4: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D5)
    const D5: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D6)
    const D6: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D7)
    const D7: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D8)
    const D8: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D9)
    const D9: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D10)
    const D10: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D11)
    const D11: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D12)
    const D12: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D13)
    const D13: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D14)
    const D14: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D15)
    const D15: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D16)
    const D16: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D17)
    const D17: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D18)
    const D18: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D19)
    const D19: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D20)
    const D20: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D21)
    const D21: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D22)
    const D22: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D23)
    const D23: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D24)
    const D24: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D25)
    const D25: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D26)
    const D26: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D27)
    const D27: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D28)
    const D28: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D29)
    const D29: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D30)
    const D30: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D31)
    const D31: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D32)
    const D32: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D33)
    const D33: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D34)
    const D34: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D35)
    const D35: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D36)
    const D36: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D37)
    const D37: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D38)
    const D38: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D39)
    const D39: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D40)
    const D40: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D41)
    const D41: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D42)
    const D42: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D43)
    const D43: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D44)
    const D44: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D45)
    const D45: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D46)
    const D46: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D47)
    const D47: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D48)
    const D48: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D49)
    const D49: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D50)
    const D50: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D51)
    const D51: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D52)
    const D52: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_D53)
    const D53: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_MISO)
    const MISO: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_MOSI)
    const MOSI: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_SCK)
    const SCK: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_SCL)
    const SCL: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_SDA)
    const SDA: DigitalInOutPin;
    // TODO only checked the following two for Metro M0
    //% fixedInstance shim=pxt::getPin(PIN_RXLED)
    const RXLED: DigitalInOutPin;
    // NOTE(review): the shim below references PIN_TX while the constant is named
    // TXLED (RXLED above uses PIN_RXLED) — confirm the intended pin constant.
    //% fixedInstance shim=pxt::getPin(PIN_TX)
    const TXLED: DigitalInOutPin;
    //% fixedInstance shim=pxt::getPin(PIN_NEOPIXEL)
    const NEOPIXEL: DigitalInOutPin;
}
/**
 * Ambient (compile-time only) declarations for button inputs.
 *
 * Each constant models a push button wired between the named pin and GND
 * (active-low with internal pull-up, per BUTTON_ACTIVE_LOW_PULL_UP). The `//%`
 * attribute comments are PXT pragmas parsed verbatim by the toolchain — do not
 * reword or reformat them.
 */
declare namespace input {
    /**
     * Button connecting A0 to GND.
     */
    //% block="button A0" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_A0,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonA0: Button;
    /**
     * Button connecting A1 to GND.
     */
    //% block="button A1" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_A1,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonA1: Button;
    /**
     * Button connecting A2 to GND.
     */
    //% block="button A2" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_A2,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonA2: Button;
    /**
     * Button connecting A3 to GND.
     */
    //% block="button A3" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_A3,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonA3: Button;
    /**
     * Button connecting A4 to GND.
     */
    //% block="button A4" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_A4,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonA4: Button;
    /**
     * Button connecting A5 to GND.
     */
    //% block="button A5" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_A5,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonA5: Button;
    /**
     * Button connecting A6 to GND.
     */
    //% block="button A6" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_A6,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonA6: Button;
    /**
     * Button connecting A7 to GND.
     */
    //% block="button A7" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_A7,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonA7: Button;
    /**
     * Button connecting A8 to GND.
     */
    //% block="button A8" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_A8,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonA8: Button;
    /**
     * Button connecting A9 to GND.
     */
    //% block="button A9" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_A9,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonA9: Button;
    /**
     * Button connecting A10 to GND.
     */
    //% block="button A10" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_A10,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonA10: Button;
    /**
     * Button connecting A11 to GND.
     */
    //% block="button A11" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_A11,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonA11: Button;
    /**
     * Button connecting A12 to GND.
     */
    //% block="button A12" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_A12,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonA12: Button;
    /**
     * Button connecting A13 to GND.
     */
    //% block="button A13" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_A13,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonA13: Button;
    /**
     * Button connecting A14 to GND.
     */
    //% block="button A14" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_A14,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonA14: Button;
    /**
     * Button connecting A15 to GND.
     */
    //% block="button A15" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_A15,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonA15: Button;
    /**
     * Button connecting D0 to GND.
     */
    //% block="button D0" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D0,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD0: Button;
    /**
     * Button connecting D1 to GND.
     */
    //% block="button D1" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D1,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD1: Button;
    /**
     * Button connecting D2 to GND.
     */
    //% block="button D2" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D2,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD2: Button;
    /**
     * Button connecting D3 to GND.
     */
    //% block="button D3" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D3,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD3: Button;
    /**
     * Button connecting D4 to GND.
     */
    //% block="button D4" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D4,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD4: Button;
    /**
     * Button connecting D5 to GND.
     */
    //% block="button D5" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D5,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD5: Button;
    /**
     * Button connecting D6 to GND.
     */
    //% block="button D6" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D6,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD6: Button;
    /**
     * Button connecting D7 to GND.
     */
    //% block="button D7" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D7,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD7: Button;
    /**
     * Button connecting D8 to GND.
     */
    //% block="button D8" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D8,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD8: Button;
    /**
     * Button connecting D9 to GND.
     */
    //% block="button D9" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D9,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD9: Button;
    /**
     * Button connecting D10 to GND.
     */
    //% block="button D10" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D10,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD10: Button;
    /**
     * Button connecting D11 to GND.
     */
    //% block="button D11" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D11,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD11: Button;
    /**
     * Button connecting D12 to GND.
     */
    //% block="button D12" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D12,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD12: Button;
    /**
     * Button connecting D13 to GND.
     */
    //% block="button D13" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D13,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD13: Button;
    /**
     * Button connecting D14 to GND.
     */
    //% block="button D14" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D14,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD14: Button;
    /**
     * Button connecting D15 to GND.
     */
    //% block="button D15" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D15,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD15: Button;
    /**
     * Button connecting D16 to GND.
     */
    //% block="button D16" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D16,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD16: Button;
    /**
     * Button connecting D17 to GND.
     */
    //% block="button D17" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D17,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD17: Button;
    /**
     * Button connecting D18 to GND.
     */
    //% block="button D18" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D18,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD18: Button;
    /**
     * Button connecting D19 to GND.
     */
    //% block="button D19" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D19,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD19: Button;
    /**
     * Button connecting D20 to GND.
     */
    //% block="button D20" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D20,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD20: Button;
    /**
     * Button connecting D21 to GND.
     */
    //% block="button D21" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D21,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD21: Button;
    /**
     * Button connecting D22 to GND.
     */
    //% block="button D22" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D22,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD22: Button;
    /**
     * Button connecting D23 to GND.
     */
    //% block="button D23" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D23,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD23: Button;
    /**
     * Button connecting D24 to GND.
     */
    //% block="button D24" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D24,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD24: Button;
    /**
     * Button connecting D25 to GND.
     */
    //% block="button D25" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D25,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD25: Button;
    /**
     * Button connecting D26 to GND.
     */
    //% block="button D26" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D26,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD26: Button;
    /**
     * Button connecting D27 to GND.
     */
    //% block="button D27" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D27,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD27: Button;
    /**
     * Button connecting D28 to GND.
     */
    //% block="button D28" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D28,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD28: Button;
    /**
     * Button connecting D29 to GND.
     */
    //% block="button D29" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D29,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD29: Button;
    /**
     * Button connecting D30 to GND.
     */
    //% block="button D30" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D30,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD30: Button;
    /**
     * Button connecting D31 to GND.
     */
    //% block="button D31" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D31,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD31: Button;
    /**
     * Button connecting D32 to GND.
     */
    //% block="button D32" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D32,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD32: Button;
    /**
     * Button connecting D33 to GND.
     */
    //% block="button D33" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D33,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD33: Button;
    /**
     * Button connecting D34 to GND.
     */
    //% block="button D34" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D34,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD34: Button;
    /**
     * Button connecting D35 to GND.
     */
    //% block="button D35" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D35,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD35: Button;
    /**
     * Button connecting D36 to GND.
     */
    //% block="button D36" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D36,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD36: Button;
    /**
     * Button connecting D37 to GND.
     */
    //% block="button D37" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D37,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD37: Button;
    /**
     * Button connecting D38 to GND.
     */
    //% block="button D38" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D38,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD38: Button;
    /**
     * Button connecting D39 to GND.
     */
    //% block="button D39" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D39,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD39: Button;
    /**
     * Button connecting D40 to GND.
     */
    //% block="button D40" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D40,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD40: Button;
    /**
     * Button connecting D41 to GND.
     */
    //% block="button D41" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D41,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD41: Button;
    /**
     * Button connecting D42 to GND.
     */
    //% block="button D42" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D42,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD42: Button;
    /**
     * Button connecting D43 to GND.
     */
    //% block="button D43" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D43,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD43: Button;
    /**
     * Button connecting D44 to GND.
     */
    //% block="button D44" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D44,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD44: Button;
    /**
     * Button connecting D45 to GND.
     */
    //% block="button D45" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D45,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD45: Button;
    /**
     * Button connecting D46 to GND.
     */
    //% block="button D46" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D46,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD46: Button;
    /**
     * Button connecting D47 to GND.
     */
    //% block="button D47" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D47,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD47: Button;
    /**
     * Button connecting D48 to GND.
     */
    //% block="button D48" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D48,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD48: Button;
    /**
     * Button connecting D49 to GND.
     */
    //% block="button D49" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D49,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD49: Button;
    /**
     * Button connecting D50 to GND.
     */
    //% block="button D50" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D50,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD50: Button;
    /**
     * Button connecting D51 to GND.
     */
    //% block="button D51" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D51,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD51: Button;
    /**
     * Button connecting D52 to GND.
     */
    //% block="button D52" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D52,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD52: Button;
    /**
     * Button connecting D53 to GND.
     */
    //% block="button D53" fixedInstance
    //% shim=pxt::getButtonByPin(PIN_D53,BUTTON_ACTIVE_LOW_PULL_UP)
    //% parts="buttons"
    const buttonD53: Button;
}
import React, { useState, useEffect, useRef } from 'react';
import throttle from 'lodash.throttle';
import { CopyToClipboard } from 'react-copy-to-clipboard';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import {
faCamera,
faCopy,
faCheck,
faCheckCircle,
faTimes,
faChevronUp,
faChevronDown,
} from '@fortawesome/free-solid-svg-icons';
import Recorder from './recorder';
import Highlighter from './Highlighter';
import ActionList from './ActionList';
import CodeGen from './CodeGen';
import genSelectors, { getBestSelectorForAction } from '../builders/selector';
import { genCode } from '../builders';
import ScriptTypeSelect from '../Common/ScriptTypeSelect';
import { usePreferredLibrary, usePreferredBarPosition } from '../Common/hooks';
import type { Action } from '../types';
import {
ActionType,
ActionsMode,
ScriptType,
TagName,
BarPosition,
} from '../types';
import ControlBarStyle from './ControlBar.css';
import { endRecording } from '../Common/endRecording';
/**
 * A clickable control-bar button rendered as an icon with a small text label
 * underneath.
 *
 * @param onClick click handler for the whole button area
 * @param children the icon element (typically a FontAwesomeIcon) to render
 * @param label short text shown under the icon
 * @param testId optional value emitted as `data-testid` for test selectors
 */
const ActionButton = ({
  onClick,
  children,
  label,
  testId,
}: {
  onClick: () => void;
  children: JSX.Element;
  // Primitive `string`, not the boxed `String` wrapper type.
  label: string;
  testId?: string;
}) => (
  <div className="ActionButton" onClick={onClick} data-testid={testId}>
    <div>
      <div
        style={{
          height: 32,
          width: 32,
          position: 'relative',
          margin: '0 auto',
          marginBottom: '0.5em',
        }}
      >
        {children}
      </div>
      <div style={{ fontSize: 12, marginTop: 4 }}>{label}</div>
    </div>
  </div>
);
/**
 * Renders a one-line, human-readable description of a recorded action.
 * Unknown action types render as an empty string.
 */
function RenderActionText({ action }: { action: Action }) {
  // Selector text is always generated for the Playwright script flavor here.
  const selectorFor = (a: Action) =>
    getBestSelectorForAction(a, ScriptType.Playwright);
  let text = '';
  switch (action.type) {
    case ActionType.Click:
      text = `Click on ${action.tagName.toLowerCase()} ${selectorFor(action)}`;
      break;
    case ActionType.Hover:
      text = `Hover over ${action.tagName.toLowerCase()} ${selectorFor(
        action
      )}`;
      break;
    case ActionType.Input: {
      // Mask password values with one '*' per character.
      const shownValue = action.isPassword
        ? '*'.repeat(action?.value?.length ?? 0)
        : action.value;
      text = `Fill "${shownValue}" on ${action.tagName.toLowerCase()} ${selectorFor(
        action
      )}`;
      break;
    }
    case ActionType.Keydown:
      text = `Press ${action.key} on ${action.tagName.toLowerCase()}`;
      break;
    case ActionType.Load:
      text = `Load "${action.url}"`;
      break;
    case ActionType.Resize:
      text = `Resize window to ${action.width} x ${action.height}`;
      break;
    case ActionType.Wheel:
      text = `Scroll wheel by X:${action.deltaX}, Y:${action.deltaY}`;
      break;
    case ActionType.FullScreenshot:
      text = `Take full page screenshot`;
      break;
    case ActionType.AwaitText:
      text = `Wait for text "${action.text}"`;
      break;
    case ActionType.DragAndDrop:
      text = `Drag n Drop from (${action.sourceX}, ${action.sourceY}) to (${action.targetX}, ${action.targetY})`;
      break;
  }
  return <>{text}</>;
}
/**
 * Returns true when the element lives inside the recorder's own overlay UI
 * (the `#overlay-controls` container), so overlay interactions can be ignored.
 * A null/undefined element yields false.
 */
function isElementFromOverlay(element: HTMLElement) {
  // Optional chaining makes the whole expression undefined for a missing
  // element, and `undefined != null` is false — same result as the old guard.
  return element?.closest('#overlay-controls') != null;
}
export default function ControlBar({ onExit }: { onExit: () => void }) {
const [barPosition, setBarPosition] = usePreferredBarPosition(
BarPosition.Bottom
);
const [hoveredElement, setHoveredElement] = useState<HTMLElement | null>(
null
);
const [hoveredElementSelectors, setHoveredElementSelectors] = useState<any>(
{}
);
const [lastAction, setLastAction] = useState<Action | null>(null);
const [actions, setActions] = useState<Action[]>([]);
const [showAllActions, setShowAllActions] = useState<boolean>(false);
const [showActionsMode, setShowActionsMode] = useState<ActionsMode>(
ActionsMode.Code
);
const [preferredLibrary, setPreferredLibrary] = usePreferredLibrary();
const [copyCodeConfirm, setCopyCodeConfirm] = useState<boolean>(false);
const [screenshotConfirm, setScreenshotConfirm] = useState<boolean>(false);
const [isFinished, setIsFinished] = useState<boolean>(false);
const [isOpen, setIsOpen] = useState<boolean>(true);
const handleMouseMoveRef = useRef((_: MouseEvent) => {});
const recorderRef = useRef<Recorder | null>(null);
const onEndRecording = () => {
setIsFinished(true);
// Show Code
setShowAllActions(true);
// Clear out highlighter
document.removeEventListener('mousemove', handleMouseMoveRef.current, true);
setHoveredElement(null);
// Turn off recorder
recorderRef.current?.deregister();
endRecording();
};
const onClose = () => {
setIsOpen(false);
onExit();
};
useEffect(() => {
handleMouseMoveRef.current = throttle((event: MouseEvent) => {
const x = event.clientX,
y = event.clientY,
elementMouseIsOver = document.elementFromPoint(x, y) as HTMLElement;
if (
!isElementFromOverlay(elementMouseIsOver) &&
elementMouseIsOver != null
) {
const { parentElement } = elementMouseIsOver;
// Match the logic in recorder.ts for link clicks
const element =
parentElement?.tagName === 'A' ? parentElement : elementMouseIsOver;
setHoveredElement(element || null);
setHoveredElementSelectors(genSelectors(element));
}
}, 100);
document.addEventListener('mousemove', handleMouseMoveRef.current, true);
recorderRef.current = new Recorder({
onAction: (action: Action, actions: Action[]) => {
setLastAction(action);
setActions(actions);
},
onInitialized: (lastAction: Action, recording: Action[]) => {
setLastAction(
recording.reduceRight<Action | null>(
(p, v) => (p == null && v.type != 'navigate' ? v : p),
null
)
);
setActions(recording);
},
});
// Set recording to be finished if somewhere else (ex. popup) the state has been set to be finished
chrome.storage.onChanged.addListener((changes) => {
if (
changes.recordingState != null &&
changes.recordingState.newValue === 'finished' &&
// Firefox will fire change events even if the values are not changed
changes.recordingState.newValue !== changes.recordingState.oldValue
) {
if (!isFinished) {
onEndRecording();
}
}
});
}, []);
const displayedScriptType = preferredLibrary ?? ScriptType.Playwright;
const rect = hoveredElement?.getBoundingClientRect();
const displayedSelector = getBestSelectorForAction(
{
type: ActionType.Click,
tagName: (hoveredElement?.tagName ?? '') as TagName,
inputType: undefined,
value: undefined,
selectors: hoveredElementSelectors || {},
timestamp: 0,
isPassword: false,
hasOnlyText:
hoveredElement?.children?.length === 0 &&
hoveredElement?.innerText?.length > 0,
},
displayedScriptType
);
if (isOpen === false) {
return <> </>;
}
return (
<>
<style>{ControlBarStyle}</style>
{rect != null && rect.top != null && (
<Highlighter rect={rect} displayedSelector={displayedSelector ?? ''} />
)}
<div
className="ControlBar rr-ignore"
id="overlay-controls"
style={{
...(barPosition === BarPosition.Bottom
? {
bottom: 35,
}
: { top: 35 }),
height: showAllActions ? 330 : 100,
}}
>
{isFinished ? (
<div className="p-4">
<div className="d-flex justify-between mb-2">
<div className="text-xl">
<span className="mr-2" data-testid="recording-finished">
Recording Finished!
</span>
🎉
</div>
<div className="text-button" onClick={() => onClose()}>
<FontAwesomeIcon icon={faTimes} size="sm" />
</div>
</div>
<div className="d-flex justify-between items-center">
<div className="text-sm text-grey">
Below is the generated code for this recording.
</div>
<div className="d-flex">
<div
className="text-sm link-button"
onClick={() => setShowAllActions(!showAllActions)}
>
{showAllActions ? 'Collapse' : 'See'} Recording Steps{' '}
<FontAwesomeIcon
icon={showAllActions ? faChevronUp : faChevronDown}
/>
</div>
</div>
</div>
</div>
) : (
<div className="d-flex items-center">
<ActionButton
label="End Rec"
onClick={() => onEndRecording()}
testId="end-test"
>
<FontAwesomeIcon icon={faCheckCircle} size="2x" />
</ActionButton>
<div className="w-100 p-4">
<div className="d-flex justify-between" style={{ fontSize: 14 }}>
<div className="text-grey">Last Action</div>
<div
className="text-grey text-sm text-button"
onClick={() =>
setBarPosition(
barPosition === BarPosition.Bottom
? BarPosition.Top
: BarPosition.Bottom
)
}
>
Move Overlay to{' '}
{barPosition === BarPosition.Bottom ? 'Top' : 'Bottom'}
</div>
</div>
<div
className="d-flex justify-between items-end"
style={{ marginTop: 12 }}
>
<div className="last-action-preview">
{lastAction != null && (
<RenderActionText action={lastAction} />
)}
</div>
<div
className="text-sm link-button"
data-testid={
showAllActions ? 'show-less-actions' : 'show-more-actions'
}
onClick={() => setShowAllActions(!showAllActions)}
>
{showAllActions ? 'Collapse Overlay' : 'Expand Overlay'}{' '}
<FontAwesomeIcon
icon={showAllActions ? faChevronUp : faChevronDown}
/>
</div>
</div>
</div>
</div>
)}
{showAllActions && (
<div className="actions-wrapper p-4" style={{}}>
<div style={{ display: 'flex', justifyContent: 'space-between' }}>
<div className="mb-4">
<span
className="text-sm link-button mr-2"
data-testid={`show-${
showActionsMode === ActionsMode.Actions
? ActionsMode.Code
: ActionsMode.Actions
}`}
onClick={() => {
setShowActionsMode(
showActionsMode === ActionsMode.Actions
? ActionsMode.Code
: ActionsMode.Actions
);
}}
>
Show{' '}
{showActionsMode === ActionsMode.Actions ? 'Code' : 'Actions'}
</span>
{!isFinished && (
<span
className={`text-sm link-button mr-2 ${
screenshotConfirm ? 'text-green' : ''
}`}
data-testid="record-screenshot"
onClick={() => {
recorderRef.current?.onFullScreenshot();
setScreenshotConfirm(true);
setTimeout(() => {
setScreenshotConfirm(false);
}, 2000);
}}
>
<FontAwesomeIcon
icon={screenshotConfirm ? faCheck : faCamera}
size="sm"
/>{' '}
Record Screenshot
</span>
)}
</div>
<div>
{showActionsMode === ActionsMode.Code && (
<>
<ScriptTypeSelect
value={displayedScriptType}
onChange={setPreferredLibrary}
/>
<CopyToClipboard
text={genCode(actions, true, displayedScriptType)}
onCopy={() => {
setCopyCodeConfirm(true);
setTimeout(() => {
setCopyCodeConfirm(false);
}, 2000);
}}
>
<span
className={`text-sm link-button ${
copyCodeConfirm ? 'text-green' : ''
}`}
>
<FontAwesomeIcon
icon={copyCodeConfirm ? faCheck : faCopy}
size="sm"
/>{' '}
Copy Code
</span>
</CopyToClipboard>
</>
)}
</div>
</div>
{showActionsMode === ActionsMode.Code && (
<CodeGen actions={actions} library={displayedScriptType} />
)}
{showActionsMode === ActionsMode.Actions && (
<ActionList actions={actions} />
)}
</div>
)}
</div>
</>
);
} | the_stack |
* Group Service deals with the logic for categorical and numeric
* grouping.
*/
// tslint:disable:no-new-decorators
import * as d3 from 'd3'; // Used for creating bins, not visualization.
import {computed} from 'mobx';
import {FacetMap, GroupedExamples, IndexedInput} from '../lib/types';
import {facetMapToDictKey, findSpecKeys, roundToDecimalPlaces} from '../lib/utils';
import {LitService} from './lit_service';
import {AppState} from './state_service';
/**
* A map of categorical features to their possible values.
*/
export interface CategoricalFeatures {
  // Keyed by feature name, e.g. {'label': ['0', '1']}.
  [feature: string]: string[];
}
/**
* A map of numeric features to their min and max values.
*/
export interface NumericFeatures {
  // Tuple is [min, max] over the current dataset.
  [feature: string]: [number, number];
}
/**
 * A map of bin display labels (e.g. "[0, 5)") to their [start, end) numeric
 * ranges.
 */
interface NumericBins {
  [name: string]: number[];
}
/**
* A map of numeric features to their categorical bins.
*/
export interface NumericFeatureBins {
  // Keyed by feature name; each value maps bin labels to numeric ranges.
  [feature: string]: NumericBins;
}
/**
* Function type for getting a (possibly binned) feature value from an
* IndexedInput. Inputs are the faceting configs, the datapoint, its index in
* the current data, and the feature of interest.
*/
export type GetFeatureFunc = (
    // bins: precomputed numeric bins; datum: the example; index: its position
    // in the current data; feature: the feature name to look up.
    bins: NumericFeatureBins, datum: IndexedInput, index: number,
    feature: string) => number | string | number[] | null;
/**
* Enumeration of the different faceting methods supported by the GroupService.
*
* - DISCRETE: Creates a bin for every discrete value for this feature. For
* numerical features, this is derived by using the step configured for that
* feature in the dataset spec to traverse the features range. For
 *   categorical features, it creates a bin for each value in the vocabulary.
* For binary features, it creates 2 bins.
* - EQUAL_INTERVAL: Divides the domain into N equal-width bins
* - QUANTILE: Divides the range into N bins with an equal number of datapoints
* - THRESHOLD: Divides the range into 2 bins, [m, t) and [t, M] where m is the
* minimum value of the range, M is the maximum value of the range, and t is
* the threshold
*/
export enum FacetingMethod {
  DISCRETE = 'discrete',              // one bin per discrete value/step
  EQUAL_INTERVAL = 'equal-interval',  // N equal-width bins
  QUANTILE = 'quantile',              // N bins with equal datapoint counts
  THRESHOLD = 'threshold'             // 2 bins split at a threshold value
}
/**
* Definition for how to facet a feature into a series of bins
*
* The feature type associated with featureName impacts the behavior as follows.
*
* * Boolean features: method, numBins, and threshold are ignored as these
* features always produce 2 bins, one for `true` and one for `false`.
* * Categorical features: method, numBins, and threshold are ignored as these
* features always produce one bin for each of the labels in the vocabulary
* * Numerical features
* * `method` is strongly encouraged, omission will result in the
* generation of DISCRETE bins.
* * `numBins` is encouraged for the EQUAL_INTERVAL method; when provided
* and >= 1, the service generates that many bins, otherwise the service
* infers the correct number of bins to generate using the Freedman-
* Diaconis algorithm.
 *     * `numBins` is required for the QUANTILE method, where it must be >= 1.
 *     * `threshold` is required for the THRESHOLD method.
*/
export interface FacetingConfig {
  featureName: string;
  method?: FacetingMethod;  // defaults to DISCRETE when omitted
  numBins?: number;         // optional for EQUAL_INTERVAL, required for QUANTILE
  threshold?: number;       // required for THRESHOLD
}
/**
* A singleton class that handles grouping.
*/
export class GroupService extends LitService {
constructor(private readonly appState: AppState) {
super();
}
/** Get the names of categorical features. */
@computed
get categoricalFeatureNames(): string[] {
const dataSpec = this.appState.currentDatasetSpec;
const names = findSpecKeys(dataSpec, 'CategoryLabel');
return names;
}
/** Get the names of the numerical features. */
@computed
get numericalFeatureNames(): string[] {
const dataSpec = this.appState.currentDatasetSpec;
const names = findSpecKeys(dataSpec, 'Scalar');
return names;
}
/** Get the names of the boolean features. */
@computed
get booleanFeatureNames(): string[] {
const dataSpec = this.appState.currentDatasetSpec;
const names = findSpecKeys(dataSpec, 'Boolean');
return names;
}
/** Get the names of all dense features (boolean, categorical, and numeric) */
@computed
get denseFeatureNames(): string[] {
return [...this.categoricalFeatureNames, ...this.numericalFeatureNames,
...this.booleanFeatureNames];
}
/**
* Get the names of categorical features, and their possible values.
*/
@computed
get categoricalFeatures(): CategoricalFeatures {
const categoricalFeatures: CategoricalFeatures = {};
for (const name of this.categoricalFeatureNames) {
const vocab = this.appState.currentDatasetSpec[name].vocab;
if (vocab != null) {
// Use specified vocabulary, if available.
categoricalFeatures[name] = [...vocab];
} else {
// Otherwise, find unique values from the data.
const uniqueValues = new Set(this.appState.currentInputData.map(
(d: IndexedInput) => d.data[name]));
categoricalFeatures[name] = [...uniqueValues];
}
}
return categoricalFeatures;
}
/**
* Get the names of numeric features, and their min and max values.
*/
@computed
get numericalFeatureRanges(): NumericFeatures {
const numericFeatures: NumericFeatures = {};
this.numericalFeatureNames.forEach(feat => {
const values = this.appState.currentInputData.map((d: IndexedInput) => {
return d.data[feat];
});
const min = Math.min(...values);
const max = Math.max(...values);
numericFeatures[feat] = [min, max];
});
return numericFeatures;
}
private numericBinKey(start: number, end: number, isLast: boolean) {
return `[${start}, ${end}${isLast ? ']' : ')'}`;
}
private numericBinRange(start: number, end: number, isLast: boolean) {
return [start, end + (isLast ? 1e-6 : 0)];
}
private freedmanDiaconisBins(feat: string): NumericBins {
const min = this.numericalFeatureRanges[feat][0];
const max = this.numericalFeatureRanges[feat][1];
const values = this.appState.currentInputData
.map(d => d.data[feat] as number);
// The number of bins that the domain is divided into is specified by the
// FreedmanDiaconis algorithm. The first bin.x0 is always equal to the
// minimum domain value, and the last bin.x1 is always equal to the
// maximum domain value. Fall back to a sensible default if the algorithm
// returns an invalid value.
let numBins = d3.thresholdFreedmanDiaconis(values, min, max);
if (numBins === 0 || !isFinite(numBins)) {
numBins = 10;
}
const generator = d3.histogram<number, number>()
.domain([min, max])
.thresholds(numBins);
const generatedBins = generator(values);
const bins: NumericBins = {};
for (const bin of generatedBins) {
const isLastBin = bin.x1 === max;
const start = roundToDecimalPlaces(bin.x0!, 3);
const end = roundToDecimalPlaces(bin.x1!, 3);
// Return if there's an error in the histogram generator and the bin
// object doesn't contain valid boundary numbers.
if (start == null || end == null) continue;
const range = this.numericBinRange(start, end, isLastBin);
const key = this.numericBinKey(start, end, isLastBin);
bins[key] = range;
}
return bins;
}
private discreteBins(feat:string): NumericBins {
const bins: NumericBins = {};
const [min, max] = this.numericalFeatureRanges[feat];
const {step} = this.appState.currentDatasetSpec[feat];
if (typeof step !== 'number') {
throw (new Error(
`Unable to generate discrete bins; '${feat}' step is not defined`));
}
for (let lower = min; lower < max; lower += step) {
const upper = lower + step;
const binKey = this.numericBinKey(lower, upper, upper === max);
bins[binKey] = [lower, upper];
}
return bins;
}
private equalIntervalBins(feat: string, numBins: number): NumericBins {
const bins: NumericBins = {};
const min = this.numericalFeatureRanges[feat][0];
const max = this.numericalFeatureRanges[feat][1];
const step = (max - min) / numBins;
for (let i = 0; i < numBins; i++) {
const start = roundToDecimalPlaces(min + step * i, 3);
const end = roundToDecimalPlaces(min + step * (i + 1), 3);
const range = this.numericBinRange(start, end, (i === (numBins - 1)));
const key = this.numericBinKey(start, end, (i === (numBins - 1)));
bins[key] = range;
}
return bins;
}
private quantileBins(feat: string, numBins: number): NumericBins {
const bins: NumericBins = {};
const values = this.appState.currentInputData
.map(d => d.data[feat] as number)
.sort();
numBins = Math.min(numBins, values.length);
const step = values.length / numBins;
for (let i = 0; i < numBins; i++) {
const isLast = i === (numBins - 1);
const start = step * i;
const end = isLast ? values.length - 1 : step * (i + 1);
const lower = roundToDecimalPlaces(values[start], 3);
const upper = roundToDecimalPlaces(values[end], 3);
const range = this.numericBinRange(lower, upper, isLast);
const key = this.numericBinKey(lower, upper, isLast);
bins[key] = range;
}
return bins;
}
private thresholdBins(feat: string, threshold: number): NumericBins {
const [min, max] = this.numericalFeatureRanges[feat];
const [rMin, rMax] = [min, max].map(v => roundToDecimalPlaces(v, 3));
const rThresh = roundToDecimalPlaces(threshold, 3);
return {
[this.numericBinKey(rMin, rThresh, false)]: [min, threshold - 1e-6],
[this.numericBinKey(rThresh, rMax, true)]: [threshold, max + 1e-6]
};
}
/**
* Converts a list of FacetingConfigs into a string, ordered by featureName.
*/
facetConfigsToKey(configs: FacetingConfig[]): string {
const validConfigs = configs.sort((a, b) => {
if (a.featureName < b.featureName) { return -1; }
if (a.featureName > b.featureName) { return 1; }
return 0;
});
return JSON.stringify(validConfigs);
}
/**
* Determines if a faceting configuration is valid. This does not determine if
* the config is appropriate for the feature type.
*
* Rules for different method values:
*
* * DISCRETE and EQUAL_INTERVAL require nothing
* * QUANTILE requires numBins is a positive number
* * THRESHOLD requries threshold is provided
*/
validateFacetingConfig(config:FacetingConfig): boolean {
const {featureName, method, numBins, threshold} = config;
const isDiscrete = (method == null || method === FacetingMethod.DISCRETE);
const isEqInt = method === FacetingMethod.EQUAL_INTERVAL;
const isQuant = method === FacetingMethod.QUANTILE &&
typeof numBins === 'number' && numBins > 0;
const isThreshold = method === FacetingMethod.THRESHOLD &&
typeof threshold === 'number';
const isValid = isDiscrete || isEqInt || isQuant || isThreshold;
if (!isValid) {
const confstr = JSON.stringify(config);
throw new Error(
`Invalid faceting config for '${featureName}': ${confstr}`);
}
return isValid;
}
/**
* Filters a list of faceting configurations to only those that are valid.
*/
validateFacetingConfigs(configs:FacetingConfig[]): FacetingConfig[] {
return configs.filter(this.validateFacetingConfig);
}
/**
* Get the names of numeric features, and their bins. This relies on
* numericalFeatureRanges.
*/
numericalFeatureBins(configs: FacetingConfig[]): NumericFeatureBins {
const featureBins: NumericFeatureBins = {};
const numericConfigs = configs
.filter(c => this.numericalFeatureNames.includes(c.featureName));
for (const config of numericConfigs) {
const {featureName, method, numBins, threshold} = config;
if (method === FacetingMethod.EQUAL_INTERVAL) {
featureBins[featureName] = (numBins == null || numBins < 1) ?
this.freedmanDiaconisBins(featureName) :
this.equalIntervalBins(featureName, numBins);
} else if (method === FacetingMethod.QUANTILE) {
featureBins[featureName] = this.quantileBins(featureName, numBins || 4);
} else if (method === FacetingMethod.THRESHOLD && threshold != null) {
featureBins[featureName] = this.thresholdBins(featureName, threshold);
} else {
featureBins[featureName] = this.discreteBins(featureName);
}
}
return featureBins;
}
/**
* Find the correct feature bin for this input. Returns the bin values, or
* null if the datapoint should not be in any of the bins.
*/
getNumericalBinForExample(
bins: NumericFeatureBins, input: IndexedInput, feature: string): number[] | null {
const value = input.data[feature];
for (const bin of Object.values(bins[feature])) {
const [start, end] = bin;
if (start <= value && value < end) { return bin; }
}
return null;
}
/**
* Create a label to tell the user how many intersectional groups there are
* between all possible values of the given features (e.g., "6 x 2 = 12") if
* there are two features, with 6 and 2 values respectively.
*/
numIntersectionsLabel(bins: NumericFeatureBins, features: string[]): string {
const numLabels = features.map(feature => {
if (this.categoricalFeatureNames.includes(feature)) {
return this.categoricalFeatures[feature].length;
}
if (this.numericalFeatureNames.includes(feature)) {
return Object.keys(bins[feature]).length;
}
if (this.booleanFeatureNames.includes(feature)) {
return 2;
}
return 0;
});
const total = numLabels.reduce((a, b) => a * b);
return numLabels.length > 1 ? `${numLabels.join('x')} = ${total}` :
numLabels[0].toString();
}
/**
* Given a set of IndexedInputs and a set of features (each of which has a set
* of possible values or bins), organize the datapoints by all intersectional
* combinations of the these, and return a dict of keys to groups.
*
* By default, assumes that the features are IndexedInput data features.
* However, an optional getFeatValForInput function can be provided to use
* this with arbitrary features (e.g., predicted values as in the
* matrix_module.)
*/
groupExamplesByFeatures(
bins: NumericFeatureBins, data: IndexedInput[], features: string[],
getFeatValForInput: GetFeatureFunc = (b, d, i, f) =>
this.getFeatureValForInput(b, d, f)): GroupedExamples {
const facetedData: GroupedExamples = {};
// Loop over each datapoint to see what bin it belongs into.
for (let i = 0; i < data.length; i++) {
// Filter the data features for those that we are faceting by.
const datum = data[i];
const dFilters: FacetMap = {};
for (const feature of features) {
const val = getFeatValForInput(bins, datum, i, feature);
if (val == null) { continue; }
if (Array.isArray(val)) { // Numeric features are number[], need key
const binIdx = Object.values(bins[feature]).indexOf(val);
const displayVal = Object.keys(bins[feature])[binIdx];
dFilters[feature] = {val, displayVal};
} else { // Otherwise, val is the displayVal
dFilters[feature] = {val, displayVal: val.toString()};
}
}
// Make a dictionary key from this set of features.
const comboKey = facetMapToDictKey(dFilters);
// If there haven't been any other datapoints with this combination of
// filters, start a new facet.
if (!facetedData.hasOwnProperty(comboKey)) {
facetedData[comboKey] = {
displayName: comboKey,
data: [],
facets: dFilters
};
}
facetedData[comboKey].data.push(datum);
}
return facetedData;
}
/**
* Get the feature value for a datapoint. Will return the binned value,
* if the datapoint is numerical. Will return null if the datapoint does
* not have the feature.
*/
getFeatureValForInput(
bins: NumericFeatureBins, d: IndexedInput, feature: string): string | null {
if (feature in d.data) {
const isNumerical = this.numericalFeatureNames.includes(feature);
return isNumerical ? this.getNumericalBinForExample(bins, d, feature) :
d.data[feature];
}
return null;
}
} | the_stack |
import * as pulumi from "@pulumi/pulumi";
import { input as inputs, output as outputs } from "../types";
import * as utilities from "../utilities";
/**
* Manages an Azure IoT Time Series Insights Gen2 Environment.
*
* ## Example Usage
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as azure from "@pulumi/azure";
*
* const exampleResourceGroup = new azure.core.ResourceGroup("exampleResourceGroup", {location: "West Europe"});
* const storage = new azure.storage.Account("storage", {
* location: exampleResourceGroup.location,
* resourceGroupName: exampleResourceGroup.name,
* accountTier: "Standard",
* accountReplicationType: "LRS",
* });
* const exampleTimeSeriesInsightsGen2Environment = new azure.iot.TimeSeriesInsightsGen2Environment("exampleTimeSeriesInsightsGen2Environment", {
* location: exampleResourceGroup.location,
* resourceGroupName: exampleResourceGroup.name,
* skuName: "L1",
* warmStoreDataRetentionTime: "P30D",
* idProperties: ["id"],
* storage: {
* name: storage.name,
* key: storage.primaryAccessKey,
* },
* });
* ```
*
* ## Import
*
* Azure IoT Time Series Insights Gen2 Environment can be imported using the `resource id`, e.g.
*
* ```sh
* $ pulumi import azure:iot/timeSeriesInsightsGen2Environment:TimeSeriesInsightsGen2Environment example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.TimeSeriesInsights/environments/example
* ```
*/
export class TimeSeriesInsightsGen2Environment extends pulumi.CustomResource {
    /**
     * Get an existing TimeSeriesInsightsGen2Environment resource's state with the given name, ID, and optional extra
     * properties used to qualify the lookup.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    public static get(name: string, id: pulumi.Input<pulumi.ID>, state?: TimeSeriesInsightsGen2EnvironmentState, opts?: pulumi.CustomResourceOptions): TimeSeriesInsightsGen2Environment {
        return new TimeSeriesInsightsGen2Environment(name, <any>state, { ...opts, id: id });
    }

    /** @internal */
    public static readonly __pulumiType = 'azure:iot/timeSeriesInsightsGen2Environment:TimeSeriesInsightsGen2Environment';

    /**
     * Returns true if the given object is an instance of TimeSeriesInsightsGen2Environment. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    public static isInstance(obj: any): obj is TimeSeriesInsightsGen2Environment {
        // Duck-type on the stored type token rather than `instanceof`, so the
        // check survives multiple copies of the SDK in one process.
        return obj != null && obj['__pulumiType'] === TimeSeriesInsightsGen2Environment.__pulumiType;
    }

    /**
     * The FQDN used to access the environment data.
     */
    public /*out*/ readonly dataAccessFqdn!: pulumi.Output<string>;
    /**
     * A list of property ids for the Azure IoT Time Series Insights Gen2 Environment
     */
    public readonly idProperties!: pulumi.Output<string[]>;
    /**
     * Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
     */
    public readonly location!: pulumi.Output<string>;
    /**
     * Specifies the name of the Azure IoT Time Series Insights Gen2 Environment. Changing this forces a new resource to be created. Must be globally unique.
     */
    public readonly name!: pulumi.Output<string>;
    /**
     * The name of the resource group in which to create the Azure IoT Time Series Insights Gen2 Environment.
     */
    public readonly resourceGroupName!: pulumi.Output<string>;
    /**
     * Specifies the SKU Name for this IoT Time Series Insights Gen2 Environment. Currently it supports only `L1`. For gen2, capacity cannot be specified.
     */
    public readonly skuName!: pulumi.Output<string>;
    /**
     * A `storage` block as defined below.
     */
    public readonly storage!: pulumi.Output<outputs.iot.TimeSeriesInsightsGen2EnvironmentStorage>;
    /**
     * A mapping of tags to assign to the resource.
     */
    public readonly tags!: pulumi.Output<{[key: string]: string} | undefined>;
    /**
     * Specifies the ISO8601 timespan specifying the minimum number of days the environment's events will be available for query. Changing this forces a new resource to be created.
     */
    public readonly warmStoreDataRetentionTime!: pulumi.Output<string | undefined>;

    /**
     * Create a TimeSeriesInsightsGen2Environment resource with the given unique name, arguments, and options.
     *
     * @param name The _unique_ name of the resource.
     * @param args The arguments to use to populate this resource's properties.
     * @param opts A bag of options that control this resource's behavior.
     */
    constructor(name: string, args: TimeSeriesInsightsGen2EnvironmentArgs, opts?: pulumi.CustomResourceOptions)
    constructor(name: string, argsOrState?: TimeSeriesInsightsGen2EnvironmentArgs | TimeSeriesInsightsGen2EnvironmentState, opts?: pulumi.CustomResourceOptions) {
        const inputs: pulumi.Inputs = {};
        opts = opts || {};
        if (opts.id) {
            // An explicit id means we're looking up an existing resource:
            // hydrate inputs from whatever state was supplied.
            const state = argsOrState as TimeSeriesInsightsGen2EnvironmentState | undefined;
            inputs["dataAccessFqdn"] = state?.dataAccessFqdn;
            inputs["idProperties"] = state?.idProperties;
            inputs["location"] = state?.location;
            inputs["name"] = state?.name;
            inputs["resourceGroupName"] = state?.resourceGroupName;
            inputs["skuName"] = state?.skuName;
            inputs["storage"] = state?.storage;
            inputs["tags"] = state?.tags;
            inputs["warmStoreDataRetentionTime"] = state?.warmStoreDataRetentionTime;
        } else {
            // Creating a new resource: validate required args (skipped when
            // rehydrating from a URN), then copy them into the input bag.
            const args = argsOrState as TimeSeriesInsightsGen2EnvironmentArgs | undefined;
            if (!opts.urn) {
                for (const required of ["idProperties", "resourceGroupName", "skuName", "storage"] as const) {
                    if (args?.[required] === undefined) {
                        throw new Error(`Missing required property '${required}'`);
                    }
                }
            }
            inputs["idProperties"] = args?.idProperties;
            inputs["location"] = args?.location;
            inputs["name"] = args?.name;
            inputs["resourceGroupName"] = args?.resourceGroupName;
            inputs["skuName"] = args?.skuName;
            inputs["storage"] = args?.storage;
            inputs["tags"] = args?.tags;
            inputs["warmStoreDataRetentionTime"] = args?.warmStoreDataRetentionTime;
            inputs["dataAccessFqdn"] = undefined /*out*/;
        }
        if (!opts.version) {
            opts = pulumi.mergeOptions(opts, { version: utilities.getVersion()});
        }
        super(TimeSeriesInsightsGen2Environment.__pulumiType, name, inputs, opts);
    }
}
/**
* Input properties used for looking up and filtering TimeSeriesInsightsGen2Environment resources.
*/
export interface TimeSeriesInsightsGen2EnvironmentState {
    /**
     * The FQDN used to access the environment data. (Provider-computed output;
     * present only when reading existing state.)
     */
    dataAccessFqdn?: pulumi.Input<string>;
    /**
     * A list of property ids for the Azure IoT Time Series Insights Gen2 Environment
     */
    idProperties?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
     */
    location?: pulumi.Input<string>;
    /**
     * Specifies the name of the Azure IoT Time Series Insights Gen2 Environment. Changing this forces a new resource to be created. Must be globally unique.
     */
    name?: pulumi.Input<string>;
    /**
     * The name of the resource group in which to create the Azure IoT Time Series Insights Gen2 Environment.
     */
    resourceGroupName?: pulumi.Input<string>;
    /**
     * Specifies the SKU Name for this IoT Time Series Insights Gen2 Environment. Currently it supports only `L1`. For gen2, capacity cannot be specified.
     */
    skuName?: pulumi.Input<string>;
    /**
     * A `storage` block as defined below.
     */
    storage?: pulumi.Input<inputs.iot.TimeSeriesInsightsGen2EnvironmentStorage>;
    /**
     * A mapping of tags to assign to the resource.
     */
    tags?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * Specifies the ISO8601 timespan specifying the minimum number of days the environment's events will be available for query. Changing this forces a new resource to be created.
     */
    warmStoreDataRetentionTime?: pulumi.Input<string>;
}
/**
* The set of arguments for constructing a TimeSeriesInsightsGen2Environment resource.
*/
export interface TimeSeriesInsightsGen2EnvironmentArgs {
    /**
     * A list of property ids for the Azure IoT Time Series Insights Gen2 Environment. (Required.)
     */
    idProperties: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
     */
    location?: pulumi.Input<string>;
    /**
     * Specifies the name of the Azure IoT Time Series Insights Gen2 Environment. Changing this forces a new resource to be created. Must be globally unique.
     */
    name?: pulumi.Input<string>;
    /**
     * The name of the resource group in which to create the Azure IoT Time Series Insights Gen2 Environment. (Required.)
     */
    resourceGroupName: pulumi.Input<string>;
    /**
     * Specifies the SKU Name for this IoT Time Series Insights Gen2 Environment. Currently it supports only `L1`. For gen2, capacity cannot be specified. (Required.)
     */
    skuName: pulumi.Input<string>;
    /**
     * A `storage` block as defined below. (Required.)
     */
    storage: pulumi.Input<inputs.iot.TimeSeriesInsightsGen2EnvironmentStorage>;
    /**
     * A mapping of tags to assign to the resource.
     */
    tags?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * Specifies the ISO8601 timespan specifying the minimum number of days the environment's events will be available for query. Changing this forces a new resource to be created.
     */
    warmStoreDataRetentionTime?: pulumi.Input<string>;
}
import React from 'react'
import { render } from '@testing-library/react'
import { DateRangePicker } from './DateRangePicker'
import userEvent from '@testing-library/user-event'
// Minimal required props for the start/end DatePicker inputs. The `id` also
// appears to be used as the prefix for derived element ids (the label/hint
// tests below expect e.g. `start-date-label`) — see those assertions.
const startDatePickerTestProps = {
  id: 'start-date',
  name: 'start-date',
}

const endDatePickerTestProps = {
  id: 'end-date',
  name: 'end-date',
}
describe('DateRangePicker component', () => {
it('renders without errors', () => {
  const { getByTestId, getAllByTestId } = render(
    <DateRangePicker
      startDatePickerProps={startDatePickerTestProps}
      endDatePickerProps={endDatePickerTestProps}
    />
  )

  // The wrapper renders with the USWDS range-picker class.
  const wrapper = getByTestId('date-range-picker')
  expect(wrapper).toBeInTheDocument()
  expect(wrapper).toHaveClass('usa-date-range-picker')

  // Exactly two pickers render, in start/end order with range modifiers.
  const pickers = getAllByTestId('date-picker')
  expect(pickers).toHaveLength(2)
  const [rangeStart, rangeEnd] = pickers
  expect(rangeStart).toHaveClass('usa-date-range-picker__range-start')
  expect(rangeEnd).toHaveClass('usa-date-range-picker__range-end')

  // With no labels or hints, no aria-describedby wiring is added to either
  // the internal (hidden) or external (visible) inputs.
  const internalInputs = getAllByTestId('date-picker-internal-input')
  expect(internalInputs).toHaveLength(2)
  const [startInternal, endInternal] = internalInputs
  expect(startInternal).not.toHaveAttribute('aria-describedby')
  expect(endInternal).not.toHaveAttribute('aria-describedby')

  const externalInputs = getAllByTestId('date-picker-external-input')
  expect(externalInputs).toHaveLength(2)
  const [startExternal, endExternal] = externalInputs
  expect(startExternal).not.toHaveAttribute('aria-describedby')
  expect(endExternal).not.toHaveAttribute('aria-describedby')
})
it('renders labels when specified', () => {
  const { getByTestId, getAllByTestId, queryByText } = render(
    <DateRangePicker
      startDateLabel="Start Date"
      startDatePickerProps={startDatePickerTestProps}
      endDateLabel="End Date"
      endDatePickerProps={endDatePickerTestProps}
    />
  )

  const wrapper = getByTestId('date-range-picker')
  expect(wrapper).toBeInTheDocument()
  expect(wrapper).toHaveClass('usa-date-range-picker')

  const pickers = getAllByTestId('date-picker')
  expect(pickers).toHaveLength(2)
  const [rangeStart, rangeEnd] = pickers
  expect(rangeStart).toHaveClass('usa-date-range-picker__range-start')
  expect(rangeEnd).toHaveClass('usa-date-range-picker__range-end')

  // Both label elements render with the USWDS label class.
  const startDateLabel = queryByText('Start Date')
  expect(startDateLabel).toBeInTheDocument()
  expect(startDateLabel).toHaveClass('usa-label')
  const endDateLabel = queryByText('End Date')
  expect(endDateLabel).toBeInTheDocument()
  expect(endDateLabel).toHaveClass('usa-label')

  // When labels are present, each picker's inputs are described by the
  // matching `<id>-label` element.
  const internalInputs = getAllByTestId('date-picker-internal-input')
  expect(internalInputs).toHaveLength(2)
  const [startInternal, endInternal] = internalInputs
  expect(startInternal).toHaveAttribute('aria-describedby', 'start-date-label')
  expect(endInternal).toHaveAttribute('aria-describedby', 'end-date-label')

  const externalInputs = getAllByTestId('date-picker-external-input')
  expect(externalInputs).toHaveLength(2)
  const [startExternal, endExternal] = externalInputs
  expect(startExternal).toHaveAttribute('aria-describedby', 'start-date-label')
  expect(endExternal).toHaveAttribute('aria-describedby', 'end-date-label')
})
it('renders hints when specified', () => {
  const { getByTestId, getAllByTestId, queryByText } = render(
    <DateRangePicker
      startDateHint="start date format: mm/dd/yyyy"
      startDatePickerProps={startDatePickerTestProps}
      endDateHint="end date format: mm/dd/yyyy"
      endDatePickerProps={endDatePickerTestProps}
    />
  )

  // The wrapper and both nested DatePickers render.
  const wrapper = getByTestId('date-range-picker')
  expect(wrapper).toBeInTheDocument()
  expect(wrapper).toHaveClass('usa-date-range-picker')
  expect(getAllByTestId('date-picker')).toHaveLength(2)

  // Each hint renders with the USWDS hint class.
  for (const hintText of [
    'start date format: mm/dd/yyyy',
    'end date format: mm/dd/yyyy',
  ]) {
    const hint = queryByText(hintText)
    expect(hint).toBeInTheDocument()
    expect(hint).toHaveClass('usa-hint')
  }

  // Both the internal and the external input of each picker are described
  // by the id of its corresponding hint element.
  for (const testId of [
    'date-picker-internal-input',
    'date-picker-external-input',
  ]) {
    const inputs = getAllByTestId(testId)
    expect(inputs).toHaveLength(2)
    const [startInput, endInput] = inputs
    expect(startInput).toHaveAttribute('aria-describedby', 'start-date-hint')
    expect(endInput).toHaveAttribute('aria-describedby', 'end-date-hint')
  }
})
it('renders labels and hints simultaneously, properly populating the aria-describedby property on each DatePicker', () => {
  const { getByTestId, getAllByTestId, queryByText } = render(
    <DateRangePicker
      startDateLabel="Start Date"
      startDateHint="start date format: mm/dd/yyyy"
      startDatePickerProps={startDatePickerTestProps}
      endDateLabel="End Date"
      endDateHint="end date format: mm/dd/yyyy"
      endDatePickerProps={endDatePickerTestProps}
    />
  )

  // The wrapper and both nested DatePickers render.
  const wrapper = getByTestId('date-range-picker')
  expect(wrapper).toBeInTheDocument()
  expect(wrapper).toHaveClass('usa-date-range-picker')
  expect(getAllByTestId('date-picker')).toHaveLength(2)

  // Labels and hints all render with their respective USWDS classes.
  for (const [text, className] of [
    ['Start Date', 'usa-label'],
    ['End Date', 'usa-label'],
    ['start date format: mm/dd/yyyy', 'usa-hint'],
    ['end date format: mm/dd/yyyy', 'usa-hint'],
  ]) {
    const element = queryByText(text)
    expect(element).toBeInTheDocument()
    expect(element).toHaveClass(className)
  }

  // When a label and a hint are both present, aria-describedby on every
  // input lists the label id followed by the hint id.
  for (const testId of [
    'date-picker-internal-input',
    'date-picker-external-input',
  ]) {
    const inputs = getAllByTestId(testId)
    expect(inputs).toHaveLength(2)
    const [startInput, endInput] = inputs
    expect(startInput).toHaveAttribute(
      'aria-describedby',
      'start-date-label start-date-hint'
    )
    expect(endInput).toHaveAttribute(
      'aria-describedby',
      'end-date-label end-date-hint'
    )
  }
})
it('allows a date range to be selected by using both date pickers to pick start and end dates', () => {
  const mockStartDatePickerOnChange = jest.fn()
  const mockEndDatePickerOnChange = jest.fn()
  const { getAllByTestId, getByText } = render(
    <DateRangePicker
      startDatePickerProps={{
        ...startDatePickerTestProps,
        defaultValue: '2021-01-20',
        onChange: mockStartDatePickerOnChange,
      }}
      endDatePickerProps={{
        ...endDatePickerTestProps,
        defaultValue: '2021-01-25',
        onChange: mockEndDatePickerOnChange,
      }}
    />
  )
  // Collect each picker's toggle button, calendar, and inputs.
  // Index 0 is always the start picker, index 1 the end picker.
  const datePickerButtons = getAllByTestId('date-picker-button')
  const startDatePickerButton = datePickerButtons[0]
  const endDatePickerButton = datePickerButtons[1]
  const calendars = getAllByTestId('date-picker-calendar')
  const startDatePickerCalendar = calendars[0]
  const endDatePickerCalendar = calendars[1]
  const internalInputs = getAllByTestId('date-picker-internal-input')
  const startDatePickerInternalInput = internalInputs[0]
  const endDatePickerInternalInput = internalInputs[1]
  const externalInputs = getAllByTestId('date-picker-external-input')
  const startDatePickerExternalInput = externalInputs[0]
  const endDatePickerExternalInput = externalInputs[1]
  // Select the start date from the first date picker:
  userEvent.click(startDatePickerButton)
  expect(startDatePickerCalendar).toBeVisible()
  // The defaultValue (Jan 20) shows as selected and as the range start.
  const defaultSelectedStartDate = getByText('20')
  expect(defaultSelectedStartDate).toHaveClass(
    'usa-date-picker__calendar__date usa-date-picker__calendar__date--selected usa-date-picker__calendar__date--range-date-start'
  )
  const newStartDateButton = getByText('21')
  expect(newStartDateButton).toHaveClass(
    'usa-date-picker__calendar__date usa-date-picker__calendar__date--within-range'
  )
  userEvent.click(newStartDateButton)
  // Picking Jan 21 updates both inputs (external mm/dd/yyyy, internal
  // ISO yyyy-mm-dd), closes the calendar, and fires the onChange mock.
  expect(startDatePickerExternalInput).toHaveValue('01/21/2021')
  expect(startDatePickerInternalInput).toHaveValue('2021-01-21')
  expect(startDatePickerExternalInput).toHaveFocus()
  expect(startDatePickerCalendar).not.toBeVisible()
  expect(mockStartDatePickerOnChange).toHaveBeenCalledWith('01/21/2021')
  // Select the end date from the second date picker:
  userEvent.click(endDatePickerButton)
  expect(endDatePickerCalendar).toBeVisible()
  // The defaultValue (Jan 25) shows as selected and as the range end.
  const defaultSelectedEndDate = getByText('25')
  expect(defaultSelectedEndDate).toHaveClass(
    'usa-date-picker__calendar__date usa-date-picker__calendar__date--selected usa-date-picker__calendar__date--range-date-end'
  )
  const newEndDateButton = getByText('24')
  expect(newEndDateButton).toHaveClass(
    'usa-date-picker__calendar__date usa-date-picker__calendar__date--within-range'
  )
  userEvent.click(newEndDateButton)
  // Picking Jan 24 mirrors the start-picker behavior on the end picker.
  expect(endDatePickerExternalInput).toHaveValue('01/24/2021')
  expect(endDatePickerInternalInput).toHaveValue('2021-01-24')
  expect(endDatePickerExternalInput).toHaveFocus()
  expect(endDatePickerCalendar).not.toBeVisible()
  expect(mockEndDatePickerOnChange).toHaveBeenCalledWith('01/24/2021')
})
it('prevents the selection of a date range where the end date is before the start date', () => {
  const mockStartDatePickerOnChange = jest.fn()
  const mockEndDatePickerOnChange = jest.fn()
  const { getAllByTestId, getByText } = render(
    <DateRangePicker
      startDatePickerProps={{
        ...startDatePickerTestProps,
        defaultValue: '2021-01-20',
        onChange: mockStartDatePickerOnChange,
      }}
      endDatePickerProps={{
        ...endDatePickerTestProps,
        defaultValue: '2021-01-25',
        onChange: mockEndDatePickerOnChange,
      }}
    />
  )
  // Index 0 is always the start picker, index 1 the end picker.
  const datePickerButtons = getAllByTestId('date-picker-button')
  const startDatePickerButton = datePickerButtons[0]
  const endDatePickerButton = datePickerButtons[1]
  const calendars = getAllByTestId('date-picker-calendar')
  const startDatePickerCalendar = calendars[0]
  const endDatePickerCalendar = calendars[1]
  // Try to select a start date after the end date:
  userEvent.click(startDatePickerButton)
  expect(startDatePickerCalendar).toBeVisible()
  const defaultSelectedStartDate = getByText('20')
  expect(defaultSelectedStartDate).toHaveClass(
    'usa-date-picker__calendar__date usa-date-picker__calendar__date--selected usa-date-picker__calendar__date--range-date-start'
  )
  const startDatePickerRangeEnd = getByText('25')
  expect(startDatePickerRangeEnd).toHaveClass(
    'usa-date-picker__calendar__date usa-date-picker__calendar__date--range-date-end'
  )
  // Jan 26 is after the end date (Jan 25), so it must be disabled.
  const invalidStartDateButton = getByText('26')
  expect(invalidStartDateButton).toHaveClass(
    'usa-date-picker__calendar__date'
  )
  expect(invalidStartDateButton).toBeDisabled()
  userEvent.click(invalidStartDateButton)
  // Clicking a disabled date is a no-op: the calendar stays open and
  // the START picker's onChange never receives the invalid date.
  // (Fixed: this previously asserted on mockEndDatePickerOnChange, which
  // passed vacuously — the end picker was never interacted with here.)
  expect(startDatePickerCalendar).toBeVisible()
  expect(mockStartDatePickerOnChange).not.toHaveBeenCalledWith('01/26/2021')
  // Try to select an end date before the start date:
  userEvent.click(endDatePickerButton)
  expect(endDatePickerCalendar).toBeVisible()
  const endDatePickerRangeStart = getByText('20')
  expect(endDatePickerRangeStart).toHaveClass(
    'usa-date-picker__calendar__date usa-date-picker__calendar__date--range-date-start'
  )
  const defaultSelectedEndDate = getByText('25')
  expect(defaultSelectedEndDate).toHaveClass(
    'usa-date-picker__calendar__date usa-date-picker__calendar__date--selected usa-date-picker__calendar__date--range-date-end'
  )
  // Jan 19 is before the start date (Jan 20), so it must be disabled.
  const invalidEndDateButton = getByText('19')
  expect(invalidEndDateButton).toHaveClass('usa-date-picker__calendar__date')
  expect(invalidEndDateButton).toBeDisabled()
  userEvent.click(invalidEndDateButton)
  expect(endDatePickerCalendar).toBeVisible()
  expect(mockEndDatePickerOnChange).not.toHaveBeenCalledWith('01/19/2021')
})
it('clears the range-determining date of the opposite DatePicker when the input is cleared', () => {
  const mockStartDatePickerOnChange = jest.fn()
  const mockEndDatePickerOnChange = jest.fn()
  const { getAllByTestId, getByText } = render(
    <DateRangePicker
      startDateLabel="Event start date"
      startDateHint="mm/dd/yyyy"
      startDatePickerProps={{
        id: 'event-date-start',
        name: 'event-date-start',
        defaultValue: '2021-01-20',
        onChange: mockStartDatePickerOnChange,
      }}
      endDateLabel="Event end date"
      endDateHint="mm/dd/yyyy"
      endDatePickerProps={{
        id: 'event-date-end',
        name: 'event-date-end',
        defaultValue: '2021-01-25',
        onChange: mockEndDatePickerOnChange,
      }}
    />
  )
  // Index 0 is always the start picker, index 1 the end picker.
  const datePickerExternalInputs = getAllByTestId(
    'date-picker-external-input'
  )
  const startDatePickerExternalInput = datePickerExternalInputs[0]
  const datePickerButtons = getAllByTestId('date-picker-button')
  const endDatePickerButton = datePickerButtons[1]
  const calendars = getAllByTestId('date-picker-calendar')
  const endDatePickerCalendar = calendars[1]
  // Verify the end date cannot be selected before the default-selected start date:
  userEvent.click(endDatePickerButton)
  expect(endDatePickerCalendar).toBeVisible()
  const endDatePickerRangeStart = getByText('20')
  expect(endDatePickerRangeStart).toHaveClass(
    'usa-date-picker__calendar__date usa-date-picker__calendar__date--range-date-start'
  )
  // Jan 19 is before the start date (Jan 20), so it is disabled for now.
  const invalidEndDateButton = getByText('19')
  expect(invalidEndDateButton).toHaveClass('usa-date-picker__calendar__date')
  expect(invalidEndDateButton).toBeDisabled()
  userEvent.click(invalidEndDateButton)
  expect(endDatePickerCalendar).toBeVisible()
  expect(mockEndDatePickerOnChange).not.toHaveBeenCalledWith('01/19/2021')
  // Close the end date picker calendar:
  userEvent.click(endDatePickerButton)
  expect(endDatePickerCalendar).not.toBeVisible()
  // Clear the start picker input:
  userEvent.clear(startDatePickerExternalInput)
  // Verify an end date before the previously selected start date can be selected:
  userEvent.click(endDatePickerButton)
  expect(endDatePickerCalendar).toBeVisible()
  // Jan 20 no longer carries the range-start modifier class...
  const noLongerRangeStart = getByText('20')
  expect(noLongerRangeStart).toHaveClass('usa-date-picker__calendar__date')
  // ...and Jan 19 is now enabled and selectable.
  const previouslyInvalidEndDateButton = getByText('19')
  expect(previouslyInvalidEndDateButton).toHaveClass(
    'usa-date-picker__calendar__date'
  )
  expect(previouslyInvalidEndDateButton).not.toBeDisabled()
  userEvent.click(previouslyInvalidEndDateButton)
  expect(endDatePickerCalendar).not.toBeVisible()
  expect(mockEndDatePickerOnChange).toHaveBeenCalledWith('01/19/2021')
})
}) | the_stack |
import * as msRest from "@azure/ms-rest-js";
import * as msRestAzure from "@azure/ms-rest-azure-js";
import * as Models from "../models";
import * as Mappers from "../models/signalRMappers";
import * as Parameters from "../models/parameters";
import { SignalRManagementClientContext } from "../signalRManagementClientContext";
/** Class representing a SignalR. */
export class SignalR {
  // Service client context shared by every operation on this class.
  private readonly client: SignalRManagementClientContext;
  /**
   * Create a SignalR.
   * @param {SignalRManagementClientContext} client Reference to the service client.
   */
  constructor(client: SignalRManagementClientContext) {
    this.client = client;
  }
  /**
   * Checks that the resource name is valid and is not already in use.
   * @param location the region
   * @param parameters Parameters supplied to the operation.
   * @param [options] The optional parameters
   * @returns Promise<Models.SignalRCheckNameAvailabilityResponse>
   */
  checkNameAvailability(
    location: string,
    parameters: Models.NameAvailabilityParameters,
    options?: msRest.RequestOptionsBase
  ): Promise<Models.SignalRCheckNameAvailabilityResponse>;
  /**
   * @param location the region
   * @param parameters Parameters supplied to the operation.
   * @param callback The callback
   */
  checkNameAvailability(
    location: string,
    parameters: Models.NameAvailabilityParameters,
    callback: msRest.ServiceCallback<Models.NameAvailability>
  ): void;
  /**
   * @param location the region
   * @param parameters Parameters supplied to the operation.
   * @param options The optional parameters
   * @param callback The callback
   */
  checkNameAvailability(
    location: string,
    parameters: Models.NameAvailabilityParameters,
    options: msRest.RequestOptionsBase,
    callback: msRest.ServiceCallback<Models.NameAvailability>
  ): void;
  // Implementation signature handling both promise- and callback-style calls
  // (this overload pattern repeats for every non-LRO operation below).
  checkNameAvailability(
    location: string,
    parameters: Models.NameAvailabilityParameters,
    options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.NameAvailability>,
    callback?: msRest.ServiceCallback<Models.NameAvailability>
  ): Promise<Models.SignalRCheckNameAvailabilityResponse> {
    return this.client.sendOperationRequest(
      {
        location,
        parameters,
        options
      },
      checkNameAvailabilityOperationSpec,
      callback
    ) as Promise<Models.SignalRCheckNameAvailabilityResponse>;
  }
  /**
   * Handles requests to list all resources in a subscription.
   * @param [options] The optional parameters
   * @returns Promise<Models.SignalRListBySubscriptionResponse>
   */
  listBySubscription(
    options?: msRest.RequestOptionsBase
  ): Promise<Models.SignalRListBySubscriptionResponse>;
  /**
   * @param callback The callback
   */
  listBySubscription(callback: msRest.ServiceCallback<Models.SignalRResourceList>): void;
  /**
   * @param options The optional parameters
   * @param callback The callback
   */
  listBySubscription(
    options: msRest.RequestOptionsBase,
    callback: msRest.ServiceCallback<Models.SignalRResourceList>
  ): void;
  listBySubscription(
    options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.SignalRResourceList>,
    callback?: msRest.ServiceCallback<Models.SignalRResourceList>
  ): Promise<Models.SignalRListBySubscriptionResponse> {
    return this.client.sendOperationRequest(
      {
        options
      },
      listBySubscriptionOperationSpec,
      callback
    ) as Promise<Models.SignalRListBySubscriptionResponse>;
  }
  /**
   * Handles requests to list all resources in a resource group.
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param [options] The optional parameters
   * @returns Promise<Models.SignalRListByResourceGroupResponse>
   */
  listByResourceGroup(
    resourceGroupName: string,
    options?: msRest.RequestOptionsBase
  ): Promise<Models.SignalRListByResourceGroupResponse>;
  /**
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param callback The callback
   */
  listByResourceGroup(
    resourceGroupName: string,
    callback: msRest.ServiceCallback<Models.SignalRResourceList>
  ): void;
  /**
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param options The optional parameters
   * @param callback The callback
   */
  listByResourceGroup(
    resourceGroupName: string,
    options: msRest.RequestOptionsBase,
    callback: msRest.ServiceCallback<Models.SignalRResourceList>
  ): void;
  listByResourceGroup(
    resourceGroupName: string,
    options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.SignalRResourceList>,
    callback?: msRest.ServiceCallback<Models.SignalRResourceList>
  ): Promise<Models.SignalRListByResourceGroupResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        options
      },
      listByResourceGroupOperationSpec,
      callback
    ) as Promise<Models.SignalRListByResourceGroupResponse>;
  }
  /**
   * Get the resource and its properties.
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param [options] The optional parameters
   * @returns Promise<Models.SignalRGetResponse>
   */
  get(
    resourceGroupName: string,
    resourceName: string,
    options?: msRest.RequestOptionsBase
  ): Promise<Models.SignalRGetResponse>;
  /**
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param callback The callback
   */
  get(
    resourceGroupName: string,
    resourceName: string,
    callback: msRest.ServiceCallback<Models.SignalRResource>
  ): void;
  /**
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param options The optional parameters
   * @param callback The callback
   */
  get(
    resourceGroupName: string,
    resourceName: string,
    options: msRest.RequestOptionsBase,
    callback: msRest.ServiceCallback<Models.SignalRResource>
  ): void;
  get(
    resourceGroupName: string,
    resourceName: string,
    options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.SignalRResource>,
    callback?: msRest.ServiceCallback<Models.SignalRResource>
  ): Promise<Models.SignalRGetResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        resourceName,
        options
      },
      getOperationSpec,
      callback
    ) as Promise<Models.SignalRGetResponse>;
  }
  /**
   * Create or update a resource.
   * @param parameters Parameters for the create or update operation
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param [options] The optional parameters
   * @returns Promise<Models.SignalRCreateOrUpdateResponse>
   */
  createOrUpdate(
    parameters: Models.SignalRResource,
    resourceGroupName: string,
    resourceName: string,
    options?: msRest.RequestOptionsBase
  ): Promise<Models.SignalRCreateOrUpdateResponse> {
    // Long-running operation: starts the LRO and polls it to completion.
    return this.beginCreateOrUpdate(
      parameters,
      resourceGroupName,
      resourceName,
      options
    ).then((lroPoller) => lroPoller.pollUntilFinished()) as Promise<
      Models.SignalRCreateOrUpdateResponse
    >;
  }
  /**
   * Operation to delete a resource.
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param [options] The optional parameters
   * @returns Promise<msRest.RestResponse>
   */
  deleteMethod(
    resourceGroupName: string,
    resourceName: string,
    options?: msRest.RequestOptionsBase
  ): Promise<msRest.RestResponse> {
    return this.beginDeleteMethod(resourceGroupName, resourceName, options).then((lroPoller) =>
      lroPoller.pollUntilFinished()
    );
  }
  /**
   * Operation to update an existing resource.
   * @param parameters Parameters for the update operation
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param [options] The optional parameters
   * @returns Promise<Models.SignalRUpdateResponse>
   */
  update(
    parameters: Models.SignalRResource,
    resourceGroupName: string,
    resourceName: string,
    options?: msRest.RequestOptionsBase
  ): Promise<Models.SignalRUpdateResponse> {
    return this.beginUpdate(
      parameters,
      resourceGroupName,
      resourceName,
      options
    ).then((lroPoller) => lroPoller.pollUntilFinished()) as Promise<Models.SignalRUpdateResponse>;
  }
  /**
   * Get the access keys of the resource.
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param [options] The optional parameters
   * @returns Promise<Models.SignalRListKeysResponse>
   */
  listKeys(
    resourceGroupName: string,
    resourceName: string,
    options?: msRest.RequestOptionsBase
  ): Promise<Models.SignalRListKeysResponse>;
  /**
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param callback The callback
   */
  listKeys(
    resourceGroupName: string,
    resourceName: string,
    callback: msRest.ServiceCallback<Models.SignalRKeys>
  ): void;
  /**
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param options The optional parameters
   * @param callback The callback
   */
  listKeys(
    resourceGroupName: string,
    resourceName: string,
    options: msRest.RequestOptionsBase,
    callback: msRest.ServiceCallback<Models.SignalRKeys>
  ): void;
  listKeys(
    resourceGroupName: string,
    resourceName: string,
    options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.SignalRKeys>,
    callback?: msRest.ServiceCallback<Models.SignalRKeys>
  ): Promise<Models.SignalRListKeysResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        resourceName,
        options
      },
      listKeysOperationSpec,
      callback
    ) as Promise<Models.SignalRListKeysResponse>;
  }
  /**
   * Regenerate the access key for the resource. PrimaryKey and SecondaryKey cannot be regenerated at
   * the same time.
   * @param parameters Parameter that describes the Regenerate Key Operation.
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param [options] The optional parameters
   * @returns Promise<Models.SignalRRegenerateKeyResponse>
   */
  regenerateKey(
    parameters: Models.RegenerateKeyParameters,
    resourceGroupName: string,
    resourceName: string,
    options?: msRest.RequestOptionsBase
  ): Promise<Models.SignalRRegenerateKeyResponse> {
    return this.beginRegenerateKey(
      parameters,
      resourceGroupName,
      resourceName,
      options
    ).then((lroPoller) => lroPoller.pollUntilFinished()) as Promise<
      Models.SignalRRegenerateKeyResponse
    >;
  }
  /**
   * Operation to restart a resource.
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param [options] The optional parameters
   * @returns Promise<msRest.RestResponse>
   */
  restart(
    resourceGroupName: string,
    resourceName: string,
    options?: msRest.RequestOptionsBase
  ): Promise<msRest.RestResponse> {
    return this.beginRestart(resourceGroupName, resourceName, options).then((lroPoller) =>
      lroPoller.pollUntilFinished()
    );
  }
  /**
   * Create or update a resource.
   * @param parameters Parameters for the create or update operation
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param [options] The optional parameters
   * @returns Promise<msRestAzure.LROPoller>
   */
  beginCreateOrUpdate(
    parameters: Models.SignalRResource,
    resourceGroupName: string,
    resourceName: string,
    options?: msRest.RequestOptionsBase
  ): Promise<msRestAzure.LROPoller> {
    return this.client.sendLRORequest(
      {
        parameters,
        resourceGroupName,
        resourceName,
        options
      },
      beginCreateOrUpdateOperationSpec,
      options
    );
  }
  /**
   * Operation to delete a resource.
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param [options] The optional parameters
   * @returns Promise<msRestAzure.LROPoller>
   */
  beginDeleteMethod(
    resourceGroupName: string,
    resourceName: string,
    options?: msRest.RequestOptionsBase
  ): Promise<msRestAzure.LROPoller> {
    return this.client.sendLRORequest(
      {
        resourceGroupName,
        resourceName,
        options
      },
      beginDeleteMethodOperationSpec,
      options
    );
  }
  /**
   * Operation to update an existing resource.
   * @param parameters Parameters for the update operation
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param [options] The optional parameters
   * @returns Promise<msRestAzure.LROPoller>
   */
  beginUpdate(
    parameters: Models.SignalRResource,
    resourceGroupName: string,
    resourceName: string,
    options?: msRest.RequestOptionsBase
  ): Promise<msRestAzure.LROPoller> {
    return this.client.sendLRORequest(
      {
        parameters,
        resourceGroupName,
        resourceName,
        options
      },
      beginUpdateOperationSpec,
      options
    );
  }
  /**
   * Regenerate the access key for the resource. PrimaryKey and SecondaryKey cannot be regenerated at
   * the same time.
   * @param parameters Parameter that describes the Regenerate Key Operation.
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param [options] The optional parameters
   * @returns Promise<msRestAzure.LROPoller>
   */
  beginRegenerateKey(
    parameters: Models.RegenerateKeyParameters,
    resourceGroupName: string,
    resourceName: string,
    options?: msRest.RequestOptionsBase
  ): Promise<msRestAzure.LROPoller> {
    return this.client.sendLRORequest(
      {
        parameters,
        resourceGroupName,
        resourceName,
        options
      },
      beginRegenerateKeyOperationSpec,
      options
    );
  }
  /**
   * Operation to restart a resource.
   * @param resourceGroupName The name of the resource group that contains the resource. You can
   * obtain this value from the Azure Resource Manager API or the portal.
   * @param resourceName The name of the resource.
   * @param [options] The optional parameters
   * @returns Promise<msRestAzure.LROPoller>
   */
  beginRestart(
    resourceGroupName: string,
    resourceName: string,
    options?: msRest.RequestOptionsBase
  ): Promise<msRestAzure.LROPoller> {
    return this.client.sendLRORequest(
      {
        resourceGroupName,
        resourceName,
        options
      },
      beginRestartOperationSpec,
      options
    );
  }
  /**
   * Handles requests to list all resources in a subscription.
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param [options] The optional parameters
   * @returns Promise<Models.SignalRListBySubscriptionNextResponse>
   */
  listBySubscriptionNext(
    nextPageLink: string,
    options?: msRest.RequestOptionsBase
  ): Promise<Models.SignalRListBySubscriptionNextResponse>;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param callback The callback
   */
  listBySubscriptionNext(
    nextPageLink: string,
    callback: msRest.ServiceCallback<Models.SignalRResourceList>
  ): void;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param options The optional parameters
   * @param callback The callback
   */
  listBySubscriptionNext(
    nextPageLink: string,
    options: msRest.RequestOptionsBase,
    callback: msRest.ServiceCallback<Models.SignalRResourceList>
  ): void;
  listBySubscriptionNext(
    nextPageLink: string,
    options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.SignalRResourceList>,
    callback?: msRest.ServiceCallback<Models.SignalRResourceList>
  ): Promise<Models.SignalRListBySubscriptionNextResponse> {
    return this.client.sendOperationRequest(
      {
        nextPageLink,
        options
      },
      listBySubscriptionNextOperationSpec,
      callback
    ) as Promise<Models.SignalRListBySubscriptionNextResponse>;
  }
  /**
   * Handles requests to list all resources in a resource group.
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param [options] The optional parameters
   * @returns Promise<Models.SignalRListByResourceGroupNextResponse>
   */
  listByResourceGroupNext(
    nextPageLink: string,
    options?: msRest.RequestOptionsBase
  ): Promise<Models.SignalRListByResourceGroupNextResponse>;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param callback The callback
   */
  listByResourceGroupNext(
    nextPageLink: string,
    callback: msRest.ServiceCallback<Models.SignalRResourceList>
  ): void;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param options The optional parameters
   * @param callback The callback
   */
  listByResourceGroupNext(
    nextPageLink: string,
    options: msRest.RequestOptionsBase,
    callback: msRest.ServiceCallback<Models.SignalRResourceList>
  ): void;
  listByResourceGroupNext(
    nextPageLink: string,
    options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.SignalRResourceList>,
    callback?: msRest.ServiceCallback<Models.SignalRResourceList>
  ): Promise<Models.SignalRListByResourceGroupNextResponse> {
    return this.client.sendOperationRequest(
      {
        nextPageLink,
        options
      },
      listByResourceGroupNextOperationSpec,
      callback
    ) as Promise<Models.SignalRListByResourceGroupNextResponse>;
  }
}
// Operation Specifications
const serializer = new msRest.Serializer(Mappers);
const checkNameAvailabilityOperationSpec: msRest.OperationSpec = {
httpMethod: "POST",
path:
"subscriptions/{subscriptionId}/providers/Microsoft.SignalRService/locations/{location}/checkNameAvailability",
urlParameters: [Parameters.location, Parameters.subscriptionId],
queryParameters: [Parameters.apiVersion],
headerParameters: [Parameters.acceptLanguage],
requestBody: {
parameterPath: "parameters",
mapper: {
...Mappers.NameAvailabilityParameters,
required: true
}
},
responses: {
200: {
bodyMapper: Mappers.NameAvailability
},
default: {
bodyMapper: Mappers.ErrorResponse
}
},
serializer
};
const listBySubscriptionOperationSpec: msRest.OperationSpec = {
httpMethod: "GET",
path: "subscriptions/{subscriptionId}/providers/Microsoft.SignalRService/signalR",
urlParameters: [Parameters.subscriptionId],
queryParameters: [Parameters.apiVersion],
headerParameters: [Parameters.acceptLanguage],
responses: {
200: {
bodyMapper: Mappers.SignalRResourceList
},
default: {
bodyMapper: Mappers.ErrorResponse
}
},
serializer
};
const listByResourceGroupOperationSpec: msRest.OperationSpec = {
httpMethod: "GET",
path:
"subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SignalRService/signalR",
urlParameters: [Parameters.subscriptionId, Parameters.resourceGroupName],
queryParameters: [Parameters.apiVersion],
headerParameters: [Parameters.acceptLanguage],
responses: {
200: {
bodyMapper: Mappers.SignalRResourceList
},
default: {
bodyMapper: Mappers.ErrorResponse
}
},
serializer
};
// GET — fetch a single SignalR resource.
const getOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path:
    "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SignalRService/signalR/{resourceName}",
  urlParameters: [Parameters.subscriptionId, Parameters.resourceGroupName, Parameters.resourceName],
  queryParameters: [Parameters.apiVersion],
  headerParameters: [Parameters.acceptLanguage],
  responses: {
    200: {
      bodyMapper: Mappers.SignalRResource
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
// POST .../listKeys — retrieves the access keys (POST because keys are secrets).
const listKeysOperationSpec: msRest.OperationSpec = {
  httpMethod: "POST",
  path:
    "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SignalRService/signalR/{resourceName}/listKeys",
  urlParameters: [Parameters.subscriptionId, Parameters.resourceGroupName, Parameters.resourceName],
  queryParameters: [Parameters.apiVersion],
  headerParameters: [Parameters.acceptLanguage],
  responses: {
    200: {
      bodyMapper: Mappers.SignalRKeys
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
// PUT — long-running create/update; 200/201 carry the resource, 202 means "accepted, poll".
const beginCreateOrUpdateOperationSpec: msRest.OperationSpec = {
  httpMethod: "PUT",
  path:
    "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SignalRService/signalR/{resourceName}",
  urlParameters: [Parameters.subscriptionId, Parameters.resourceGroupName, Parameters.resourceName],
  queryParameters: [Parameters.apiVersion],
  headerParameters: [Parameters.acceptLanguage],
  requestBody: {
    parameterPath: "parameters",
    mapper: {
      ...Mappers.SignalRResource,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.SignalRResource
    },
    201: {
      bodyMapper: Mappers.SignalRResource
    },
    202: {},
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
// DELETE — long-running delete; 204 covers the already-deleted case.
const beginDeleteMethodOperationSpec: msRest.OperationSpec = {
  httpMethod: "DELETE",
  path:
    "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SignalRService/signalR/{resourceName}",
  urlParameters: [Parameters.subscriptionId, Parameters.resourceGroupName, Parameters.resourceName],
  queryParameters: [Parameters.apiVersion],
  headerParameters: [Parameters.acceptLanguage],
  responses: {
    200: {},
    202: {},
    204: {},
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
// PATCH — long-running partial update of a SignalR resource.
const beginUpdateOperationSpec: msRest.OperationSpec = {
  httpMethod: "PATCH",
  path:
    "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SignalRService/signalR/{resourceName}",
  urlParameters: [Parameters.subscriptionId, Parameters.resourceGroupName, Parameters.resourceName],
  queryParameters: [Parameters.apiVersion],
  headerParameters: [Parameters.acceptLanguage],
  requestBody: {
    parameterPath: "parameters",
    mapper: {
      ...Mappers.SignalRResource,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.SignalRResource
    },
    202: {},
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
// POST .../regenerateKey — long-running key regeneration.
// NOTE(review): generated from the API spec — the 202 (not 200) response carries SignalRKeys here.
const beginRegenerateKeyOperationSpec: msRest.OperationSpec = {
  httpMethod: "POST",
  path:
    "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SignalRService/signalR/{resourceName}/regenerateKey",
  urlParameters: [Parameters.subscriptionId, Parameters.resourceGroupName, Parameters.resourceName],
  queryParameters: [Parameters.apiVersion],
  headerParameters: [Parameters.acceptLanguage],
  requestBody: {
    parameterPath: "parameters",
    mapper: {
      ...Mappers.RegenerateKeyParameters,
      required: true
    }
  },
  responses: {
    202: {
      bodyMapper: Mappers.SignalRKeys
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
// POST .../restart — long-running restart; no response body on success.
const beginRestartOperationSpec: msRest.OperationSpec = {
  httpMethod: "POST",
  path:
    "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SignalRService/signalR/{resourceName}/restart",
  urlParameters: [Parameters.subscriptionId, Parameters.resourceGroupName, Parameters.resourceName],
  queryParameters: [Parameters.apiVersion],
  headerParameters: [Parameters.acceptLanguage],
  responses: {
    202: {},
    204: {},
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
// GET {nextLink} — follows the service-supplied continuation URL for listBySubscription paging.
const listBySubscriptionNextOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  baseUrl: "https://management.azure.com",
  path: "{nextLink}",
  urlParameters: [Parameters.nextPageLink],
  queryParameters: [Parameters.apiVersion],
  headerParameters: [Parameters.acceptLanguage],
  responses: {
    200: {
      bodyMapper: Mappers.SignalRResourceList
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
// GET {nextLink} — follows the continuation URL for listByResourceGroup paging.
const listByResourceGroupNextOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  baseUrl: "https://management.azure.com",
  path: "{nextLink}",
  urlParameters: [Parameters.nextPageLink],
  queryParameters: [Parameters.apiVersion],
  headerParameters: [Parameters.acceptLanguage],
  responses: {
    200: {
      bodyMapper: Mappers.SignalRResourceList
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  serializer
};
import { Injectable } from '@angular/core';
import { CoreSites } from '@services/sites';
import { CoreMimetypeUtils } from '@services/utils/mimetype';
import { CoreWSExternalWarning } from '@services/ws';
import { CoreSite } from '@classes/site';
import { makeSingleton } from '@singletons';
// Prefix shared by every WS-cache key written by this service.
const ROOT_CACHE_KEY = 'mmaFiles:';
/**
 * Service to handle my files and site files.
 */
@Injectable({ providedIn: 'root' })
export class AddonPrivateFilesProvider {

    // Keep old names for backwards compatibility.
    static readonly PRIVATE_FILES_COMPONENT = 'mmaFilesMy';
    static readonly SITE_FILES_COMPONENT = 'mmaFilesSite';

    /**
     * Check if core_user_get_private_files_info WS call is available.
     *
     * @return Whether the WS is available, false otherwise.
     */
    canGetPrivateFilesInfo(): boolean {
        return CoreSites.wsAvailableInCurrentSite('core_user_get_private_files_info');
    }

    /**
     * Check if user can view his private files.
     *
     * @return Whether the user can view his private files.
     */
    canViewPrivateFiles(): boolean {
        const currentSite = CoreSites.getCurrentSite();
        if (!currentSite) {
            return false;
        }

        return currentSite.canAccessMyFiles() && !this.isPrivateFilesDisabledInSite();
    }

    /**
     * Check if user can view site files.
     *
     * @return Whether the user can view site files.
     */
    canViewSiteFiles(): boolean {
        return !this.isSiteFilesDisabledInSite();
    }

    /**
     * Check if user can upload private files.
     *
     * @return Whether the user can upload private files.
     */
    canUploadFiles(): boolean {
        const currentSite = CoreSites.getCurrentSite();
        if (!currentSite) {
            return false;
        }

        return currentSite.canAccessMyFiles() && currentSite.canUploadFiles() && !this.isUploadDisabledInSite();
    }

    /**
     * Get the list of files.
     *
     * @param params A list of parameters accepted by the Web service.
     * @param siteId Site ID. If not defined, current site.
     * @return Promise resolved with the files.
     */
    async getFiles(params: AddonPrivateFilesGetFilesWSParams, siteId?: string): Promise<AddonPrivateFilesFile[]> {
        const site = await CoreSites.getSite(siteId);

        const preSets = {
            cacheKey: this.getFilesListCacheKey(params),
            updateFrequency: CoreSite.FREQUENCY_SOMETIMES,
        };

        const result: AddonPrivateFilesGetFilesWSResult = await site.read('core_files_get_files', params, preSets);

        if (!result.files) {
            return [];
        }

        // Normalize each entry: copy the WS url into fileurl and attach an icon path.
        return result.files.map((entry) => {
            entry.fileurl = entry.url;

            if (entry.isdir) {
                entry.imgPath = CoreMimetypeUtils.getFolderIcon();
            } else {
                entry.imgPath = CoreMimetypeUtils.getFileIcon(entry.filename);
            }

            return entry;
        });
    }

    /**
     * Get cache key for file list WS calls.
     *
     * @param params Params of the WS.
     * @return Cache key.
     */
    protected getFilesListCacheKey(params: AddonPrivateFilesGetFilesWSParams): string {
        const root = !params.component ? 'site' : 'my';

        return ROOT_CACHE_KEY + 'list:' + root + ':' + params.contextid + ':' + params.filepath;
    }

    /**
     * Get the private files of the current user.
     *
     * @return Promise resolved with the files.
     */
    getPrivateFiles(): Promise<AddonPrivateFilesFile[]> {
        return this.getFiles(this.getPrivateFilesRootParams());
    }

    /**
     * Get params to get root private files directory.
     *
     * @return Params.
     */
    protected getPrivateFilesRootParams(): AddonPrivateFilesGetFilesWSParams {
        return {
            contextid: -1,
            component: 'user',
            filearea: 'private',
            contextlevel: 'user',
            instanceid: CoreSites.getCurrentSite()?.getUserId(),
            itemid: 0,
            filepath: '',
            filename: '',
        };
    }

    /**
     * Get private files info.
     *
     * @param userId User ID. If not defined, current user in the site.
     * @param siteId Site ID. If not defined, use current site.
     * @return Promise resolved with the info.
     */
    async getPrivateFilesInfo(userId?: number, siteId?: string): Promise<AddonPrivateFilesGetUserInfoWSResult> {
        const site = await CoreSites.getSite(siteId);

        userId = userId || site.getUserId();

        const params: AddonPrivateFilesGetUserInfoWSParams = {
            userid: userId,
        };
        const preSets = {
            cacheKey: this.getPrivateFilesInfoCacheKey(userId),
            updateFrequency: CoreSite.FREQUENCY_SOMETIMES,
        };

        return site.read('core_user_get_private_files_info', params, preSets);
    }

    /**
     * Get the cache key for private files info WS calls.
     *
     * @param userId User ID.
     * @return Cache key.
     */
    protected getPrivateFilesInfoCacheKey(userId: number): string {
        return this.getPrivateFilesInfoCommonCacheKey() + ':' + userId;
    }

    /**
     * Get the common part of the cache keys for private files info WS calls.
     *
     * @return Cache key.
     */
    protected getPrivateFilesInfoCommonCacheKey(): string {
        return ROOT_CACHE_KEY + 'privateInfo';
    }

    /**
     * Get the site files.
     *
     * @return Promise resolved with the files.
     */
    getSiteFiles(): Promise<AddonPrivateFilesFile[]> {
        return this.getFiles(this.getSiteFilesRootParams());
    }

    /**
     * Get params to get root site files directory.
     *
     * @return Params.
     */
    protected getSiteFilesRootParams(): AddonPrivateFilesGetFilesWSParams {
        return {
            contextid: 0,
            component: '',
            filearea: '',
            itemid: 0,
            filepath: '',
            filename: '',
        };
    }

    /**
     * Invalidates list of files in a certain directory.
     *
     * @param root Root of the directory ('my' for private files, 'site' for site files).
     * @param params Params to the directory.
     * @param siteId Site ID. If not defined, use current site.
     * @return Promise resolved when the data is invalidated.
     */
    async invalidateDirectory(root?: 'my' | 'site', params?: AddonPrivateFilesGetFilesWSParams, siteId?: string): Promise<void> {
        if (!root) {
            return;
        }

        if (!params) {
            if (root === 'site') {
                params = this.getSiteFilesRootParams();
            } else {
                params = this.getPrivateFilesRootParams();
            }
        }

        const site = await CoreSites.getSite(siteId);

        await site.invalidateWsCacheForKey(this.getFilesListCacheKey(params));
    }

    /**
     * Invalidates private files info for all users.
     *
     * @param siteId Site ID. If not defined, use current site.
     * @return Promise resolved when the data is invalidated.
     */
    async invalidatePrivateFilesInfo(siteId?: string): Promise<void> {
        const site = await CoreSites.getSite(siteId);

        await site.invalidateWsCacheForKeyStartingWith(this.getPrivateFilesInfoCommonCacheKey());
    }

    /**
     * Invalidates private files info for a certain user.
     *
     * @param userId User ID. If not defined, current user in the site.
     * @param siteId Site ID. If not defined, use current site.
     * @return Promise resolved when the data is invalidated.
     */
    async invalidatePrivateFilesInfoForUser(userId?: number, siteId?: string): Promise<void> {
        const site = await CoreSites.getSite(siteId);

        await site.invalidateWsCacheForKey(this.getPrivateFilesInfoCacheKey(userId || site.getUserId()));
    }

    /**
     * Check if Files is disabled in a certain site.
     *
     * @param siteId Site Id. If not defined, use current site.
     * @return Promise resolved with true if disabled, rejected or resolved with false otherwise.
     */
    async isDisabled(siteId?: string): Promise<boolean> {
        const site = await CoreSites.getSite(siteId);

        return this.isDisabledInSite(site);
    }

    /**
     * Check if Files is disabled in a certain site.
     *
     * @param site Site. If not defined, use current site.
     * @return Whether it's disabled.
     */
    isDisabledInSite(site?: CoreSite): boolean {
        // Parameter made optional and the result guarded so this method matches its
        // siblings (isPrivateFilesDisabledInSite, etc.) and cannot throw when no site
        // is passed and there is no current site.
        site = site || CoreSites.getCurrentSite();

        return !!site && site.isFeatureDisabled('CoreMainMenuDelegate_AddonPrivateFiles');
    }

    /**
     * Return whether or not the plugin is enabled.
     *
     * @return True if enabled, false otherwise.
     */
    isPluginEnabled(): boolean {
        return this.canViewPrivateFiles() || this.canViewSiteFiles() || this.canUploadFiles();
    }

    /**
     * Check if private files is disabled in a certain site.
     *
     * @param siteId Site Id. If not defined, use current site.
     * @return Promise resolved with true if disabled, rejected or resolved with false otherwise.
     */
    async isPrivateFilesDisabled(siteId?: string): Promise<boolean> {
        const site = await CoreSites.getSite(siteId);

        return this.isPrivateFilesDisabledInSite(site);
    }

    /**
     * Check if private files is disabled in a certain site.
     *
     * @param site Site. If not defined, use current site.
     * @return Whether it's disabled.
     */
    isPrivateFilesDisabledInSite(site?: CoreSite): boolean {
        site = site || CoreSites.getCurrentSite();

        return !!site && site.isFeatureDisabled('AddonPrivateFilesPrivateFiles');
    }

    /**
     * Check if site files is disabled in a certain site.
     *
     * @param siteId Site Id. If not defined, use current site.
     * @return Promise resolved with true if disabled, rejected or resolved with false otherwise.
     */
    async isSiteFilesDisabled(siteId?: string): Promise<boolean> {
        const site = await CoreSites.getSite(siteId);

        return this.isSiteFilesDisabledInSite(site);
    }

    /**
     * Check if site files is disabled in a certain site.
     *
     * @param site Site. If not defined, use current site.
     * @return Whether it's disabled.
     */
    isSiteFilesDisabledInSite(site?: CoreSite): boolean {
        site = site || CoreSites.getCurrentSite();

        return !!site && site.isFeatureDisabled('AddonPrivateFilesSiteFiles');
    }

    /**
     * Check if upload files is disabled in a certain site.
     *
     * @param siteId Site Id. If not defined, use current site.
     * @return Promise resolved with true if disabled, rejected or resolved with false otherwise.
     */
    async isUploadDisabled(siteId?: string): Promise<boolean> {
        const site = await CoreSites.getSite(siteId);

        return this.isUploadDisabledInSite(site);
    }

    /**
     * Check if upload files is disabled in a certain site.
     *
     * @param site Site. If not defined, use current site.
     * @return Whether it's disabled.
     */
    isUploadDisabledInSite(site?: CoreSite): boolean {
        site = site || CoreSites.getCurrentSite();

        return !!site && site.isFeatureDisabled('AddonPrivateFilesUpload');
    }

    /**
     * Move a file from draft area to private files.
     *
     * @param draftId The draft area ID of the file.
     * @param siteId ID of the site. If not defined, use current site.
     * @return Promise resolved in success, rejected otherwise.
     */
    async moveFromDraftToPrivate(draftId: number, siteId?: string): Promise<null> {
        const params: AddonPrivateFilesAddUserPrivateFilesWSParams = {
            draftid: draftId,
        };
        const preSets = {
            responseExpected: false,
        };

        const site = await CoreSites.getSite(siteId);

        return site.write('core_user_add_user_private_files', params, preSets);
    }

    /**
     * Check the Moodle version in order to check if upload files is working.
     *
     * @param siteId Site ID. If not defined, use current site.
     * @return Promise resolved with true if WS is working, false otherwise.
     */
    async versionCanUploadFiles(siteId?: string): Promise<boolean> {
        const site = await CoreSites.getSite(siteId);

        // Upload private files doesn't work for Moodle 3.1.0 due to a bug.
        return site.isVersionGreaterEqualThan('3.1.1');
    }

}
// Singleton instance used throughout the app instead of injecting the provider.
export const AddonPrivateFiles = makeSingleton(AddonPrivateFilesProvider);

/**
 * File data returned by core_files_get_files.
 */
export type AddonPrivateFilesFile = {
    contextid: number;
    component: string;
    filearea: string;
    itemid: number;
    filepath: string;
    filename: string;
    isdir: boolean; // Whether the entry is a directory.
    url: string;
    timemodified: number;
    timecreated?: number; // Time created.
    filesize?: number; // File size.
    author?: string; // File owner.
    license?: string; // File license.
} & AddonPrivateFilesFileCalculatedData;

/**
 * Calculated data for AddonPrivateFilesFile (filled in by getFiles, not by the WS).
 */
export type AddonPrivateFilesFileCalculatedData = {
    fileurl: string; // File URL, using same name as CoreWSExternalFile.
    imgPath?: string; // Path to file icon's image.
};

/**
 * Params of WS core_files_get_files.
 */
export type AddonPrivateFilesGetFilesWSParams = {
    contextid: number; // Context id Set to -1 to use contextlevel and instanceid.
    component: string; // Component.
    filearea: string; // File area.
    itemid: number; // Associated id.
    filepath: string; // File path.
    filename: string; // File name.
    modified?: number; // Timestamp to return files changed after this time.
    contextlevel?: string; // The context level for the file location.
    instanceid?: number; // The instance id for where the file is located.
};

/**
 * Result of WS core_files_get_files.
 */
export type AddonPrivateFilesGetFilesWSResult = {
    parents: {
        contextid: number;
        component: string;
        filearea: string;
        itemid: number;
        filepath: string;
        filename: string;
    }[];
    files: AddonPrivateFilesFile[];
};

/**
 * Params of core_user_get_private_files_info WS.
 */
type AddonPrivateFilesGetUserInfoWSParams = {
    userid?: number; // Id of the user, default to current user.
};

/**
 * Data returned by core_user_get_private_files_info WS.
 */
export type AddonPrivateFilesGetUserInfoWSResult = {
    filecount: number; // Number of files in the area.
    foldercount: number; // Number of folders in the area.
    filesize: number; // Total size of the files in the area.
    filesizewithoutreferences: number; // Total size of the area excluding file references.
    warnings?: CoreWSExternalWarning[];
};

/**
 * Params of core_user_add_user_private_files WS.
 */
type AddonPrivateFilesAddUserPrivateFilesWSParams = {
    draftid: number; // Draft area id.
};
/**
 * A tagged value: a discriminating `tag` string paired with a payload `value`.
 */
export interface Variant<Tag extends string = string, Value = undefined> {
    readonly tag: Tag
    readonly value: Value
}
/**
 * Utility type which allows any {@link Variant} to be assigned to it.
 */
export type AnyVariant = Variant<string, unknown>
/**
 * Creates a new {@link Variant} instance whose value is undefined.
 * @param tag
 */
export function tag<Tag extends string>(tag: Tag): Variant<Tag>
/**
 * Creates a new {@link Variant} instance.
 * @param tag
 * @param value
 */
export function tag<Tag extends string, Value>(tag: Tag, value: Value): Variant<Tag, Value>
export function tag(name: string, value?: unknown): AnyVariant {
    // For the single-argument overload, `value` is simply left undefined.
    return { tag: name, value }
}
/**
 * Extracts the value from a {@link Variant} instance.
 * @param variant the tagged value to unwrap
 * @returns the variant's payload
 */
export function untag<Value>(variant: Variant<string, Value>): Value {
    const { value } = variant
    return value
}
/**
 * Utility type for extracting the possible values for {@link Variant#tag}
 * from a union of {@link Variant}s.
 *
 * @example
 * type Union =
 * | Variant<"1">
 * | Variant<"2">
 *
 * // Equals: "1" | "2"
 * type UnionTags = Tags<Union>
 */
export type Tags<Var extends AnyVariant> = Var["tag"]
/**
 * Utility type for extracting the possible types for {@link Variant#value}
 * from a union of {@link Variant}s.
 *
 * @example
 * type Union =
 * | Variant<"1", string>
 * | Variant<"2", number>
 *
 * // Equals: string | number
 * type UnionValues = Values<Var>
 */
export type Values<Var extends AnyVariant> = Var["value"]
/**
 * Utility type for narrowing down a union of {@link Variant}s based on their tags.
 *
 * Implemented with the built-in Extract utility, keyed on the tag literal.
 *
 * @example
 * type Union =
 * | Variant<"1", 1>
 * | Variant<"2", 2>
 * | Variant<"3", 3>
 *
 * // Equals: Variant<"1", 1> | Variant<"3", 3>
 * type Narrowed = Narrow<Union, "1" | "3">
 */
export type Narrow<Var extends AnyVariant, Tag extends Tags<Var>> = Extract<
    Var,
    Variant<Tag, unknown>
>
/**
 * Type guard which narrows down a {@link Variant} union by checking its tag.
 * @param variant the variant under inspection
 * @param tag the tag literal to test against
 * @returns true when the variant carries exactly that tag
 * @example
 * type Union =
 * | Variant<"1", number>
 * | Variant<"2", string>
 *
 * function doSomething(union: Union) {
 * // union.value has type number | string
 *
 * if (hasTag(union, "1")) {
 * // union.value has type number now
 * }
 * }
 */
export function hasTag<Var extends AnyVariant, Tag extends Tags<Var>>(
    variant: Var,
    tag: Tag,
): variant is Narrow<Var, Tag> {
    // Strict equality on the discriminant is all that is needed.
    return tag === variant.tag
}
/**
 * Type of a function which narrows down the type of a given {@link Variant}.
 */
export type Predicate<Tag extends string> = <Var extends AnyVariant>(
    variant: Var,
) => variant is Narrow<Var, Tag>
/**
 * Builds a reusable type guard bound to a single tag.
 * @param tag the tag the returned guard tests for
 * @returns a predicate suitable for e.g. Array#filter
 * @example
 * type Union =
 * | Variant<"1", number>
 * | Variant<"2", string>
 *
 * function doSomething(list: Union[]) {
 * // filtered has type Variant<"1", number>[]
 * const filtered = list.filter(predicate("1"))
 * }
 */
export function predicate<Tag extends string>(tag: Tag): Predicate<Tag> {
    // Close over the tag and delegate the actual check to hasTag.
    return function <Var extends AnyVariant>(variant: Var): variant is Narrow<Var, Tag> {
        return hasTag(variant, tag)
    }
}
/**
 * Symbol for declaring a wildcard case in a {@link match} expression.
 */
export const WILDCARD = Symbol("Match Wildcard")
/**
 * Utility type for ensuring that a {@link matchExhaustive} expression covers all cases.
 */
export type CasesExhaustive<Var extends AnyVariant, Ret = unknown> = {
    [Tag in Tags<Var>]: (value: Values<Narrow<Var, Tag>>) => Ret
}
/**
 * Utility type for enabling a {@link matchWildcard} expression to cover only some cases,
 * as long as, a wildcard case is declared for matching the remaining cases.
 */
export type CasesWildcard<Var extends AnyVariant, Ret = unknown> = Partial<
    CasesExhaustive<Var, Ret>
> & { [WILDCARD]: () => Ret }
/**
 * Utility type for ensuring that a {@link match} expression either covers all cases,
 * or contains a wildcard for matching the remaining cases.
 */
export type Cases<Var extends AnyVariant, Ret = unknown> =
    | CasesExhaustive<Var, Ret>
    | CasesWildcard<Var, Ret>
/**
 * Utility type for inferring the return type of a {@link match} expression.
 */
export type CasesReturn<Var extends AnyVariant, C extends Cases<Var>> = C extends Cases<
    Var,
    infer Ret
>
    ? Ret
    : never
/**
 * Internal helper type which accepts any Cases object.
 */
interface AnyCases {
    [tag: string]: ((value: unknown) => unknown) | undefined
    [WILDCARD]?: () => unknown
}
/**
 * Function for matching on the tag of a {@link Variant}.
 * All possible cases need to be covered, unless a wildcard case is present.
 * @param variant
 * @param cases
 * @throws Error when no case (and no wildcard) matches the variant's tag.
 * @example
 * type Union =
 * | Variant<"Num", number>
 * | Variant<"Str", string>
 * | Variant<"Bool", boolean>
 *
 * function doSomething(union: Union) {
 * return match(union, {
 * Num: number => number * number,
 * Str: string => `Hello, ${string}!`,
 * Bool: boolean => !boolean,
 * })
 * }
 *
 * function doSomethingElse(union: Union) {
 * return match(union, {
 * Str: string => `Hello, ${string}!`,
 * [WILDCARD]: () => "Hello there!",
 * })
 * }
 * @deprecated Use {@link matchExhaustive} or {@link matchWildcard} instead.
 */
export function match<Var extends AnyVariant, C extends Cases<Var>>(
    variant: Var,
    cases: C,
): CasesReturn<Var, C>
export function match(variant: AnyVariant, cases: AnyCases): unknown {
    const caseFn = cases[variant.tag]
    if (caseFn) {
        return caseFn(variant.value)
    }
    const wildcardFn = cases[WILDCARD]
    if (wildcardFn) {
        return wildcardFn()
    }
    // BUG FIX: previously interpolated `${tag}` — the module-level `tag` *function* —
    // instead of the variant's own tag, producing a garbled error message.
    throw new Error(`No case matched tag ${variant.tag}.`)
}
/**
 * Helper type to restrict the possible keys of a type.
 *
 * This is useful for {@link matchExhaustive} and {@link matchWildcard} where the cases argument
 * needs to be generic to infer the correct return type.
 * However, due to the argument being generic it is allowed to pass extra properties.
 * Passing extra arguments is probably a spelling mistake.
 * Therefore, we restrict the properties by setting extra properties to never.
 *
 * Typescript 4.2 will show a nice hint asking whether you've misspelled the property name.
 */
// Purely a compile-time check: any key outside AllowedProperties maps to `never`,
// so an object literal with an extra key fails to type-check.
export type ValidateProperties<T, AllowedProperties extends keyof T> = {
    [_ in Exclude<keyof T, AllowedProperties>]: never
}
/**
 * Function for matching on the tag of a {@link Variant}.
 * All possible cases need to be covered.
 * @param variant
 * @param cases
 * @example
 * type Union =
 * | Variant<"Num", number>
 * | Variant<"Str", string>
 * | Variant<"Bool", boolean>
 *
 * function doSomething(union: Union) {
 * return matchExhaustive(union, {
 * Num: number => number * number,
 * Str: string => `Hello, ${string}!`,
 * Bool: boolean => !boolean,
 * })
 * }
 */
// Runtime behavior is identical to match(); the value of this wrapper is purely
// at the type level (exhaustiveness + excess-property checking via ValidateProperties).
export function matchExhaustive<Var extends AnyVariant, Cases extends CasesExhaustive<Var>>(
    variant: Var,
    cases: Cases & ValidateProperties<Cases, keyof CasesExhaustive<Var>>,
): CasesReturn<Var, Cases> {
    return match(variant, cases)
}
/**
 * Function for matching on the tag of a {@link Variant}.
 * Not all cases need to be covered, a wildcard case needs to be present.
 * @param variant
 * @param cases
 * @example
 * type Union =
 * | Variant<"Num", number>
 * | Variant<"Str", string>
 * | Variant<"Bool", boolean>
 *
 * function doSomething(union: Union) {
 * return matchWildcard(union, {
 * Str: string => `Hello, ${string}!`,
 * [WILDCARD]: () => "Hello there!",
 * })
 * }
 */
// Same as above: a type-safe facade over match() that requires a wildcard case.
export function matchWildcard<Var extends AnyVariant, Cases extends CasesWildcard<Var>>(
    variant: Var,
    cases: Cases & ValidateProperties<Cases, keyof CasesWildcard<Var>>,
): CasesReturn<Var, Cases> {
    return match(variant, cases)
}
/**
 * Utility function for asserting that all cases have been covered.
 * The parameter is typed `never`, so reaching this call with a still-inhabited
 * type is a compile error — and reaching it at runtime always throws.
 * @param variant
 * @example
 * type Union =
 * | Variant<"1", string>
 * | Variant<"2", number>
 *
 * function doSomething(union: Union) {
 * switch(union.tag) {
 * case "1":
 * alert(union.value)
 * break
 * case "2":
 * alert(union.value.toFixed(0))
 * break
 * default:
 * // compile error if we've forgotten a case
 * assertNever(union)
 * }
 * }
 */
export function assertNever(variant: never): never {
    const unreachable = new Error("Unreachable state reached!")
    throw unreachable
}
/**
 * Type which specifies the constructor for a variant type.
 */
// The conditional on `Value extends undefined` lets the value argument be omitted
// entirely for variants whose payload is undefined.
export type Constructor<Tag extends string, Value> = <T extends Value>(
    value: Value extends undefined ? T | void : T,
) => Variant<Tag, T>
/**
 * Type which specifies the strict constructor for a variant type.
 * It does not support generics.
 */
export type StrictConstructor<Tag extends string, Value> = (
    value: Value extends undefined ? Value | void : Value,
) => Variant<Tag, Value>
/**
 * Type which specifies the extra properties which are attached to a constructor.
 */
export interface ConstructorExtra<Tag extends string> {
    tag: Tag
    is: Predicate<Tag>
}
/**
 * Type which specifies the constructor for a variant type with attached type guard.
 */
export type ConstructorWithExtra<Tag extends string, Value> = Constructor<Tag, Value> &
    ConstructorExtra<Tag>
/**
 * Type which specifies the strict constructor for a variant type with attached type guard.
 * It does not support generics.
 */
export type StrictConstructorWithExtra<Tag extends string, Value> = StrictConstructor<Tag, Value> &
    ConstructorExtra<Tag>
/**
 * Function for creating a constructor for the given variant.
 *
 * In case the variant type uses unconstrained generics,
 * pass unknown as its type arguments.
 *
 * In case the variant type uses constrained generics,
 * pass the constraint type as its type arguments.
 *
 * Use {@link impl} instead if your environment has support for {@link Proxy}.
 *
 * @example
 * type Result<T, E> =
 * | Variant<"Ok", T>
 * | Variant<"Err", E>
 *
 * const Ok = constructor<Result<unknown, unknown>, "Ok">("Ok")
 * const Err = constructor<Result<unknown, unknown>, "Err">("Err")
 *
 * let result: Result<number, string>
 * result = Ok(42)
 * result = Err("Something went wrong")
 *
 * Ok.is(result) // false
 * Err.is(result) // true
 *
 * Ok.tag // "Ok"
 * Err.tag // "Err"
 */
export function constructor<Var extends AnyVariant, Tag extends Tags<Var>>(
    tagName: Tag,
): ConstructorWithExtra<Tag, Values<Narrow<Var, Tag>>> {
    function constructor<T>(value: T) {
        return tag(tagName, value)
    }
    // Decorate the constructor with its tag literal and a matching type guard.
    return Object.assign(constructor, {
        tag: tagName,
        is: predicate(tagName),
    })
}
/**
 * Same as {@link constructor}, but does not support generics.
 * @param tagName
 */
// Identical at runtime to constructor(); only the declared (non-generic) type differs.
export function strictConstructor<Var extends AnyVariant, Tag extends Tags<Var>>(
    tagName: Tag,
): StrictConstructorWithExtra<Tag, Values<Narrow<Var, Tag>>> {
    return constructor(tagName)
}
/**
 * Type which specifies constructors and type guards for a variant type.
 */
// One ConstructorWithExtra per tag in the union.
export type Impl<Var extends AnyVariant> = {
    [Tag in Tags<Var>]: ConstructorWithExtra<Tag, Values<Narrow<Var, Tag>>>
}
/**
 * Type which specifies strict constructors and type guards for a variant type.
 * It does not support generics.
 */
export type StrictImpl<Var extends AnyVariant> = {
    [Tag in Tags<Var>]: StrictConstructorWithExtra<Tag, Values<Narrow<Var, Tag>>>
}
/**
 * Function for generating an implementation for the given variants.
 *
 * In case the variant type uses unconstrained generics,
 * pass unknown as its type arguments.
 *
 * In case the variant type uses constrained generics,
 * pass the constraint type as its type arguments.
 *
 * @example
 * type Result<T, E> =
 * | Variant<"Ok", T>
 * | Variant<"Err", E>
 *
 * const {Ok, Err} = impl<Result<unknown, unknown>>()
 *
 * let result: Result<number, string>
 * result = Ok(42)
 * result = Err("Something went wrong")
 *
 * Ok.is(result) // false
 * Err.is(result) // true
 *
 * Ok.tag // "Ok"
 * Err.tag // "Err"
 */
export function impl<Var extends AnyVariant>(): Impl<Var> {
    // Lazily manufacture a constructor for whichever tag name gets accessed.
    const target = {} as Impl<Var>
    return new Proxy(target, {
        get: <Tag extends keyof Impl<Var>>(_: Impl<Var>, tagName: Tag) => constructor<Var, Tag>(tagName),
    })
}
/**
 * Same as {@link impl}, but does not support generics.
 */
// Identical at runtime to impl(); only the declared (non-generic) type differs.
export function strictImpl<Var extends AnyVariant>(): StrictImpl<Var> {
    return impl()
}
import { MosaicComponent, MosaicOptions, ViewFunction, KeyedArray, InjectionPoint } from './options';
import Observable from './observable';
import Router from './router';
import Portfolio from './portfolio';
import { randomKey, nodeMarker, goUpToConfigureRouter, applyMixin, runLifecycle } from './util';
import { getTemplate, _repaint } from './templating';
export default function Mosaic(options: MosaicOptions): MosaicComponent {
// Configure some basic properties.
const copyOptions = Object.assign({}, options);
const tid: string = randomKey();
// Error checking.
if(typeof copyOptions.name !== 'string')
throw new Error('Name must be specified and must be a string.');
if((copyOptions as any).descendants)
throw new Error('You cannot directly set the "descendants" property on a component.');
// Define the custom element.
customElements.define(copyOptions.name, class extends MosaicComponent {
constructor() {
super();
// Setup initial Mosaic properties.
this.initiallyRendered = false;
this.tid = tid;
this.iid = randomKey();
this.data = new Observable(Object.assign({}, copyOptions.data || {}), old => {
if(this.barrier === true) return;
runLifecycle('willUpdate', this, old);
}, () => {
if(this.barrier === true) return;
this.repaint();
runLifecycle('updated', this);
});
// Configure all of the properties if they exist.
let _options = Object.keys(copyOptions);
for(let i = 0; i < _options.length; i++) {
let key = _options[i];
if(key === 'element') continue;
else if(key === 'data') continue;
else this[key] = options[key];
}
// Apply any mixins that are present in the options.
if(copyOptions.mixins) {
for(let i = 0; i < copyOptions.mixins.length; i++) {
this.barrier = true;
const mixin = copyOptions.mixins[i];
applyMixin(this, mixin);
this.barrier = false;
}
}
// See if you need to attach the shadow dom based on the options.
if(copyOptions.useShadow === true)
this._shadow = this.attachShadow({ mode: 'open' });
// Adoptable stylesheets.
// TODO: The array of stylesheets should be dynamic, so when you
// add/remove from the array it should trigegr a repaint.
if(copyOptions.stylesheets && this._shadow) {
let sheets: CSSStyleSheet[] = [];
for(let i = 0; i < copyOptions.stylesheets.length; i++) {
const ss = copyOptions.stylesheets[i];
if(ss instanceof CSSStyleSheet)
sheets.push(ss);
else if(typeof ss === 'string') {
const sheet = new CSSStyleSheet();
(sheet as any).replaceSync(ss);
sheets.push(sheet);
}
}
(this._shadow as any).adoptedStyleSheets = sheets;
}
}
connectedCallback() {
    // 1.) Remove any child nodes and save them to the descendants
    // property so that they can optionally be used later on.
    if(!this.initiallyRendered) {
        if(this.childNodes.length !== 0)
            this.descendants.append(...this.childNodes);
    }
    // 2.) Add portfolio dependency so global state changes reach this component.
    if(this.portfolio) this.portfolio.addDependency(this);
    // 3.) Clear any existing content that was in there before.
    if(!this.initiallyRendered) this.innerHTML = '';
    // 4.) Make sure we have the router property.
    goUpToConfigureRouter.call(this);
    // 5.) Find the template for this component, clone it into either the
    // shadow root or the element itself (first connection only), and repaint.
    const template = getTemplate(this);
    const cloned = document.importNode(template.content, true);
    if(!this.initiallyRendered) {
        if(this._shadow) this._shadow.appendChild(cloned);
        else this.appendChild(cloned);
    }
    this.repaint();
    // 6.) If there are any attributes present on this element at
    // connection time and they are not dynamic (i.e. their value does
    // not match the nodeMarker) then receive them: attributes whose name
    // matches a data property become data, everything else is passed to
    // the 'received' lifecycle function.
    if(this.initiallyRendered === false) {
        let receivedAttributes = {};
        let receivedData = {};
        for(let i = 0; i < this.attributes.length; i++) {
            const { name, value } = this.attributes[i];
            if(value === nodeMarker) continue;
            if(this.data.hasOwnProperty(name)) receivedData[name] = value;
            else receivedAttributes[name] = value;
        }
        // Send the attributes through lifecycle functions.
        if(Object.keys(receivedAttributes).length > 0)
            runLifecycle('received', this, receivedAttributes);
        // 7.) Save the new data (behind the barrier, so a single repaint
        // happens at the end instead of one per property) and repaint.
        if(Object.keys(receivedData).length > 0) {
            this.barrier = true;
            const keys = Object.keys(receivedData);
            for(let i = 0; i < keys.length; i++) {
                const key = keys[i];
                // Attribute values arrive as strings; coerce them to the
                // type of the existing data property where possible.
                if(typeof receivedData[key] === 'string') {
                    if(typeof this.data[key] === 'number')
                        this.data[key] = parseFloat(receivedData[key]);
                    else if(typeof this.data[key] === 'bigint')
                        this.data[key] = parseInt(receivedData[key]);
                    else if(typeof this.data[key] === 'boolean')
                        this.data[key] = receivedData[key] === 'true' ? true : false;
                    else if(Array.isArray(this.data[key])) {
                        // Single quotes are normalized so the attribute can
                        // contain JSON-ish array syntax.
                        const condensed = receivedData[key].replace(/'/gi, '"');
                        const parsed = JSON.parse(condensed);
                        this.data[key] = parsed;
                    } else if(typeof this.data[key] === 'object')
                        this.data[key] = JSON.parse(receivedData[key]);
                    else
                        this.data[key] = receivedData[key];
                } else {
                    this.data[key] = receivedData[key];
                }
            }
            this.barrier = false;
            this.repaint();
        }
    }
    // 8.) If you come here as a OTT from an array, then be sure to
    // repaint again. This is because with the way that the keyed
    // array patcher is currently set up, it will insert all the
    // nodes from a fragment (i.e. not in the DOM yet).
    if(this.hasOwnProperty('arrayOTT') && this.view) {
        const ott = this['arrayOTT'];
        const node = ott.instance;
        const mems = ott.memories;
        const vals = ott.values;
        _repaint(node, mems, [], vals, true);
    }
    // 9.) Make sure the component knows that it has been fully rendered
    // for the first time. This makes the router work. Then call the
    // created lifecycle function.
    runLifecycle('created', this);
    this.initiallyRendered = true;
}
disconnectedCallback() {
    // Detach from global state and notify the component it is going away.
    if(this.portfolio) this.portfolio.removeDependency(this);
    runLifecycle('willDestroy', this);
}
/**
 * Mounts this component into the DOM. The optional argument may be an
 * element id, a host element, or an object of initial data values.
 */
paint(arg?: string|HTMLElement|Object) {
    // Start from the configured injection point; an explicit target
    // argument (element id or element instance) overrides it.
    let target: InjectionPoint = copyOptions.element || (this as any).element;
    if(typeof arg === 'string') {
        target = document.getElementById(arg);
    } else if(arg instanceof HTMLElement) {
        target = arg;
    } else if(typeof arg === 'object') {
        // Not a target: treat the argument as an injection of data. Assign
        // behind the barrier so no repaint/lifecycle fires per property.
        this.barrier = true;
        for(const prop of Object.keys(arg)) {
            this.data[prop] = arg[prop];
        }
        this.barrier = false;
    }
    // Resolve an id string to its element, then append this component.
    const host = typeof target === 'string' ? document.getElementById(target) : target;
    if(!host)
        throw new Error(`Could not find the base element: ${copyOptions.element}.`);
    host.appendChild(this);
}
/**
 * Re-renders this component by diffing the previously rendered values
 * against freshly computed ones and patching the DOM in place.
 */
repaint() {
    // Resolve the compiled template and its "memories" (dynamic slots).
    const tmpl = getTemplate(this);
    const mems = (tmpl as any).memories;
    // Components without a view function have nothing to repaint.
    if(!this.view) return;
    // Patch either the shadow root (if present) or the element itself.
    const freshValues = this.view(this).values;
    const root = this._shadow ? this._shadow : this;
    _repaint(root, mems, this.oldValues, freshValues);
    this.oldValues = freshValues;
}
/**
 * Batch-assigns multiple data properties, then repaints once and runs the
 * 'updated' lifecycle. The barrier suppresses the per-property repaint that
 * the Observable would otherwise trigger for each assignment.
 */
set(data: {}) {
    this.barrier = true;
    for(const prop of Object.keys(data)) {
        this.data[prop] = data[prop];
    }
    this.barrier = false;
    this.repaint();
    runLifecycle('updated', this);
}
/**
 * Overloads setAttribute so that people using Mosaic components as plain
 * DOM nodes still get the 'received' lifecycle function called with the
 * attribute that changed.
 */
setAttribute(qualifiedName: string, value: any) {
    super.setAttribute(qualifiedName, value);
    runLifecycle('received', this, { [qualifiedName]: value });
}
});
const component = document.createElement(copyOptions.name);
return component as MosaicComponent;
}
/**
 * A function for efficiently rendering a list in a component. Produces a
 * KeyedArray: the per-item keys, the mapped views (each tagged with its
 * key), and a stringified form used for diffing.
 */
Mosaic.list = function(items: any[], key: Function, map: Function): KeyedArray {
    const keys = items.map((item, idx) => key(item, idx));
    const mapped = items.map((item, idx) => ({
        ...map(item, idx),
        key: keys[idx]
    }));
    const stringified = mapped.map(entry => JSON.stringify(entry));
    return { keys, items: mapped, stringified, __isKeyedArray: true };
}
// Expose Mosaic on the window object for script-tag consumers.
declare global {
    interface Window {
        Mosaic: typeof Mosaic;
    }
}
// Tagged-template entry point: captures the static strings and dynamic
// values of a view without rendering anything yet.
const html = (strings, ...values): ViewFunction => ({ strings, values, __isTemplate: true });
window.Mosaic = Mosaic;
export { html, Router, Portfolio };
import test from "tape";
import { aggregate } from "../../src";
import * as support from "../support";
// Table-driven spec: each entry is [operator input, expected output] and an
// optional options object (e.g. { err: true } when the operator must throw).
support.runTest("Type operators", {
  $toString: [
    [true, "true"],
    [false, "false"],
    [2.5, "2.5"],
    [new Date("2018-03-27T16:58:51.538Z"), "2018-03-27T16:58:51.538Z"],
  ],
  $convert: [
    // bool: any non-zero number and any string (even "false"/"") is true
    [{ input: true, to: "bool" }, true],
    [{ input: false, to: "bool" }, false],
    [{ input: 1.99999, to: "bool" }, true],
    [{ input: 5, to: "bool" }, true],
    [{ input: 0, to: "bool" }, false],
    [{ input: 100, to: "bool" }, true],
    [{ input: new Date("2018-03-26T04:38:28.044Z"), to: "bool" }, true],
    [{ input: "hello", to: "bool" }, true],
    [{ input: "false", to: "bool" }, true],
    [{ input: "", to: "bool" }, true],
    [{ input: null, to: "bool" }, null],
    // int/long: truncation toward zero; out-of-range errors unless onError is set
    [{ input: true, to: "int" }, 1],
    [{ input: false, to: "int" }, 0],
    [{ input: 1.99999, to: "int" }, 1],
    [{ input: 5.5, to: "int" }, 5],
    [{ input: 9223372036000.0, to: "int" }, "error", { err: true }],
    [{ input: 9223372036854775808.0, to: "long" }, "error", { err: true }],
    [
      {
        input: 9223372036000.0,
        to: "int",
        onError: "Could not convert to type integer.",
      },
      "Could not convert to type integer.",
    ],
    [{ input: 5000, to: "int" }, 5000],
    [{ input: 922337203600, to: "int" }, "error", { err: true }],
    [{ input: "-2", to: "int" }, -2],
    [{ input: "2.5", to: "int" }, "error", { err: true }],
    [{ input: null, to: "int" }, null],
    [{ input: "5e2", to: "long" }, 500],
    // decimal / double: dates convert to their epoch-millisecond value
    [{ input: true, to: "decimal" }, 1],
    [{ input: false, to: "decimal" }, 0],
    [{ input: "2.5", to: "decimal" }, 2.5],
    [{ input: 5, to: "decimal" }, 5],
    [{ input: 10000, to: "decimal" }, 10000],
    [{ input: "-5.5", to: "decimal" }, -5.5],
    [
      { input: new Date("2018-03-27T05:04:47.890Z"), to: "decimal" },
      1522127087890,
    ],
    [{ input: "5e10", to: "double" }, 50000000000],
    // date: numbers are epoch milliseconds (fractional part dropped);
    // unparseable strings error unless onError is provided
    [{ input: 120000000000.5, to: "date" }, new Date("1973-10-20T21:20:00Z")],
    [{ input: 1253372036000.5, to: "date" }, new Date("2009-09-19T14:53:56Z")],
    [{ input: 1100000000000, to: "date" }, new Date("2004-11-09T11:33:20Z")],
    [{ input: -1100000000000, to: "date" }, new Date("1935-02-22T12:26:40Z")],
    [{ input: "2018-03-03", to: "date" }, new Date("2018-03-03T00:00:00Z")],
    [
      { input: "2018-03-20 11:00:06 +0500", to: "date" },
      new Date("2018-03-20T06:00:06Z"),
    ],
    [{ input: "Friday", to: "date" }, "error", { err: true }],
    [
      {
        input: "Friday",
        to: "date",
        onError: "Could not convert to type date.",
      },
      "Could not convert to type date.",
    ],
    // string
    [{ input: true, to: "string" }, "true"],
    [{ input: false, to: "string" }, "false"],
    [{ input: 2.5, to: "string" }, "2.5"],
    [{ input: 2, to: "string" }, "2"],
    [{ input: 1000, to: "string" }, "1000"],
    [
      { input: new Date("2018-03-27T16:58:51.538Z"), to: "string" },
      "2018-03-27T16:58:51.538Z",
    ],
  ],
  // numeric strings, NaN and nullish values are NOT numbers
  $isNumber: [
    [-0.1, true],
    [0, true],
    [1, true],
    [1.1, true],
    ["0", false],
    [NaN, false],
    [null, false],
    [undefined, false],
  ],
});
test("Type Conversion: $toBool", (t) => {
  // Orders whose "shipped" flag arrives in several different types.
  const docs = [
    { _id: 1, item: "apple", qty: 5, shipped: true },
    { _id: 2, item: "pie", qty: 10, shipped: 0 },
    { _id: 3, item: "ice cream", shipped: 1 },
    { _id: 4, item: "almonds", qty: 2, shipped: "true" },
    { _id: 5, item: "pecans", shipped: "false" }, // Note: All strings convert to true
    { _id: 6, item: "nougat", shipped: "" }, // Note: All strings convert to true
  ];
  // $toBool maps every string (even "false" and "") to true, so those two
  // cases need explicit $switch branches before the plain conversion.
  const pipeline = [
    {
      $addFields: {
        convertedShippedFlag: {
          $switch: {
            branches: [
              { case: { $eq: ["$shipped", "false"] }, then: false },
              { case: { $eq: ["$shipped", ""] }, then: false },
            ],
            default: { $toBool: "$shipped" },
          },
        },
      },
    },
    // Keep only the unshipped orders.
    { $match: { convertedShippedFlag: false } },
  ];
  t.deepEqual(
    aggregate(docs, pipeline),
    [
      { _id: 2, item: "pie", qty: 10, shipped: 0, convertedShippedFlag: false },
      { _id: 5, item: "pecans", shipped: "false", convertedShippedFlag: false },
      { _id: 6, item: "nougat", shipped: "", convertedShippedFlag: false },
    ],
    "can apply $toBool"
  );
  t.end();
});
test("Type Conversion: $toLong", (t) => {
  // qty values are a deliberate mix of numbers and numeric strings.
  const docs = [
    { _id: 1, item: "apple", qty: 5 },
    { _id: 2, item: "pie", qty: "100" },
    { _id: 3, item: "ice cream", qty: 500 },
    { _id: 4, item: "almonds", qty: "50" },
  ];
  const expected = [
    { _id: 1, item: "apple", qty: 5, convertedQty: 5 },
    { _id: 2, item: "pie", qty: "100", convertedQty: 100 },
    { _id: 3, item: "ice cream", qty: 500, convertedQty: 500 },
    { _id: 4, item: "almonds", qty: "50", convertedQty: 50 },
  ];
  t.deepEqual(
    aggregate(docs, [{ $addFields: { convertedQty: { $toLong: "$qty" } } }]),
    expected,
    "can apply $toLong"
  );
  t.end();
});
test("Type Conversion Operators", (t) => {
let result = aggregate(
[
{ _id: 1, item: "apple", qty: 5, zipcode: 12345 },
{ _id: 2, item: "pie", qty: 10, zipcode: 11111 },
{ _id: 3, item: "ice cream", zipcode: "12345" },
{ _id: 4, item: "almonds", qty: 2, zipcode: "12345-0030" },
],
[
{ $addFields: { convertedZipCode: { $toString: "$zipcode" } } },
// Define stage to sort documents by the converted zipcode
{ $sort: { convertedZipCode: 1 } },
]
);
t.deepEqual(
[
{
_id: 2,
item: "pie",
qty: 10,
zipcode: 11111,
convertedZipCode: "11111",
},
{
_id: 1,
item: "apple",
qty: 5,
zipcode: 12345,
convertedZipCode: "12345",
},
{
_id: 3,
item: "ice cream",
zipcode: "12345",
convertedZipCode: "12345",
},
{
_id: 4,
item: "almonds",
qty: 2,
zipcode: "12345-0030",
convertedZipCode: "12345-0030",
},
],
result,
"can apply $toString operator"
);
// Testing $toInt, $toLong, $toDouble, $toDecimal
result = aggregate(
[
{ _id: 1, item: "apple", qty: 5, price: 10 },
{ _id: 2, item: "pie", qty: 10, price: 20.0 },
{ _id: 3, item: "ice cream", qty: 2, price: "4.99" },
{ _id: 4, item: "almonds", qty: 5, price: 5 },
],
[
// Define stage to add convertedPrice and convertedQty fields with the converted price and qty values
{
$addFields: {
convertedPrice: { $toDecimal: "$price" },
convertedQty: { $toInt: "$qty" },
},
},
// Define stage to calculate total price by multiplying convertedPrice and convertedQty fields
{
$project: {
item: 1,
totalPrice: { $multiply: ["$convertedPrice", "$convertedQty"] },
},
},
]
);
t.deepEqual(
[
{ _id: 1, item: "apple", totalPrice: 50.0 },
{ _id: 2, item: "pie", totalPrice: 200.0 },
{ _id: 3, item: "ice cream", totalPrice: 9.98 },
{ _id: 4, item: "almonds", totalPrice: 25.0 },
],
result,
"can apply $toInt/$toLong and $toDouble/$toDecimal"
);
result = aggregate(
[
{ _id: 1, item: "apple", qty: 5, order_date: new Date("2018-03-10") },
{ _id: 2, item: "pie", qty: 10, order_date: new Date("2018-03-12") },
{
_id: 3,
item: "ice cream",
qty: 2,
price: "4.99",
order_date: "2018-03-05",
},
{ _id: 4, item: "almonds", qty: 5, price: 5, order_date: "2018-03-05" },
],
[
// Define stage to add convertedDate field with the converted order_date value
{ $addFields: { convertedDate: { $toDate: "$order_date" } } },
// Define stage to sort documents by the converted date
{ $sort: { convertedDate: 1 } },
]
);
t.deepEqual(
result,
[
{
_id: 3,
item: "ice cream",
qty: 2,
price: "4.99",
order_date: "2018-03-05",
convertedDate: new Date("2018-03-05T00:00:00Z"),
},
{
_id: 4,
item: "almonds",
qty: 5,
price: 5,
order_date: "2018-03-05",
convertedDate: new Date("2018-03-05T00:00:00Z"),
},
{
_id: 1,
item: "apple",
qty: 5,
order_date: new Date("2018-03-10T00:00:00Z"),
convertedDate: new Date("2018-03-10T00:00:00Z"),
},
{
_id: 2,
item: "pie",
qty: 10,
order_date: new Date("2018-03-12T00:00:00Z"),
convertedDate: new Date("2018-03-12T00:00:00Z"),
},
],
"can apply $toDate"
);
// Test $convert operator
result = aggregate(
[
{ _id: 1, item: "apple", qty: 5, price: 10 },
{ _id: 2, item: "pie", qty: 10, price: Number("20.0") },
{ _id: 3, item: "ice cream", qty: 2, price: "4.99" },
{ _id: 4, item: "almonds" },
{ _id: 5, item: "bananas", qty: 5000000000, price: Number("1.25") },
],
[
// Define stage to add convertedPrice and convertedQty fields with the converted price and qty values
// If price or qty values are missing, the conversion returns a value of decimal value or int value of 0.
// If price or qty values cannot be converted, the conversion returns a string
{
$addFields: {
convertedPrice: {
$convert: {
input: "$price",
to: "decimal",
onError: "Error",
onNull: Number("0"),
},
},
convertedQty: {
$convert: {
input: "$qty",
to: "int",
onError: {
$concat: [
"Could not convert ",
{ $toString: "$qty" },
" to type integer.",
],
},
onNull: Number("0"),
},
},
},
},
// calculate total price
{
$project: {
totalPrice: {
$switch: {
branches: [
{
case: { $eq: [{ $type: "$convertedPrice" }, "string"] },
then: "NaN",
},
{
case: { $eq: [{ $type: "$convertedQty" }, "string"] },
then: "NaN",
},
],
default: { $multiply: ["$convertedPrice", "$convertedQty"] },
},
},
},
},
]
);
t.deepEqual(
result,
[
{ _id: 1, totalPrice: Number("50.0000000000000") },
{ _id: 2, totalPrice: Number("200.0") },
{ _id: 3, totalPrice: Number("9.98") },
{ _id: 4, totalPrice: Number("0") },
{ _id: 5, totalPrice: "NaN" },
],
"can apply $convert"
);
t.end();
}); | the_stack |
import Bot from 'keybase-bot'
import {BotConfig} from './bot-config'
import * as Errors from './errors'
import * as ChatTypes from 'keybase-bot/lib/types/chat1'
// namespace: jirabot-v1-team-[teamname]; key: jiraConfig
// Per-team Jira connection settings (OAuth consumer credentials).
// KV store location — namespace: jirabot-v1-team-[teamname]; key: jiraConfig
export type TeamJiraConfig = Readonly<{
  jiraHost: string
  jiraAuth: Readonly<{
    consumerKey: string
    publicKey: string
    privateKey: string
  }>
}>
// Per-user OAuth tokens for a team's Jira instance.
// KV store location — namespace: jirabot-v1-team-[teamname]; key: user-[keybase username]
export type TeamUserConfig = Readonly<{
  jiraAccountID: string
  accessToken: string
  tokenSecret: string
}>
// Per-channel settings within a team.
// KV store location — namespace: jirabot-v1-team-[teamname]; key: channel-[conversationId]
export type TeamChannelConfig = Readonly<{
  defaultNewIssueProject?: string
}>
export const emptyTeamChannelConfig: TeamChannelConfig = {
  defaultNewIssueProject: undefined,
}
// A single Jira webhook subscription delivering updates into a conversation.
export type TeamJiraSubscription = {
  conversationId: string
  webhookURI: string // needed for unsubscribing
  urlToken: string
  jql: string
  withUpdates: boolean
}
export type TeamJiraSubscriptions = Readonly<
  Map<
    number, // subscription ID that has nothing to do with webhookId
    TeamJiraSubscription
  >
>
// Reverse lookup from a webhook urlToken back to its team + subscription ID.
// this is the value. key is urlToken
export type JiraSubscriptionIndex = Readonly<{
  teamname: string
  id: number
}>
// Builders for the KV-store namespaces and entry keys used by this module.
const getNamespace = (teamname: string): string => {
  return `jirabot-v1-team-${teamname}`
}
const jiraSubscriptionIndexNamespace = 'jirabot-v1-subscription-index'
const jiraConfigKey = 'jiraConfig'
const getTeamUserConfigKey = (username: string) => {
  return `user-${username}`
}
const getTeamChannelConfigKey = (conversationId: ChatTypes.ConvIDStr) => {
  return `channel-${conversationId}`
}
const jiraSubscriptionsKey = 'jiraSubscriptions'
/**
 * Validates an object parsed from KV-store JSON as a TeamJiraConfig.
 * Returns undefined for anything malformed — including null or primitives,
 * which JSON.parse can legitimately produce and which previously threw a
 * TypeError on property access here.
 */
const jsonToTeamJiraConfig = (
  objectFromJson: any
): TeamJiraConfig | undefined => {
  if (!objectFromJson || typeof objectFromJson !== 'object') {
    return undefined
  }
  if (
    typeof objectFromJson.jiraHost !== 'string' ||
    !objectFromJson.jiraAuth ||
    typeof objectFromJson.jiraAuth.consumerKey !== 'string' ||
    typeof objectFromJson.jiraAuth.publicKey !== 'string' ||
    typeof objectFromJson.jiraAuth.privateKey !== 'string'
  ) {
    return undefined
  }
  // Rebuild a fresh object so extra properties from storage don't leak through.
  return {
    jiraHost: objectFromJson.jiraHost,
    jiraAuth: {
      consumerKey: objectFromJson.jiraAuth.consumerKey,
      publicKey: objectFromJson.jiraAuth.publicKey,
      privateKey: objectFromJson.jiraAuth.privateKey,
    },
  }
}
/**
 * Validates an object parsed from KV-store JSON as a TeamUserConfig.
 * Guards before destructuring: destructuring null/undefined previously threw.
 */
const jsonToTeamUserConfig = (
  objectFromJson: any
): TeamUserConfig | undefined => {
  if (!objectFromJson || typeof objectFromJson !== 'object') {
    return undefined
  }
  const {jiraAccountID, accessToken, tokenSecret} = objectFromJson
  if (
    typeof jiraAccountID !== 'string' ||
    typeof accessToken !== 'string' ||
    typeof tokenSecret !== 'string'
  ) {
    return undefined
  }
  return {
    jiraAccountID,
    accessToken,
    tokenSecret,
  }
}
/**
 * Validates an object parsed from KV-store JSON as a TeamChannelConfig.
 * Guards before destructuring: destructuring null/undefined previously threw.
 */
const jsonToTeamChannelConfig = (
  objectFromJson: any
): TeamChannelConfig | undefined => {
  if (!objectFromJson || typeof objectFromJson !== 'object') {
    return undefined
  }
  const {defaultNewIssueProject} = objectFromJson
  if (
    typeof defaultNewIssueProject !== 'undefined' &&
    typeof defaultNewIssueProject !== 'string'
  ) {
    return undefined
  }
  return {
    defaultNewIssueProject,
  }
}
/**
 * Rebuilds a subscription map from its serialized [key, value] entry pairs.
 * Malformed entries are skipped; previously a non-array entry (destructure)
 * or a null value (typeof null === 'object', then property access) threw.
 */
const jsonToTeamJiraSubscriptions = (
  objectFromJson: any
): TeamJiraSubscriptions | undefined => {
  if (!Array.isArray(objectFromJson)) {
    return undefined
  }
  const subscriptions = new Map<number, TeamJiraSubscription>()
  for (const entry of objectFromJson) {
    if (!Array.isArray(entry)) {
      continue
    }
    const [key, value] = entry
    if (
      typeof key !== 'number' ||
      !value ||
      typeof value !== 'object' ||
      typeof value.conversationId !== 'string' ||
      typeof value.webhookURI !== 'string' ||
      typeof value.urlToken !== 'string' ||
      typeof value.jql !== 'string' ||
      !['boolean', 'undefined'].includes(typeof value.withUpdates)
    ) {
      continue
    }
    subscriptions.set(key, {
      conversationId: value.conversationId,
      webhookURI: value.webhookURI,
      urlToken: value.urlToken,
      jql: value.jql,
      // older records may lack withUpdates; coerce to a real boolean
      withUpdates: !!value.withUpdates,
    })
  }
  return subscriptions
}
/**
 * Validates an object parsed from KV-store JSON as a JiraSubscriptionIndex.
 * Guards before destructuring: destructuring null/undefined previously threw.
 */
const jsonToJiraSubscriptionIndex = (
  objectFromJson: any
): JiraSubscriptionIndex | undefined => {
  if (!objectFromJson || typeof objectFromJson !== 'object') {
    return undefined
  }
  const {teamname, id} = objectFromJson
  if (typeof teamname !== 'string' || typeof id !== 'number') {
    return undefined
  }
  return {
    teamname,
    id,
  }
}
// Serializes the subscription map as a JSON array of [key, value] pairs
// (the inverse of jsonToTeamJiraSubscriptions).
const teamJiraSubscriptionsToJson = (
  teamJiraSubscriptions: TeamJiraSubscriptions
): string => {
  const entryPairs = Array.from(teamJiraSubscriptions.entries())
  return JSON.stringify(entryPairs)
}
// A config value annotated with the KV-store revision it was read at and
// the time it entered the cache.
export type CachedConfig<T> = Readonly<{
  _revision: number
  _timestamp: number
  config: T
}>
type ConfigCache<T> = Map<string, CachedConfig<T>>
// Cached entries are considered stale after one minute.
const cacheTimeout = 1000 * 60 // 1min
const cachedConfigExpired = <T>(cc?: CachedConfig<T>) => {
  if (!cc) {
    return true
  }
  const age = Date.now() - cc._timestamp
  return age > cacheTimeout
}
const getCacheKey = (namespace: string, entryKey: string) => {
  return `${namespace}:${entryKey}`
}
export default class Configs {
// TODO: purge cache if RAM consumption is too high
private cache = {
teamJiraConfigs: new Map<string, CachedConfig<TeamJiraConfig>>(),
teamUserConfigs: new Map<string, CachedConfig<TeamUserConfig>>(),
teamChannelConfigs: new Map<string, CachedConfig<TeamChannelConfig>>(),
teamJiraSubscriptions: new Map<
string,
CachedConfig<TeamJiraSubscriptions>
>(),
jiraSubscriptionIndex: new Map<
string,
CachedConfig<JiraSubscriptionIndex>
>(),
}
private bot: Bot
private botConfig: BotConfig
constructor(bot: Bot, botConfig: BotConfig) {
this.bot = bot
this.botConfig = botConfig
}
private async getFromCacheOrKVStore<T>(
configCache: ConfigCache<T>,
namespace: string,
entryKey: string,
jsonToConfigMapper: (objectFromJson: any) => T | undefined
): Promise<
Errors.ResultOrError<
CachedConfig<T>,
Errors.KVStoreNotFoundError | Errors.UnknownError
>
> {
try {
const cacheKey = getCacheKey(namespace, entryKey)
const cached = configCache.get(cacheKey)
if (!cachedConfigExpired(cached)) {
return Errors.makeResult<CachedConfig<T>>(cached)
}
const res = await this.bot.kvstore.get(
`${this.botConfig.keybase.username},${this.botConfig.keybase.username}`,
namespace,
entryKey
)
if (!res.entryValue) {
return Errors.kvStoreNotFoundError
}
let objectFromJson: Object
try {
objectFromJson = JSON.parse(res.entryValue)
} catch (e) {
await this.bot.kvstore.delete(
`${this.botConfig.keybase.username},${this.botConfig.keybase.username}`,
namespace,
entryKey
)
configCache.delete(cacheKey)
return Errors.kvStoreNotFoundError
}
const config = jsonToConfigMapper(objectFromJson)
if (!config) {
configCache.delete(cacheKey)
return Errors.makeError({type: Errors.ErrorType.KVStoreNotFound})
}
const cachedConfig = {
config,
_revision: res.revision,
_timestamp: Date.now(),
}
configCache.set(cacheKey, cachedConfig)
return Errors.makeResult<CachedConfig<T>>(cachedConfig)
} catch (err) {
return Errors.makeUnknownError(err)
}
}
private async updateToCacheAndKVStore<T>(
configCache: ConfigCache<T>,
namespace: string,
entryKey: string,
oldConfig: CachedConfig<T> | undefined,
newConfig: T,
configSerializer?: (config: T) => string
): Promise<
Errors.ResultOrError<
undefined,
Errors.KVStoreRevisionError | Errors.UnknownError
>
> {
try {
const entryValue = configSerializer
? configSerializer(newConfig)
: JSON.stringify(newConfig)
const res = await this.bot.kvstore.put(
`${this.botConfig.keybase.username},${this.botConfig.keybase.username}`,
namespace,
entryKey,
entryValue,
oldConfig ? oldConfig._revision + 1 : undefined
)
// TODO if revision error, purge cached entry
configCache.set(getCacheKey(namespace, entryKey), {
config: newConfig,
_revision: res.revision,
_timestamp: Date.now(),
})
return Errors.makeResult(undefined)
} catch (err) {
// TODO check and return KVStoreRevisionError
return Errors.makeUnknownError(err)
}
}
public async clearAllForTest(): Promise<any> {
const teamname = `${this.botConfig.keybase.username},${this.botConfig.keybase.username}`
this.bot.kvstore
.listNamespaces(teamname)
.then(res =>
res.namespaces?.forEach(namespace =>
this.bot.kvstore
.listEntryKeys(teamname, namespace)
.then(res =>
res.entryKeys?.forEach(({entryKey}) =>
this.bot.kvstore.delete(teamname, namespace, entryKey)
)
)
)
)
}
async getTeamJiraConfig(
teamname: string
): Promise<
Errors.ResultOrError<
CachedConfig<TeamJiraConfig>,
Errors.KVStoreNotFoundError | Errors.UnknownError
>
> {
return await this.getFromCacheOrKVStore(
this.cache.teamJiraConfigs,
getNamespace(teamname),
jiraConfigKey,
jsonToTeamJiraConfig
)
}
async getTeamUserConfig(
teamname: string,
username: string
): Promise<
Errors.ResultOrError<
CachedConfig<TeamUserConfig>,
Errors.KVStoreNotFoundError | Errors.UnknownError
>
> {
return await this.getFromCacheOrKVStore(
this.cache.teamUserConfigs,
getNamespace(teamname),
getTeamUserConfigKey(username),
jsonToTeamUserConfig
)
}
async getTeamChannelConfig(
teamname: string,
conversationId: ChatTypes.ConvIDStr
): Promise<
Errors.ResultOrError<
CachedConfig<TeamChannelConfig>,
Errors.KVStoreNotFoundError | Errors.UnknownError
>
> {
return await this.getFromCacheOrKVStore(
this.cache.teamChannelConfigs,
getNamespace(teamname),
getTeamChannelConfigKey(conversationId),
jsonToTeamChannelConfig
)
}
async getTeamJiraSubscriptions(
teamname: string
): Promise<
Errors.ResultOrError<
CachedConfig<TeamJiraSubscriptions>,
Errors.KVStoreNotFoundError | Errors.UnknownError
>
> {
return await this.getFromCacheOrKVStore(
this.cache.teamJiraSubscriptions,
getNamespace(teamname),
jiraSubscriptionsKey,
jsonToTeamJiraSubscriptions
)
}
async getJiraSubscriptionIndex(
urlToken: string
): Promise<
Errors.ResultOrError<
CachedConfig<JiraSubscriptionIndex>,
Errors.KVStoreNotFoundError | Errors.UnknownError
>
> {
return await this.getFromCacheOrKVStore(
this.cache.jiraSubscriptionIndex,
jiraSubscriptionIndexNamespace,
urlToken,
jsonToJiraSubscriptionIndex
)
}
async listAllJiraSubscriptionIndices(): Promise<
Errors.ResultOrError<
Array<JiraSubscriptionIndex>,
Errors.KVStoreNotFoundError | Errors.UnknownError
>
> {
const indicesRet = await Promise.all(
await this.bot.kvstore
.listEntryKeys(
`${this.botConfig.keybase.username},${this.botConfig.keybase.username}`,
jiraSubscriptionIndexNamespace
)
.then(res =>
(res.entryKeys || []).map(({entryKey}) =>
this.getFromCacheOrKVStore(
this.cache.jiraSubscriptionIndex,
jiraSubscriptionIndexNamespace,
entryKey,
jsonToJiraSubscriptionIndex
)
)
)
)
const result: Array<JiraSubscriptionIndex> = []
for (const indRet of indicesRet) {
if (indRet.type !== Errors.ReturnType.Ok) {
return indRet
}
result.push(indRet.result.config)
}
return Errors.makeResult(result)
}
async updateTeamJiraConfig(
teamname: string,
oldConfig: CachedConfig<TeamJiraConfig> | undefined,
newConfig: TeamJiraConfig
): Promise<
Errors.ResultOrError<
undefined,
Errors.KVStoreRevisionError | Errors.UnknownError
>
> {
return await this.updateToCacheAndKVStore(
this.cache.teamJiraConfigs,
getNamespace(teamname),
jiraConfigKey,
oldConfig,
newConfig
)
}
async updateTeamUserConfig(
teamname: string,
username: string,
oldConfig: CachedConfig<TeamUserConfig> | undefined,
newConfig: TeamUserConfig
): Promise<
Errors.ResultOrError<
undefined,
Errors.KVStoreRevisionError | Errors.UnknownError
>
> {
return await this.updateToCacheAndKVStore(
this.cache.teamUserConfigs,
getNamespace(teamname),
getTeamUserConfigKey(username),
oldConfig,
newConfig
)
}
async updateTeamChannelConfig(
teamname: string,
conversationId: ChatTypes.ConvIDStr,
oldConfig: CachedConfig<TeamChannelConfig> | undefined,
newConfig: TeamChannelConfig
): Promise<
Errors.ResultOrError<
undefined,
Errors.KVStoreRevisionError | Errors.UnknownError
>
> {
return await this.updateToCacheAndKVStore(
this.cache.teamChannelConfigs,
getNamespace(teamname),
getTeamChannelConfigKey(conversationId),
oldConfig,
newConfig
)
}
async updateTeamJiraSubscriptions(
teamname: string,
oldConfig: CachedConfig<TeamJiraSubscriptions> | undefined,
newConfig: TeamJiraSubscriptions
): Promise<
Errors.ResultOrError<
undefined,
Errors.KVStoreRevisionError | Errors.UnknownError
>
> {
return await this.updateToCacheAndKVStore(
this.cache.teamJiraSubscriptions,
getNamespace(teamname),
jiraSubscriptionsKey,
oldConfig,
newConfig,
teamJiraSubscriptionsToJson
)
}
async setOrDeleteJiraSubscriptionIndex(
urlToken: string,
index?: JiraSubscriptionIndex // set to undefined to delete
): Promise<Errors.ResultOrError<undefined, Errors.UnknownError>> {
if (!index) {
try {
await this.bot.kvstore.delete(
`${this.botConfig.keybase.username},${this.botConfig.keybase.username}`,
jiraSubscriptionIndexNamespace,
urlToken
)
} catch (err) {
return Errors.makeUnknownError(err)
}
} else {
try {
await this.bot.kvstore.put(
`${this.botConfig.keybase.username},${this.botConfig.keybase.username}`,
jiraSubscriptionIndexNamespace,
urlToken,
JSON.stringify(index)
)
} catch (err) {
return Errors.makeUnknownError(err)
}
}
// either way delete from the cache
this.cache.jiraSubscriptionIndex.delete(urlToken)
return Errors.makeResult(undefined)
}
} | the_stack |
import EventDispatcher from "./EventDispatcher";
import Connection from "./Connection";
import AudioContextManager from "./AudioContextManager";
import BufferManager from "./BufferManager";
import PatchEvent from "../event/PatchEvent";
import ModuleDefinitions from "../config/ModuleDefinitions";
import Module from "./Module";
import ModuleTypes from "../enum/ModuleTypes";
import ModuleEvent from "../event/ModuleEvent";
import IModuleDefinition from "../config/IModuleDefinition";
import IPatchObject from "./IPatchObject";
import IModuleObject from "./IModuleObject";
class Patch extends EventDispatcher
{
// Separates a module's type from its per-type counter in generated ids. TODO: make private?
public static ID_COUNT_SEPARATOR = '_';
// Separates nested subpatch segments in fully-qualified module ids.
public static ID_SUBPATCH_SEPARATOR = '$';

public modules:Array<Module> = [];
// The module that owns this patch when it is a subpatch; undefined for the root patch.
public parentModule:Module;
public audioContext:AudioContext;
public connections:Array<Connection> = [];
// Per-module-type counters used to generate unique module ids.
public countsByType:{[type:string]: number} = {};
// Only set on the root patch (see constructor); subpatches bubble events up to it.
public audioContextManager:AudioContextManager;
// Bound handlers so listeners can be removed with the same reference.
public subPatchEventHandler:any;
public moduleEventHandler:any;
// Only set on the root patch; shared by all subpatches.
public bufferManager:BufferManager;
/**
 * Creates a patch. When parentModule is omitted this is the root patch,
 * which owns the AudioContextManager and BufferManager; subpatches bubble
 * their events up to the root instead of owning their own.
 */
constructor(audioContext:AudioContext, parentModule?:Module)
{
    super();

    this.parentModule = parentModule;
    this.audioContext = audioContext;

    if(!parentModule)
    {
        // only the root gets a listener, all subpatches bubble their events to here
        this.audioContextManager = new AudioContextManager(this, audioContext);

        // same for buffermanager
        this.bufferManager = new BufferManager(this.audioContext);
    }

    // pre-bound handlers for events, so add/removeEventListener see the same reference
    this.subPatchEventHandler = this.handleSubPatchEvent.bind(this);
    this.moduleEventHandler = this.handleModuleEvent.bind(this);
}
/**
* Removes a given connection from the patch.
* @param connectionToRemove
*/
public removeConnection(connectionToRemove:Connection):void
{
let index:number = this.connections.indexOf(connectionToRemove);
if(index >= 0)
{
// dispatch event *before* removing, so the ACM can still figure out what connections are involved
this.dispatchEvent(PatchEvent.CONNECTION_PRE_REMOVE, {connection: connectionToRemove});
this.connections.splice(index, 1);
// editor redraws from post event
this.dispatchEvent(PatchEvent.CONNECTION_POST_REMOVE, {connection: connectionToRemove});
connectionToRemove.destruct();
}
else
{
console.error('Connection not found', connectionToRemove);
}
}
/**
 * Adds a module to the patch.
 * @param moduleType type identifier looked up in ModuleDefinitions
 * @param moduleArguments optional constructor arguments for the module
 * @param moduleObject serialized module data; passed when parsing a full patch json (todo: type)
 * @returns {Module} the created module, or undefined if the type was missing/unknown
 */
public addModuleByType(moduleType:string, moduleArguments?:Array<any>, moduleObject?:any):Module
{
    if(!moduleType)
    {
        console.error('No type given');

        return;
    }

    let definition:IModuleDefinition = ModuleDefinitions.findByType(moduleType);

    //console.log('addModuleByType', moduleType);

    if(definition)
    {
        // init the counter for this moduletype
        if(typeof this.countsByType[moduleType] === 'undefined') this.countsByType[moduleType] = 0;

        // increase count
        this.countsByType[moduleType]++;

        // get the id of the module (module data, containing a saved id, is given when loading a patch)
        let moduleId:string = moduleObject ? moduleObject.id : moduleType + Patch.ID_COUNT_SEPARATOR + this.countsByType[moduleType];

        // create the module TODO try/catch this all and make sure the id doesnt get incremented if all fails
        let module:Module = new Module(this, definition, moduleId, moduleArguments);
        this.modules.push(module);

        // if the module was loaded, set the position as well (so the visual module doesn't snap to the default start position)
        if(moduleObject)
        {
            module.position = moduleObject.pos;
        }

        // if it was a subpatch, we need to give it a new patch
        if(definition.type === ModuleTypes.SUBPATCH)
        {
            let subPatch = new Patch(this.audioContext, module);

            // listen to subpatch events. should be done BEFORE setting the subpatch, so that when loaded, the events from adding nested modules
            // are caught and the ACM can set the audionode
            this.addEventListenersToSubPatch(subPatch);

            if(!moduleObject)
            {
                // module was NOT created from parsing an object, create the default subpatch (one input, one output)
                subPatch.addModuleByType(ModuleTypes.INPUT);
                subPatch.addModuleByType(ModuleTypes.OUTPUT);
            }
            else
            {
                // module created from parsing an object, parse the subpatch
                subPatch.fromObject(moduleObject.subPatch);
            }

            // set it
            module.subPatch = subPatch;
        }

        // listen to module events
        this.addEventListenersToModule(module);

        // notify ACM (audionode gets set in this module) and the editor (visual module is created)
        this.dispatchEvent(PatchEvent.MODULE_ADDED, {module: module, args: moduleArguments});

        // after the event has been dispatched, set the values (so there is a node to set the values on and the visual module can update its values)
        if(moduleObject) module.setAttributesByLoadedObject(moduleObject);

        return module; // for now only needed for parsing a json and handling a subpatch TODO what is this?!
    }
    else
    {
        console.error('No module definition found for type: ' + moduleType);
    }
}
/**
 * Forwards an attribute-change event coming from one of this patch's modules.
 * The payload is redispatched unchanged as a PatchEvent so it can bubble up
 * towards the root patch.
 * @param type The incoming ModuleEvent type (currently only ATTRIBUTE_CHANGED is wired up).
 * @param data The original event payload ({module, attribute}).
 */
public handleModuleEvent(type:string, data:any):void
{
    // TODO switch on type, even though there is only one type now
    const eventData = {module: data.module, attribute: data.attribute};
    this.dispatchEvent(PatchEvent.MODULE_ATTRIBUTE_CHANGED, eventData);
}
/**
 * Subscribes this patch to a module's events so changes are forwarded upwards.
 * Currently only ATTRIBUTE_CHANGED is observed (handled via this.moduleEventHandler,
 * which is presumably bound to handleModuleEvent — binding happens outside this view).
 * @param module The module to start listening to.
 */
private addEventListenersToModule(module:Module):void
{
module.addEventListener(ModuleEvent.ATTRIBUTE_CHANGED, this.moduleEventHandler);
}
/**
 * Unsubscribes this patch from a module's events.
 * Mirror of addEventListenersToModule; must be called before the module is destructed.
 * @param module The module to stop listening to.
 */
private removeEventListenersFromModule(module:Module):void
{
module.removeEventListener(ModuleEvent.ATTRIBUTE_CHANGED, this.moduleEventHandler);
}
/**
* Add necessary listeners to subpatch.
* @param module
*/
private addEventListenersToSubPatch(subPatch:Patch):void
{
subPatch.addEventListener(PatchEvent.MODULE_ADDED, this.subPatchEventHandler);
subPatch.addEventListener(PatchEvent.MODULE_REMOVED, this.subPatchEventHandler);
subPatch.addEventListener(PatchEvent.CONNECTION_ADDED, this.subPatchEventHandler);
subPatch.addEventListener(PatchEvent.CONNECTION_PRE_REMOVE, this.subPatchEventHandler);
subPatch.addEventListener(PatchEvent.CONNECTION_POST_REMOVE, this.subPatchEventHandler);
subPatch.addEventListener(PatchEvent.PATCH_CLEARED, this.subPatchEventHandler);
subPatch.addEventListener(PatchEvent.MODULE_ATTRIBUTE_CHANGED, this.subPatchEventHandler);
}
/**
* Remove necessary listeners from module.
* @param module
*/
private removeEventListenersFromSubPatch(subPatch:Patch):void
{
subPatch.removeEventListener(PatchEvent.MODULE_ADDED, this.subPatchEventHandler);
subPatch.removeEventListener(PatchEvent.MODULE_REMOVED, this.subPatchEventHandler);
subPatch.removeEventListener(PatchEvent.CONNECTION_ADDED, this.subPatchEventHandler);
subPatch.removeEventListener(PatchEvent.CONNECTION_PRE_REMOVE, this.subPatchEventHandler);
subPatch.removeEventListener(PatchEvent.CONNECTION_POST_REMOVE, this.subPatchEventHandler);
subPatch.removeEventListener(PatchEvent.PATCH_CLEARED, this.subPatchEventHandler);
subPatch.removeEventListener(PatchEvent.MODULE_ATTRIBUTE_CHANGED, this.subPatchEventHandler);
}
/**
 * Redispatches an event coming from a nested subpatch, so it bubbles up
 * towards the root patch unchanged.
 * NOTE: parameters were implicitly 'any'; typed to match handleModuleEvent.
 * @param type The PatchEvent type to forward.
 * @param data The original event payload, forwarded untouched.
 */
private handleSubPatchEvent(type:string, data:any):void
{
    // redispatch all events, so they bubble up to the root
    this.dispatchEvent(type, data);
}
/**
 * Looks up a module in this patch by its id.
 * @param moduleId
 * @returns The matching module, or undefined when no module has the given id.
 */
public getModuleById(moduleId:string):Module
{
    for(let i = 0; i < this.modules.length; i++)
    {
        if(this.modules[i].id === moduleId) return this.modules[i];
    }
    return undefined;
}
/**
 * Removes a module (and everything attached to it) from the patch.
 * Teardown order matters here: connections are removed first, then the module
 * leaves the list, then a nested subpatch (if any) is cleared and destructed,
 * and only after MODULE_REMOVED has been dispatched is the module destructed.
 * Logs an error (does not throw) when no module has the given id.
 * @param moduleId Id of the module to remove.
 */
public removeModuleById(moduleId:string):void
{
let module:Module = this.getModuleById(moduleId);
if(module)
{
let moduleIndex:number = this.modules.indexOf(module);
// first get all connections from or to this module
var connections:Array<Connection> = this.getConnectionsForModule(module);
// remove all these connections
connections.forEach(connection => this.removeConnection(connection));
// remove module from list
this.modules.splice(moduleIndex, 1);
if(module.definition.type === ModuleTypes.SUBPATCH)
{
// and clear subpatch
module.subPatch.clear();
// if subpatch, remove listeners
this.removeEventListenersFromSubPatch(module.subPatch);
module.subPatch.destruct();
}
// do this BEFORE destruct, so that listeners can still check the module's id todo maybe do it afterwards and pass the id? or do we need the full module
this.dispatchEvent(PatchEvent.MODULE_REMOVED, {module: module});
this.removeEventListenersFromModule(module);
// destruct
module.destruct();
}
else
{
console.error('Module not found for id: ' + moduleId + ' (or not in list)');
}
}
/**
 * Collects every connection that starts or ends at the given module.
 * @param module
 * @returns All connections touching the module (may be empty).
 */
public getConnectionsForModule(module:Module):Array<Connection>
{
    const result:Array<Connection> = [];
    for(const connection of this.connections)
    {
        if(connection.sourceModule === module || connection.destinationModule === module)
        {
            result.push(connection);
        }
    }
    return result;
}
/**
 * Returns all modules of type INPUT in this patch.
 * Thin convenience wrapper around getModulesByType.
 * todo do we need this? we can just call this.getModulesByType
 * @returns {Array<Module>}
 */
public getInputs():Array<Module>
{
return this.getModulesByType(ModuleTypes.INPUT);
}
/**
 * Returns all modules of type OUTPUT in this patch.
 * Thin convenience wrapper around getModulesByType.
 * @returns {Array<Module>}
 */
public getOutputs():Array<Module>
{
return this.getModulesByType(ModuleTypes.OUTPUT);
}
/**
 * Returns all modules in this patch whose definition matches the given type.
 * @param moduleType One of the ModuleTypes constants.
 * @returns All matching modules (may be empty).
 */
public getModulesByType(moduleType:string):Array<Module>
{
    const matches:Array<Module> = [];
    for(const module of this.modules)
    {
        if(module.definition.type === moduleType) matches.push(module);
    }
    return matches;
}
/**
 * Returns all subpatch modules in the patch.
 * Delegates to getModulesByType with the SUBPATCH type.
 * @returns {Array}
 */
public getSubPatchModules():Array<Module>
{
    return this.getModulesByType(ModuleTypes.SUBPATCH);
}
/**
 * Returns all connections for this patch, including the connections inside
 * every nested subpatch (recursive).
 * @returns {Array}
 */
public getConnectionsWithNested():Array<Connection>
{
    // start with a copy of this patch's own connections
    let allConnections:Array<Connection> = this.connections.slice();
    // then recurse into every subpatch and append its (nested) connections
    this.getSubPatchModules().forEach(subPatchModule =>
    {
        allConnections = allConnections.concat(subPatchModule.subPatch.getConnectionsWithNested());
    });
    return allConnections;
}
/**
 * Serializes the whole patch (modules with position/args/attribute values,
 * nested subpatches, and all connections) into a plain data-object.
 * The result can be loaded again through fromObject().
 * @returns {{modules: Array, connections: Array}}
 */
public toObject():IPatchObject
{
    const moduleObjects = this.modules.map(module =>
    {
        const moduleObject:IModuleObject = {
            id: module.id,
            pos: null,
            args: null,
            attributes: null,
            subPatch: null
        };
        // visual position, when the module has one
        if(module.position) moduleObject.pos = {x: module.position.x, y: module.position.y};
        // constructor arguments
        if(module.args && module.args.length > 0) moduleObject.args = module.args;
        // current value for every attribute the definition declares (audioparams)
        if(module.definition.attributes)
        {
            moduleObject.attributes = module.definition.attributes.map(attribute => ({
                id: attribute.id,
                value: module.getAttributeValue(attribute.id)
            }));
        }
        // recurse into a nested subpatch
        if(module.definition.type === ModuleTypes.SUBPATCH) moduleObject.subPatch = module.subPatch.toObject();
        return moduleObject;
    });
    const connectionObjects = this.connections.map(connection => ({
        source: connection.sourceModule.id,
        sourceOutput: connection.sourceOutputIndex,
        destination: connection.destinationModule.id,
        destinationInput: connection.destinationInputIndex,
    }));
    return {modules: moduleObjects, connections: connectionObjects};
}
/**
 * Creates a new connection in the patch between two existing modules.
 * Validates that both modules exist and that the given output/input indexes
 * are valid on them; on failure an error is logged and false is returned.
 * @param sourceModuleId Id of the module the connection starts at.
 * @param sourceOutputIndex Output index on the source module.
 * @param destinationModuleId Id of the module the connection ends at.
 * @param destinationInputIndex Input index on the destination module.
 * @param reconnect Marks this connection as a reconnection after an output was disconnected. // todo what is this
 * @returns {boolean} true when the connection was created.
 */
public addConnection(sourceModuleId:string, sourceOutputIndex:number, destinationModuleId:string, destinationInputIndex:number, reconnect:boolean = false):boolean
{
    let sourceModule:Module = this.getModuleById(sourceModuleId);
    let destinationModule:Module = this.getModuleById(destinationModuleId);
    if(sourceModule && destinationModule)
    {
        if(sourceModule.outputIndexIsValid(sourceOutputIndex) && destinationModule.inputIndexIsValid(destinationInputIndex))
        {
            var connection = new Connection(sourceModule, sourceOutputIndex, destinationModule, destinationInputIndex);
            // TODO check if connection doesnt exist already
            this.connections.push(connection);
            // reconnect defines this connection as a reconnection after an output was disconnected // todo what is this
            this.dispatchEvent(PatchEvent.CONNECTION_ADDED, {connection: connection, reconnect: reconnect || false});
            return true;
        }
        else
        {
            console.error('Output and/or input index is invalid');
        }
    }
    else
    {
        // fixed garbled message (was: 'Source and/or destination for found for ids')
        console.error('Source and/or destination not found for ids: ' + sourceModuleId + ', ' + destinationModuleId);
    }
    return false;
}
/**
* Create a patch from an object
* @param patchObject
*/
public fromObject(patchObject):void
{
for(var i = 0; i < patchObject.modules.length; i++)
{
var moduleObject = patchObject.modules[i];
var type = moduleObject.id.split(Patch.ID_COUNT_SEPARATOR)[0];
//console.log(type, moduleObject);
// todo fix this
var module = this.addModuleByType(type, moduleObject.args, moduleObject); // TODO this is a strange way to pass arguments
//module.setAttributesByLoadedObject(moduleObject);
}
for(var i = 0; i < patchObject.connections.length; i++)
{
var loadedConnectionData = patchObject.connections[i];
this.addConnection(
loadedConnectionData.source,
loadedConnectionData.sourceOutput,
loadedConnectionData.destination,
loadedConnectionData.destinationInput
);
}
}
/**
 * Returns the root patch of this patch by walking up the parent chain.
 * When this patch has no parent, it is itself the root.
 * @returns {Patch}
 */
public getRootPatch():Patch
{
    let current:Patch = this;
    while(current.hasParentPatch())
    {
        current = current.getParentPatch();
    }
    return current;
}
/**
 * Returns the ids of all parent modules of this patch, ordered from the
 * outermost ancestor down to the direct parent.
 * @returns {Array<string>}
 */
public createParentList():Array<string>
{
    const ids:Array<string> = [];
    let ancestorModule:Module = this.parentModule;
    while(ancestorModule)
    {
        // prepend, so the outermost ancestor ends up first
        ids.unshift(ancestorModule.id);
        ancestorModule = ancestorModule.parentPatch.parentModule;
    }
    return ids;
}
/**
 * Whether this patch is nested inside a parent module (i.e. is a subpatch).
 */
public hasParentPatch():boolean
{
    return !!this.parentModule;
}
/**
 * Returns the patch containing this patch's parent module, or null for a root patch.
 */
public getParentPatch():Patch
{
    if(!this.parentModule) return null;
    return this.parentModule.parentPatch;
}
/**
* Removes every module and connection from the patch.
*/
public clear():void
{
// todo foreach?
for(var i = this.modules.length - 1; i >= 0; i--)
{
this.removeModuleById(this.modules[i].id);
}
// reset counts
this.countsByType = {};
this.dispatchEvent(PatchEvent.PATCH_CLEARED);
}
/**
 * Tears the patch down: removes its own event listeners and nulls every
 * reference so the patch (and what it holds) can be garbage collected.
 * The patch must not be used after this call.
 */
public destruct():void
{
// stop listening before dropping references
this.removeAllEventListeners();
this.audioContext = null;
this.parentModule = null;
this.modules = null;
this.connections = null;
this.countsByType = null;
this.moduleEventHandler = null;
this.subPatchEventHandler = null;
}
}
// expose the Patch class as this module's default export
export default Patch;
import * as vscode from "vscode";
import { executeCommand, ExpectedDocument, groupTestsByParentName } from "../utils";
// NOTE(review): this suite appears to be auto-generated from
// "./test/suite/commands/seek-word.md" (each assertion carries a
// seek-word.md:line:col marker) — confirm before editing it by hand.
// Each test: (1) applies an initial document + selection (the ^ / | caret
// markers inside the raw strings), (2) runs one or more dance commands,
// and (3) asserts the resulting document + selection.
suite("./test/suite/commands/seek-word.md", function () {
// Set up document.
let document: vscode.TextDocument,
editor: vscode.TextEditor;
this.beforeAll(async () => {
// One shared untitled document/editor is reused by every test in this suite.
document = await vscode.workspace.openTextDocument();
editor = await vscode.window.showTextDocument(document);
editor.options.insertSpaces = true;
editor.options.tabSize = 2;
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
});
this.afterAll(async () => {
// Close the shared editor once the whole suite is done.
await executeCommand("workbench.action.closeActiveEditor");
});
test("1 > word-end", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
console.log()
^ 0
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.wordEnd");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:10:1", 6, String.raw`
console.log()
^^^^^^^ 0
`);
});
test("1 > word-end > x", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
console.log()
^^^^^^^ 0
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.wordEnd");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:20:1", 6, String.raw`
console.log()
^ 0
`);
});
test("1 > word-end > x > x", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
console.log()
^ 0
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.wordEnd");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:30:1", 6, String.raw`
console.log()
^^^ 0
`);
});
test("1 > word-end > x > x > word-start-backward", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
console.log()
^^^ 0
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.word.backward");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:40:1", 6, String.raw`
console.log()
|^^ 0
`);
});
test("1 > word-end > x > x > word-start-backward > x", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
console.log()
|^^ 0
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.word.backward");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:50:1", 6, String.raw`
console.log()
^ 0
`);
});
test("1 > word-end > x > x > word-start-backward > x > x", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
console.log()
^ 0
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.word.backward");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:60:1", 6, String.raw`
console.log()
|^^^^^^ 0
`);
});
test("1 > word-end-extend", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
console.log()
^ 0
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.wordEnd.extend");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:70:1", 6, String.raw`
console.log()
^^^^^^^ 0
`);
});
test("1 > word-end-extend > x", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
console.log()
^^^^^^^ 0
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.wordEnd.extend");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:80:1", 6, String.raw`
console.log()
^^^^^^^^ 0
`);
});
test("1 > word-end-extend > x > x", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
console.log()
^^^^^^^^ 0
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.wordEnd.extend");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:90:1", 6, String.raw`
console.log()
^^^^^^^^^^^ 0
`);
});
test("1 > word-end-extend > x > word-end", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
console.log()
^^^^^^^^ 0
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.wordEnd");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:100:1", 6, String.raw`
console.log()
^^^ 0
`);
});
test("2 > word-start-backward", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
foo
bar
^ 0
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.word.backward");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:121:1", 6, String.raw`
foo
|^^ 0
bar
`);
});
test("3 > word-end", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
aaa bbb ccc ddd
^ 0
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.wordEnd");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:144:1", 6, String.raw`
aaa bbb ccc ddd
^^^^ 0
`);
});
test("3 > word-end > x", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
aaa bbb ccc ddd
^^^^ 0
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.wordEnd");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:154:1", 6, String.raw`
aaa bbb ccc ddd
^^^^ 0
`);
});
test("3 > word-end > x > word-start-backward", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
aaa bbb ccc ddd
^^^^ 0
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.word.backward");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:164:1", 6, String.raw`
aaa bbb ccc ddd
|^^ 0
`);
});
test("3 > word-end > x > word-start-backward-2", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
aaa bbb ccc ddd
^^^^ 0
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.word.backward", { count: 2 });
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:174:1", 6, String.raw`
aaa bbb ccc ddd
|^^^ 0
`);
});
test("4 > word-start", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
aaa bbb
^ 0
ccc
dd
`);
// Perform all operations.
await executeCommand("dance.seek.word");
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:193:1", 6, String.raw`
aaa bbb
^^^ 0
ccc
dd
`);
});
test("4 > word-start > word-end", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
aaa bbb
^^^ 0
ccc
dd
`);
// Perform all operations.
await executeCommand("dance.seek.wordEnd");
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:205:1", 6, String.raw`
aaa bbb
ccc
^^^^^ 0
dd
`);
});
test("5 > word-end", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
foo x bar.baz ex
^ 0
la
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.wordEnd");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:227:1", 6, String.raw`
foo x bar.baz ex
^^^ 0
la
`);
});
test("5 > word-end > x", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
foo x bar.baz ex
^^^ 0
la
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.wordEnd");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:238:1", 6, String.raw`
foo x bar.baz ex
^^ 0
la
`);
});
test("5 > word-end > x > x", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
foo x bar.baz ex
^^ 0
la
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.wordEnd");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:249:1", 6, String.raw`
foo x bar.baz ex
^^^^ 0
la
`);
});
test("5 > word-end > x > x > x", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
foo x bar.baz ex
^^^^ 0
la
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.wordEnd");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:260:1", 6, String.raw`
foo x bar.baz ex
^ 0
la
`);
});
test("6 > word-end", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
a b c d
^ 0
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.wordEnd");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:280:1", 6, String.raw`
a b c d
^^ 0
`);
});
test("6 > word-start", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
a b c d
^ 0
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.word");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:290:1", 6, String.raw`
a b c d
^ 0
`);
});
test("6 > word-start-backward", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
a b c d
^ 0
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.word.backward");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:300:1", 6, String.raw`
a b c d
|^ 0
`);
});
test("6 > word-end-extend", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
a b c d
^ 0
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.wordEnd.extend");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:310:1", 6, String.raw`
a b c d
^^^ 0
`);
});
test("6 > word-start-extend", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
a b c d
^ 0
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.word.extend");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:320:1", 6, String.raw`
a b c d
^^ 0
`);
});
test("6 > word-start-extend-backward", async function () {
// Set-up document to be in expected initial state.
await ExpectedDocument.apply(editor, 6, String.raw`
a b c d
^ 0
`);
// Perform all operations.
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "character" });
await executeCommand("dance.seek.word.extend.backward");
await executeCommand("dance.dev.setSelectionBehavior", { mode: "normal", value: "caret" });
// Ensure document is as expected.
ExpectedDocument.assertEquals(editor, "./test/suite/commands/seek-word.md:330:1", 6, String.raw`
a b c d
|^^ 0
`);
});
groupTestsByParentName(this);
});
import { Coordinate, Geometry, VectorLayer } from "maptalks";
import maptalks = require("maptalks");
// NOTE(review): the tile/attribution URLs below contained a stray space after
// "https://" / "http://" (extraction artifact), which would break every tile
// request at runtime; the spaces have been removed.
const map5 = new maptalks.Map("map", {
    center: [-0.113049, 51.498568],
    zoom: 14,
    pitch: 45,
    // allow map to drag pitching, true by default
    dragPitch: true,
    // allow map to drag rotating, true by default
    dragRotate: true,
    // enable map to drag pitching and rotating at the same time, false by default
    dragRotatePitch: true,
    // attribution: true,
    zoomControl: true, // add zoom control
    scaleControl: true, // add scale control
    overviewControl: true, // add overview control
    centerCross: true,
    minZoom: 14, // set map's min zoom to 14
    maxZoom: 14, // set map's max zoom to 14
    attribution: {
        content: "&copy; BoudlessGeo",
    },
    baseLayer: new maptalks.GroupTileLayer("base", [
        new maptalks.TileLayer("base", {
            renderer: "canvas", // set TileLayer's renderer to canvas
            crossOrigin: "anonymous",
            urlTemplate: "https://{s}.basemaps.cartocdn.com/light_all/{z}/{x}/{y}.png",
            subdomains: ["a", "b", "c", "d"],
            attribution:
                '&copy; <a href="http://osm.org">OpenStreetMap</a> contributors, &copy; <a href="https://carto.com/">CARTO</a>',
        }),
        new maptalks.WMSTileLayer("wms", {
            urlTemplate: "https://demo.boundlessgeo.com/geoserver/ows",
            crs: "EPSG:3857",
            layers: "ne:ne",
            styles: "",
            version: "1.3.0",
            format: "image/png",
            transparent: true,
            uppercase: true,
        }),
    ]),
    layers: [
        new maptalks.TileLayer("boudaries", {
            urlTemplate: "https://{s}.basemaps.cartocdn.com/dark_only_labels/{z}/{x}/{y}.png",
            subdomains: ["a", "b", "c", "d"],
        }),
        new maptalks.VectorLayer("v"),
    ],
});
map5.setPitch(30);
map5.setBearing(20);
/** Animates the camera back to a flat, north-up view on the next frame. */
function reset() {
    const restoreCamera = () => {
        map5.setPitch(0);
        map5.setBearing(0);
    };
    requestAnimationFrame(restoreCamera);
}
// Playback-style toolbar; only "reset" currently does anything.
const pitchToolbarItems = [
    { item: "pause", click() {} },
    { item: "start", click() {} },
    {
        item: "reset",
        click() {
            reset();
        },
    },
];
new maptalks.control.Toolbar({ items: pitchToolbarItems }).addTo(map5);
// Pan the map by a fixed pixel offset in the four cardinal directions.
const PAN_STEP = 200;
function up() {
    map5.panBy([0, -PAN_STEP]);
}
function down() {
    map5.panBy([0, PAN_STEP]);
}
function left() {
    map5.panBy([-PAN_STEP, 0]);
}
function right() {
    map5.panBy([PAN_STEP, 0]);
}
/** Drops a red "x" marker slightly north-east of the center and pans to it. */
function toCoordinate() {
    const target = map5.getCenter().add(0.008, 0.008);
    const markerSymbol = {
        markerType: "x",
        markerLineColor: "#f00",
        markerLineWidth: 4,
        markerWidth: 20,
        markerHeight: 20,
    };
    const vectorLayer = <VectorLayer> map5.getLayer("v");
    vectorLayer.clear().addGeometry(new maptalks.Marker(target, { symbol: markerSymbol }));
    map5.panTo(target);
}
// Toolbar wiring each arrow to its pan handler, plus a "pan to" action.
const panToolbarItems = [
    { item: "↑", click: up },
    { item: "↓", click: down },
    { item: "←", click: left },
    { item: "→", click: right },
    { item: "pan to", click: toCoordinate },
];
new maptalks.control.Toolbar({ items: panToolbarItems }).addTo(map5);
// Refresh the status readout on zoom/move; getStatus is a stub in this type-test,
// relying on function hoisting for the immediate call below.
map5.on("zoomend moving moveend", getStatus);
getStatus();
function getStatus(): void {}
// Draw a translucent white square (0.01° on a side) centered on the map.
const center = map5.getCenter();
const cornerOffsets = [
    [-0.005, 0.005],
    [0.005, 0.005],
    [0.005, -0.005],
    [-0.005, -0.005],
];
const polygon2222 = new maptalks.Polygon(
    cornerOffsets.map(offset => center.add(offset[0], offset[1])),
    {
        symbol: {
            polygonFill: "#fff",
            polygonOpacity: 0.5,
        },
    },
);
(<VectorLayer> map5.getLayer("v")).addGeometry(polygon2222);
function fitExtent() {
// fit map's extent to polygon's
// 0 is the zoom offset
map5.fitExtent(polygon.getExtent(), 0);
}
// Lock the map to its current extent at zoom 14, zoom out to visualize it,
// and outline the locked extent on the vector layer.
const extent = map5.getExtent();
// set map's max extent to map's extent at zoom 14
map5.setMaxExtent(extent);
map5.setZoom(map5.getZoom() - 2, { animation: false });
const extentOutline = new maptalks.Polygon(extent.toArray(), {
    symbol: { polygonOpacity: 0, lineWidth: 5 },
});
(<VectorLayer> map5.getLayer("v")).addGeometry(extentOutline);
// or you can set zoom limit by
map5.setMinZoom(14).setMaxZoom(14);
// Show click info in the #info element, if present on the page.
map5.on("click", (param: any) => {
    const infoDom = document.getElementById("info");
    if (!infoDom) {
        return;
    }
    infoDom.innerHTML = "info";
});
// NOTE(review): this mirror map was created but never assigned, while the sync
// handlers below reference `map1`; capture the instance. Also removed the
// stray space after "https://" / "http://" in the URL strings.
const map1 = new maptalks.Map("map1", {
    center: map5.getCenter(),
    zoom: map5.getZoom(),
    draggable: false, // disable draggble
    scrollWheelZoom: false, // disable scroll wheel zoom
    dblClickZoom: false, // disable doubleclick
    baseLayer: new maptalks.TileLayer("base1", {
        urlTemplate: "https://{s}.basemaps.cartocdn.com/light_all/{z}/{x}/{y}.png",
        subdomains: ["a", "b", "c", "d"],
        attribution:
            '&copy; <a href="http://osm.org">OpenStreetMap</a> contributors, &copy; <a href="https://carto.com/">CARTO</a>',
    }),
});
// Keep the mirror map (map1) in sync with map5's camera.
map5.on("moving moveend", (e: any) => map1.setCenter(e.target.getCenter()));
map5.on("zooming zoomend", (e: any) => map1.setCenterAndZoom(e.target.getCenter(), e.target.getZoom()));
map5.on("pitch", (e: any) => map1.setPitch(e.target.getPitch()));
map5.on("rotate", (e: any) => map1.setBearing(e.target.getBearing()));
// Top-right toolbar with a single no-op item, plus coordinate readout wiring.
const moveMeItems = [
    {
        item: "move me",
        click() {},
    },
];
new maptalks.control.Toolbar({
    position: "top-right",
    items: moveMeItems,
}).addTo(map5);
map5.on("moving moveend zoomend", update);
update();
/** Writes placeholder coordinate info into the #coordinate element, if present. */
function update() {
    const coorEle = document.getElementById("coordinate");
    if (!coorEle) {
        return;
    }
    coorEle.innerHTML = "coordiante info";
}
// Vector layer holding a single marker at the map center.
// NOTE(review): a layer with id "v" was already added in the map options above —
// TODO confirm the duplicate id is intentional for this type-test.
new maptalks.VectorLayer("v", new maptalks.Marker(map5.getCenter())).addTo(map5);
// Export map to an image.
// External image (tiles, marker images) hosts need to support CORS.
function save() {
    const exportOptions = {
        mimeType: "image/jpeg", // or 'image/png'
        save: true, // to pop a save dialog
        fileName: "map", // file name
    };
    const data = map5.toDataURL(exportOptions);
}
// Track the mouse position in container pixels and request redraws on change.
let mousePosition: any;
const requestRedraw = () => map5.getRenderer().setToRedraw();
map5.on("mousemove", (e: any) => {
    mousePosition = e.containerPoint;
    requestRedraw();
});
map5.on("mouseout", () => {
    mousePosition = null;
    requestRedraw();
});
map5.on("renderend", (e: any) => {
    // nothing to do when the pointer is outside the map
    if (!mousePosition) {
        return;
    }
});
// draw image data into a canvas, and clip it by a circle with diameter of size
// Draw `imageData` into an offscreen canvas, then clip it to a circle whose
// diameter equals `size`; returns the clipped canvas.
function createMagCircle(imageData: ImageData, size: number) {
    const source = document.createElement("canvas");
    const clipped = document.createElement("canvas");
    source.width = size;
    source.height = size;
    clipped.width = size;
    clipped.height = size;

    const srcCtx = source.getContext("2d");
    if (srcCtx) {
        srcCtx.putImageData(imageData, 0, 0);
    }

    const dstCtx = clipped.getContext("2d");
    if (dstCtx) {
        const radius = size / 2;
        dstCtx.beginPath();
        dstCtx.arc(radius, radius, radius, 0, 2 * Math.PI);
        // Restrict subsequent drawing to the circular region.
        dstCtx.clip();
        dstCtx.drawImage(source, 0, 0);
    }
    return clipped;
}
const arcUrl = "https:// services.arcgisonline.com/arcgis/rest/services/ESRI_Imagery_World_2D/MapServer";
maptalks.SpatialReference.loadArcgis(arcUrl + "?f=pjson", (err: any, conf: any) => {
if (err) {
throw new Error(err);
}
const ref = conf.spatialReference;
ref.projection = "EPSG:4326";
new maptalks.Map("map", {
center: [121, 0],
zoom: 1,
minZoom: 1,
maxZoom: 16,
spatialReference: ref,
baseLayer: new maptalks.TileLayer("base", {
tileSystem: conf.tileSystem,
tileSize: conf.tileSize, // [512, 512]
urlTemplate: arcUrl + "/tile/{z}/{y}/{x}",
attribution: `© <a target="_blank" href="${arcUrl}">ArcGIS</a>`,
}),
});
});
const url =
"https:// t0.tianditu.gov.cn/vec_c/wmts?request=GetCapabilities&service=wmts&tk=de0dc270a51aaca3dd4e64d4f8c81ff6";
maptalks.SpatialReference.loadWMTS(url, (err: any, conf: any) => {
if (err) {
throw new Error(err);
}
const params = conf[0];
params.urlTemplate += "&tk=de0dc270a51aaca3dd4e64d4f8c81ff6";
const spatialReference = params.spatialReference;
const tileLayer = new maptalks.TileLayer("tilelayer", params);
new maptalks.Map("map", {
center: [114.3404041441181, 30.548730054693106],
zoom: 10,
spatialReference,
baseLayer: tileLayer,
});
});
// A complete customized TileLayer
const resolutions = [];
const dd = 2 * 6378137 * Math.PI;
for (let i = 0; i < 21; i++) {
resolutions[i] = dd / (256 * Math.pow(2, i));
}
const map28 = new maptalks.Map("map", {
center: [-0.113049, 51.498568],
zoom: 13,
// a custom version of default web-mercator spatial reference
// map's spatial reference definition
spatialReference: {
projection: "EPSG:3857", // geo projection, can be a string or a function
resolutions,
fullExtent: {
// map's full extent
top: 6378137 * Math.PI,
left: -6378137 * Math.PI,
bottom: -6378137 * Math.PI,
right: 6378137 * Math.PI,
},
},
baseLayer: new maptalks.TileLayer("base", {
urlTemplate: "https:// {s}.basemaps.cartocdn.com/light_all/{z}/{x}/{y}.png",
subdomains: ["a", "b", "c", "d"],
attribution:
'© <a href="http:// osm.org">OpenStreetMap</a> contributors, © <a href="https:// carto.com/">CARTO</a>',
tileSystem: [1, -1, -20037508.34, 20037508.34], // tile system
minZoom: 1,
maxZoom: 20,
}),
});
const baseLayer = new maptalks.TileLayer("base", {
urlTemplate: "https:// {s}.basemaps.cartocdn.com/light_all/{z}/{x}/{y}.png",
subdomains: ["a", "b", "c", "d"],
attribution:
'© <a href="http:// osm.org">OpenStreetMap</a> contributors, © <a href="https:// carto.com/">CARTO</a>',
});
// generate tile url
baseLayer.getTileUrl = (x, y, z) => {
return `${x}/${y}/${z}`;
};
baseLayer.on("renderercreate", (e: any) => {
// load tile image
// img(Image): an Image object
// url(String): the url of the tile
e.renderer.loadTileImage = (img: any, url: any) => {
// mocking getting image's base64
// replace it by your own, e.g. load from sqlite database
const remoteImage = new Image();
remoteImage.crossOrigin = "anonymous";
remoteImage.onload = () => {
const base64 = getBase64Image(remoteImage);
img.src = base64;
};
remoteImage.src = url;
};
});
// Render `img` into a scratch canvas sized to the image and return the
// canvas content as a PNG data URL.
function getBase64Image(img: any) {
    const scratch = document.createElement("canvas");
    scratch.width = img.width;
    scratch.height = img.height;
    const context = scratch.getContext("2d");
    if (context) {
        context.drawImage(img, 0, 0);
    }
    return scratch.toDataURL("image/png");
}
const map29 = new maptalks.Map("map", {
center: [-0.113049, 51.498568],
zoom: 11,
baseLayer,
});
const map30 = new maptalks.Map("map", {
center: [-0.113049, 51.498568],
zoom: 11,
attribution: {
content:
'© <a href="http:// osm.org">OpenStreetMap</a> contributors, © <a href="https:// carto.com/">CARTO</a>, © ESRI',
},
baseLayer: new maptalks.TileLayer("base", {
urlTemplate:
"https:// server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}.jpg",
}),
});
map30.addLayer(
new maptalks.TileLayer("carto", {
opacity: 0.6, // TileLayer's opacity, 0-1
urlTemplate: "https:// {s}.basemaps.cartocdn.com/light_all/{z}/{x}/{y}.png",
subdomains: ["a", "b", "c", "d"],
attribution:
'© <a href="http:// osm.org">OpenStreetMap</a> contributors, © <a href="https:// carto.com/">CARTO</a>',
}),
);
const tileLayer = new maptalks.TileLayer("carto", {
urlTemplate: "https:// {s}.tile.openstreetmap5.org/{z}/{x}/{y}.png",
subdomains: ["a", "b", "c"],
// fragment shader from webglfundamentals.org
// https://webglfundamentals.org/webgl/lessons/webgl-image-processing.html
fragmentShader: [
"precision mediump float;" +
"uniform sampler2D u_image;" +
"uniform vec2 u_textureSize;" +
"uniform float u_kernel[9];" +
"uniform float u_opacity;" +
"uniform float u_kernelWeight;" +
"varying vec2 v_texCoord;" +
"void main() {" +
"vec2 onePixel = vec2(1.0, 1.0) / u_textureSize;" +
"vec4 colorSum =" +
"texture2D(u_image, v_texCoord + onePixel * vec2(-1, -1)) * u_kernel[0] +" +
"texture2D(u_image, v_texCoord + onePixel * vec2( 0, -1)) * u_kernel[1] +" +
"texture2D(u_image, v_texCoord + onePixel * vec2( 1, -1)) * u_kernel[2] +" +
"texture2D(u_image, v_texCoord + onePixel * vec2(-1, 0)) * u_kernel[3] +" +
"texture2D(u_image, v_texCoord + onePixel * vec2( 0, 0)) * u_kernel[4] +" +
"texture2D(u_image, v_texCoord + onePixel * vec2( 1, 0)) * u_kernel[5] +" +
"texture2D(u_image, v_texCoord + onePixel * vec2(-1, 1)) * u_kernel[6] +" +
"texture2D(u_image, v_texCoord + onePixel * vec2( 0, 1)) * u_kernel[7] +" +
"texture2D(u_image, v_texCoord + onePixel * vec2( 1, 1)) * u_kernel[8] ;" +
"gl_FragColor = vec4((colorSum / u_kernelWeight).rgb, 1) * u_opacity;" +
"}",
].join("\n"),
});
tileLayer.on("canvascreate", (e: any) => {
// set uniform values in shader
const gl = e.gl;
const program = gl.program;
const textureSizeLocation = gl.getUniformLocation(program, "u_textureSize");
const kernelLocation = gl.getUniformLocation(program, "u_kernel[0]");
const kernelWeightLocation = gl.getUniformLocation(program, "u_kernelWeight");
// kernels of sobelVertical in the original example
const kernels = [1, 0, -1, 2, 0, -2, 1, 0, -1];
gl.uniform2f(textureSizeLocation, 256, 256);
gl.uniform1fv(kernelLocation, new Float32Array(kernels));
gl.uniform1f(kernelWeightLocation, computeKernelWeight(kernels));
});
const map211 = new maptalks.Map("map1", {
center: [-0.113049, 51.498568],
zoom: 4,
attribution: {
content: "© OpenStreetMap",
},
baseLayer: tileLayer,
});
// Sum the convolution kernel's entries. The shader divides by this weight,
// so non-positive sums are clamped to 1 to avoid division by zero (e.g. the
// sobel kernel above sums to 0).
function computeKernelWeight(kernel: any) {
    // Seeding reduce with 0 keeps the same sum for non-empty kernels and
    // also makes an empty kernel safe — Array#reduce with no initial value
    // throws a TypeError on [].
    const weight = kernel.reduce((sum: number, value: number) => sum + value, 0);
    return weight <= 0 ? 1 : weight;
}
// original
const map2111 = new maptalks.Map("map0", {
center: [-0.113049, 51.498568],
zoom: 4,
attribution: {
content: "© OpenStreetMap",
},
baseLayer: new maptalks.TileLayer("base", {
urlTemplate: "https:// {s}.tile.openstreetmap5.org/{z}/{x}/{y}.png",
subdomains: ["a", "b", "c"],
}),
});
// Jianghan district's boundary, from boundary.js
const boundary = [
[
[114.28039004422378, 30.597258563674494],
[114.280751671522, 30.597182229947677],
],
];
const mask = new maptalks.Polygon(boundary, {
symbol: [
{
lineColor: "#ccc",
lineWidth: 8,
polygonFillOpacity: 0,
},
{
lineColor: "#404040",
lineWidth: 6,
polygonFillOpacity: 0,
},
],
});
// Copy the mask to add as mask's outline
const outline = mask.copy();
const maskedLayer = new maptalks.TileLayer("masked", {
urlTemplate: (x, y, z, domain) => {
return "https:// {s}.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}.png";
},
subdomains: ["a", "b", "c", "d"],
})
.setMask(mask) // set boundary as the mask to the tilelayer
.addTo(map5);
// District's name
const title = new maptalks.Marker(mask.getCenter(), {
symbol: {
textName: "JiangHan District",
textFaceName: "sans-serif",
textSize: 32,
textFill: "#1bbc9b",
textHaloFill: "#fff",
textHaloRadius: 5,
textDx: -30,
},
});
new maptalks.VectorLayer("v", [outline, title]).addTo(map5);
const map213 = new maptalks.Map("map", {
center: [105.08052356963802, 36.04231948670001],
zoom: 4,
minZoom: 1,
maxZoom: 18,
spatialReference: {
projection: "EPSG:4326",
},
baseLayer: new maptalks.TileLayer("base", {
tileSystem: [1, -1, -180, 90],
urlTemplate:
"http:// t{s}.tianditu.com/DataServer?T=vec_c&x={x}&y={y}&l={z}&tk=de0dc270a51aaca3dd4e64d4f8c81ff6",
subdomains: ["1", "2", "3", "4", "5"],
attribution: `© <a target="_blank" href="http:// www.tianditu.cn">Tianditu</a>`,
}),
layers: [
new maptalks.TileLayer("road", {
urlTemplate:
"http:// t{s}.tianditu.com/DataServer?T=cva_c&x={x}&y={y}&l={z}&tk=de0dc270a51aaca3dd4e64d4f8c81ff6",
subdomains: ["1", "2", "3", "4", "5"],
opacity: 1,
}),
],
});
const map214 = new maptalks.Map("map", {
center: [0, 0],
zoom: 4,
spatialReference: {
projection: "identity",
resolutions: [32, 16, 8, 4, 2, 1],
fullExtent: {
top: 10000,
left: -10000,
bottom: -10000,
right: 10000,
},
},
});
const soccerField = [
// field
new maptalks.Rectangle([-400, 260], 800, 520, {
symbol: {
lineWidth: 2,
lineColor: "#fff",
polygonFill: "rgb(0, 129, 0)",
},
}),
// halfway line
new maptalks.LineString(
[
[0, -260],
[0, 260],
],
{
symbol: {
lineColor: "#fff",
lineWidth: 2,
},
},
),
// center circle
new maptalks.Circle([0, 0], 70, {
symbol: {
lineColor: "#fff",
lineWidth: 2,
},
}),
// penalty arc
new maptalks.Sector([-315, 0], 60, -60, 60, {
symbol: {
lineColor: "#fff",
lineWidth: 2,
},
}),
// penalty arc
new maptalks.Sector([315, 0], 60, 120, 240, {
symbol: {
lineColor: "#fff",
lineWidth: 2,
},
}),
// penalty area
new maptalks.Rectangle([-400, 155], 120, 310, {
symbol: {
lineColor: "#fff",
lineWidth: 2,
polygonFill: "rgb(0, 129, 0)",
},
}),
// penalty area
new maptalks.Rectangle([400 - 120, 155], 120, 310, {
symbol: {
lineColor: "#fff",
lineWidth: 2,
polygonFill: "rgb(0, 129, 0)",
},
}),
// goal area
new maptalks.Rectangle([-400, 68], 42, 136, {
symbol: {
lineColor: "#fff",
lineWidth: 2,
},
}),
// goal area
new maptalks.Rectangle([400 - 42, 68], 42, 136, {
symbol: {
lineColor: "#fff",
lineWidth: 2,
},
}),
// penalty mark
new maptalks.Marker([315, 0], {
symbol: {
markerType: "ellipse",
markerWidth: 2,
markerHeight: 2,
markerFill: "#fff",
markerLineColor: "#fff",
},
}),
// penalty mark
new maptalks.Marker([-315, 0], {
symbol: {
markerType: "ellipse",
markerWidth: 2,
markerHeight: 2,
markerFill: "#fff",
markerLineColor: "#fff",
},
}),
];
new maptalks.VectorLayer("field", soccerField).addTo(map5);
const map215 = new maptalks.Map("map", {
center: [105.08052356963802, 36.04231948670001],
zoom: 5,
minZoom: 1,
maxZoom: 19,
spatialReference: {
projection: "baidu",
},
baseLayer: new maptalks.TileLayer("base", {
urlTemplate: "http:// online{s}.map5.bdimg.com/onlinelabel/?qt=tile&x={x}&y={y}&z={z}&styles=pl&scaler=1&p=1",
subdomains: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
attribution: '© <a target="_blank" href="http:// map5.baidu.com">Baidu</a>',
}),
});
// EPSG:3857's proj definition
const proj3857 =
"+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs";
// <script type="text/javascript" src="proj4.js"></script>
const proj4Fun = (name: string, proj: string): any => {};
const proj4 = proj4Fun("WGS84", proj3857);
// define a custom projection object
const projection = {
code: "proj4-merc", // code of the projection
project(c: any) {
// from wgs84 to EPSG3857
const pc = proj4.forward(c.toArray());
return new maptalks.Coordinate(pc);
},
unproject(pc: any) {
// from EPSG3857 to wgs84
const c = proj4.inverse(pc.toArray());
return new maptalks.Coordinate(c);
},
};
// Map using the custom proj4-backed projection defined above. The
// resolutions list halves at every zoom level starting from the standard
// web-mercator level-0 resolution.
// Fixes: the third resolution was "9135.75848201024", which breaks the
// strict halving progression (78271.51696402048 / 2 = 39135.75848201024);
// also removed the stray space after "https://" in the tile URLs.
new maptalks.Map("map", {
    center: [-0.113049, 51.498568],
    zoom: 13,
    // spatial reference definition
    spatialReference: {
        projection, // geo projection, defined by proj4js
        resolutions: [
            // map's zoom levels and resolutions (each is half the previous)
            156543.03392804097,
            78271.51696402048,
            39135.75848201024,
            19567.87924100512,
            9783.93962050256,
            4891.96981025128,
            2445.98490512564,
            1222.99245256282,
            611.49622628141,
            305.748113140705,
            152.8740565703525,
            76.43702828517625,
            38.21851414258813,
            19.109257071294063,
            9.554628535647032,
            4.777314267823516,
            2.388657133911758,
            1.194328566955879,
            0.5971642834779395,
            0.29858214173896974,
        ],
        fullExtent: {
            // map's full extent
            top: 6378137 * Math.PI,
            left: -6378137 * Math.PI,
            bottom: -6378137 * Math.PI,
            right: 6378137 * Math.PI,
        },
    },
    baseLayer: new maptalks.TileLayer("base", {
        urlTemplate: "https://{s}.basemaps.cartocdn.com/light_all/{z}/{x}/{y}.png",
        subdomains: ["a", "b", "c", "d"],
        attribution:
            '© <a href="http://osm.org">OpenStreetMap</a> contributors, © <a href="https://carto.com/">CARTO</a>',
    }),
});
const geometries33333 = maptalks.GeoJSON.toGeometry("");
const symbol = {
lineColor: "#fff",
lineWidth: 0.5,
polygonOpacity: 1,
polygonFill: "#747474",
};
new maptalks.VectorLayer("v", geometries33333, { geometryEvents: false, enableSimplify: false })
.forEach((geo: Geometry) => {
geo.setSymbol(symbol);
})
.addTo(map5);
new maptalks.Map("map", {
center: [-0.113049, 51.498568],
zoom: 9,
minZoom: 4,
maxZoom: 18,
spatialReference: {
projection: "EPSG:4326",
},
baseLayer: new maptalks.TileLayer("base", {
spatialReference: {
projection: "EPSG:3857",
// other properties necessary for spatial reference
},
urlTemplate: "https:// {s}.basemaps.cartocdn.com/light_all/{z}/{x}/{y}.png",
subdomains: ["a", "b", "c", "d"],
attribution:
'© <a href="http:// osm.org">OpenStreetMap</a> contributors, © <a href="https:// carto.com/">CARTO</a>',
}),
});
new maptalks.Map("map", {
center: [105.08052356963802, 36.04231948670001],
zoom: 5,
minZoom: 1,
maxZoom: 19,
baseLayer: new maptalks.TileLayer("base", {
urlTemplate: "https:// {s}.basemaps.cartocdn.com/light_all/{z}/{x}/{y}.png",
subdomains: ["a", "b", "c", "d"],
attribution:
'© <a href="http:// osm.org">OpenStreetMap</a> contributors, © <a href="https:// carto.com/">CARTO</a>',
// css filter
cssFilter: "sepia(100%) invert(90%)",
}),
});
const point2 = new maptalks.Marker([-0.113049, 51.498568], {
visible: true,
editable: true,
cursor: "pointer",
shadowBlur: 0,
shadowColor: "black",
draggable: false,
dragShadow: false, // display a shadow during dragging
drawOnAxis: null, // force dragging stick on a axis, can be: x, y
symbol: {
textFaceName: "sans-serif",
textName: "MapTalks",
textFill: "#34495e",
textHorizontalAlignment: "right",
textSize: 40,
},
});
new maptalks.VectorLayer("vector", point2).addTo(map5);
const line1 = new maptalks.LineString(
[
[-0.131049, 51.498568],
[-0.107049, 51.498568],
],
{
arrowStyle: null, // arrow-style : now we only have classic
arrowPlacement: "vertex-last", // arrow's placement: vertex-first, vertex-last, vertex-firstlast, point
visible: true,
editable: true,
cursor: null,
shadowBlur: 0,
shadowColor: "black",
draggable: false,
dragShadow: false, // display a shadow during dragging
drawOnAxis: null, // force dragging stick on a axis, can be: x, y
symbol: {
lineColor: "#1bbc9b",
lineWidth: 3,
},
},
);
new maptalks.VectorLayer("vector", line1).addTo(map5);
const polygon2 = new maptalks.Polygon(
[
[
[-0.131049, 51.498568],
[-0.107049, 51.498568],
[-0.107049, 51.493568],
[-0.131049, 51.493568],
[-0.131049, 51.498568],
],
],
{
visible: true,
editable: true,
cursor: "pointer",
shadowBlur: 0,
shadowColor: "black",
draggable: false,
dragShadow: false, // display a shadow during dragging
drawOnAxis: null, // force dragging stick on a axis, can be: x, y
symbol: {
lineColor: "#34495e",
lineWidth: 2,
polygonFill: "rgb(135,196,240)",
polygonOpacity: 0.6,
},
},
);
const c = new maptalks.Coordinate(-0.113049, 51.498568);
new maptalks.VectorLayer("vector", polygon2).addTo(map5);
const marker22 = new maptalks.Marker(c.add(-0.018, 0.007), {
symbol: {
textFaceName: "sans-serif",
textName: "MapTalks",
textFill: "#34495e",
textHorizontalAlignment: "right",
textSize: 40,
},
});
const line2 = new maptalks.LineString([c.add(-0.018, 0.005), c.add(0.006, 0.005)], {
symbol: {
lineColor: "#1bbc9b",
lineWidth: 3,
},
});
const polygon3 = new maptalks.Polygon(
[c.add(-0.018, 0.004), c.add(0.006, 0.004), c.add(0.006, -0.001), c.add(-0.018, -0.001), c.add(-0.018, 0.004)],
{
symbol: {
lineColor: "#34495e",
lineWidth: 2,
polygonFill: "rgb(135,196,240)",
polygonOpacity: 0.6,
},
},
);
const collection2 = new maptalks.GeometryCollection([marker22, line2, polygon3]);
new maptalks.VectorLayer("vector", collection2).addTo(map5);
const center2 = new maptalks.Coordinate(-0.113049, 51.498568);
const map35 = new maptalks.Map("map", {
center: center2,
zoom: 14,
baseLayer: new maptalks.TileLayer("base", {
urlTemplate: "https:// {s}.basemaps.cartocdn.com/light_all/{z}/{x}/{y}.png",
subdomains: ["a", "b", "c", "d"],
attribution:
'© <a href="http:// osm.org">OpenStreetMap</a> contributors, © <a href="https:// carto.com/">CARTO</a>',
}),
});
const multipoint = new maptalks.MultiPoint(
[
[-0.131049, 51.498568],
[-0.107049, 51.498568],
[-0.107049, 51.493568],
[-0.131049, 51.493568],
[-0.131049, 51.498568],
],
{
visible: true,
editable: true,
cursor: "pointer",
shadowBlur: 0,
shadowColor: "black",
draggable: false,
dragShadow: false, // display a shadow during dragging
drawOnAxis: null, // force dragging stick on a axis, can be: x, y
symbol: {
textFaceName: "sans-serif",
textName: "point",
textFill: "#34495e",
textHorizontalAlignment: "right",
textSize: 40,
},
},
);
new maptalks.VectorLayer("vector", multipoint).addTo(map35);
const multiline = new maptalks.MultiLineString(
[
[
[-0.131049, 51.503568],
[-0.107049, 51.503568],
],
[
[-0.131049, 51.498568],
[-0.107049, 51.498568],
],
[
[-0.131049, 51.493568],
[-0.107049, 51.493568],
],
],
{
arrowStyle: null, // arrow-style : now we only have classic
arrowPlacement: "vertex-last", // arrow's placement: vertex-first, vertex-last, vertex-firstlast, point
visible: true,
editable: true,
cursor: null,
shadowBlur: 0,
shadowColor: "black",
draggable: false,
dragShadow: false, // display a shadow during dragging
drawOnAxis: null, // force dragging stick on a axis, can be: x, y
symbol: {
lineColor: "#1bbc9b",
lineWidth: 3,
},
},
);
new maptalks.VectorLayer("vector", multiline).addTo(map5);
const multiPolygon = new maptalks.MultiPolygon(
[
[
[
[-0.131049, 51.503568],
[-0.107049, 51.503568],
[-0.107049, 51.501568],
[-0.131049, 51.501568],
],
],
[
[
[-0.131049, 51.498568],
[-0.107049, 51.498568],
[-0.107049, 51.496568],
[-0.131049, 51.496568],
],
],
[
[
[-0.131049, 51.493568],
[-0.107049, 51.493568],
[-0.107049, 51.491568],
[-0.131049, 51.491568],
],
],
],
{
visible: true,
editable: true,
cursor: null,
shadowBlur: 0,
shadowColor: "black",
draggable: false,
dragShadow: false, // display a shadow during dragging
drawOnAxis: null, // force dragging stick on a axis, can be: x, y
symbol: {
lineColor: "#34495e",
lineWidth: 2,
polygonFill: "rgb(135,196,240)",
polygonOpacity: 0.6,
},
},
);
new maptalks.VectorLayer("vector", multiPolygon).addTo(map5);
const rectangle = new maptalks.Rectangle(center.add(-0.018, 0.012), 800, 700, {
symbol: {
lineColor: "#34495e",
lineWidth: 2,
polygonFill: "#34495e",
polygonOpacity: 0.4,
},
});
const circle = new maptalks.Circle(center.add(0.002, 0.008), 500, {
symbol: {
lineColor: "#34495e",
lineWidth: 2,
polygonFill: "#1bbc9b",
polygonOpacity: 0.4,
},
});
const sector = new maptalks.Sector(center.add(-0.013, -0.001), 900, 240, 300, {
symbol: {
lineColor: "#34495e",
lineWidth: 2,
polygonFill: "rgb(135,196,240)",
polygonOpacity: 0.4,
},
});
const ellipse = new maptalks.Ellipse(center.add(0.003, -0.005), 1000, 600, {
symbol: {
lineColor: "#34495e",
lineWidth: 2,
polygonFill: "rgb(216,115,149)",
polygonOpacity: 0.4,
},
});
new maptalks.VectorLayer("vector").addGeometry([rectangle, circle, sector, ellipse]).addTo(map5);
const arc = new maptalks.ArcCurve(
[
c.add(-0.0202, 0.0081),
c.add(-0.0269, 0.0069),
c.add(-0.0369, 0.0032),
c.add(-0.0314, -0.003),
c.add(-0.0278, -0.008),
c.add(-0.022, -0.009),
],
{
symbol: getSymbol("Arc"),
},
);
const quad = new maptalks.QuadBezierCurve(
[
c.add(-0.0102, 0.0081),
c.add(-0.0169, 0.0069),
c.add(-0.0211, 0.0032),
c.add(-0.0214, -0.0033),
c.add(-0.0178, -0.0086),
c.add(-0.012, -0.0095),
],
{
symbol: getSymbol("Quadratic\nBézier"),
},
);
const cubic = new maptalks.CubicBezierCurve(
[
c.add(-0.0002, 0.0081),
c.add(-0.0069, 0.0069),
c.add(-0.0069, 0.0032),
c.add(-0.0114, -0.0033),
c.add(-0.0078, -0.0086),
c.add(-0.002, -0.0095),
],
{
symbol: getSymbol("Cubic\nBézier"),
},
);
new maptalks.VectorLayer("vector", [arc, quad, cubic]).addTo(map5);
// Build the composite symbol used by the curve examples: a line stroke,
// red dots on every vertex, and a red, halo'd title label wrapped on "\n".
function getSymbol(title: string) {
    const stroke = {
        lineColor: "#34495e",
        lineWidth: 3,
    };
    const vertexDot = {
        markerType: "ellipse",
        markerWidth: 8,
        markerHeight: 8,
        markerFill: "#f00",
        markerPlacement: "vertex",
    };
    const label = {
        textName: title,
        textFill: "#f00",
        textWeight: "bold",
        textHaloColor: "#fff",
        textHaloRadius: 3,
        textSize: 20,
        textWrapCharacter: "\n",
    };
    return [stroke, vertexDot, label];
}
const label2 = new maptalks.Label("label without box", [-0.126049, 51.496568], {
draggable: true,
textSymbol: {
textFaceName: "monospace",
textFill: "#34495e",
textHaloFill: "#fff",
textHaloRadius: 4,
textSize: 18,
textWeight: "bold",
textVerticalAlignment: "top",
},
});
const labelBox = new maptalks.Label("label with box", [-0.109049, 51.496568], {
draggable: true,
boxStyle: {
padding: [12, 8],
verticalAlignment: "top",
horizontalAlignment: "left",
minWidth: 200,
minHeight: 30,
symbol: {
markerType: "square",
markerFill: "rgb(135,196,240)",
markerFillOpacity: 0.9,
markerLineColor: "#34495e",
markerLineWidth: 1,
},
},
textSymbol: {
textFaceName: "monospace",
textFill: "#34495e",
textHaloFill: "#fff",
textHaloRadius: 4,
textSize: 18,
textWeight: "bold",
textVerticalAlignment: "top",
},
});
new maptalks.VectorLayer("vector", [labelBox, label2]).addTo(map5);
const textbox2 = new maptalks.TextBox(
"This is a textbox, with very long content", // content
[-0.113049, 51.498568], // coordinate
200, // width
90, // height
{
draggable: true,
textStyle: {
wrap: true, // auto wrap text
padding: [12, 8], // padding of textbox
verticalAlignment: "top",
horizontalAlignment: "right",
symbol: {
textFaceName: "monospace",
textFill: "#34495e",
textHaloFill: "#fff",
textHaloRadius: 4,
textSize: 18,
textWeight: "bold",
},
},
boxSymbol: {
// box's symbol
markerType: "square",
markerFill: "rgb(135,196,240)",
markerFillOpacity: 0.9,
markerLineColor: "#34495e",
markerLineWidth: 1,
},
},
);
new maptalks.VectorLayer("vector", textbox2).addTo(map5);
const layer3 = new maptalks.VectorLayer("vector").addTo(map5);
// blue circle
const src = new maptalks.Marker([-0.128449, 51.503568], {
symbol: {
markerType: "ellipse",
markerFill: "rgb(135,196,240)",
markerFillOpacity: 0.8,
markerLineColor: "#fff",
markerLineWidth: 3,
markerWidth: 120,
markerHeight: 120,
},
});
// red circle
const dst = new maptalks.Marker([-0.102149, 51.503568], {
draggable: true,
symbol: [
{
markerType: "ellipse",
markerFill: "rgb(216,115,149)",
markerFillOpacity: 0.8,
markerLineColor: "#fff",
markerLineWidth: 3,
markerWidth: 70,
markerHeight: 70,
},
{
textName: "Drag\nMe",
textSize: 18,
textFill: "#fff",
textWrapCharacter: "\n",
},
],
});
// connector line
const line3 = new maptalks.ConnectorLine(src, dst, {
showOn: "always", // 'moving', 'click', 'mouseover', 'always'
arrowStyle: "classic",
arrowPlacement: "vertex-last", // 'vertex-last', // vertex-first, vertex-last, vertex-firstlast, point
symbol: {
lineColor: "#34495e",
lineWidth: 2,
},
});
layer3.addGeometry([src, dst, line3]);
const src2 = src.copy().translate(0, -0.01);
const dst2 = dst.copy().translate(0, -0.01);
// Arc Connector Line
const line4 = new maptalks.ArcConnectorLine(src2, dst2, {
arcDegree: 90,
showOn: "always",
symbol: {
lineColor: "#34495e",
lineWidth: 2,
},
});
layer3.addGeometry([src2, dst2, line4]);
const copyLayer = new maptalks.VectorLayer("copy").addTo(map5);
const rect = new maptalks.Rectangle([-0.121049, 51.50656], 800, 600, {
symbol: {
lineColor: "#fff",
lineWidth: 2,
polygonFill: "rgb(216,115,149)",
polygonOpacity: 0.7,
},
}).addTo(layer3);
let counter = 1;
// Stamp another copy of the outer `rect` onto `copyLayer`. Each call
// translates by a further [0.003, -0.003] (scaled by the shared `counter`),
// so successive copies fan out diagonally.
function copy() {
    // copy with translation of [0.003, -0.003]
    rect.copy()
        .translate(0.003 * counter, -0.003 * counter)
        .addTo(copyLayer);
    counter++;
}
// Remove all stamped copies from `copyLayer` and reset the shared
// translation counter used by copy().
function clear() {
    counter = 1;
    copyLayer.clear();
}
const marker314 = new maptalks.Marker(map5.getCenter(), {
symbol: [
{
markerType: "square",
markerFill: "rgba(216,115,149,0.8)",
markerWidth: 120,
markerHeight: 120,
},
{
textName: "Click\non Me",
textSize: 18,
},
],
}).addTo(layer3);
addListen();
// Subscribe onEvent to the (module-level) marker's pointer events.
function addListen() {
    // mousemove and touchmove is annoying, so not listening to it.
    marker.on("mousedown mouseup click dblclick contextmenu touchstart touchend", onEvent);
}
// Unsubscribe onEvent from the same event list addListen() registered.
function removeListen() {
    // mousemove and touchmove is annoying, so not listening to it.
    marker.off("mousedown mouseup click dblclick contextmenu touchstart touchend", onEvent);
}
const events: any[] = [];
// Record each received map event and render the full history (most recent
// first) into the "#events" element. Appends to the module-level `events`
// array; the template literal's embedded newlines/indentation are part of
// the rendered output, so the code is left untouched.
function onEvent(param: any) {
    events.push(param);
    let content = "";
    for (let i = events.length - 1; i >= 0; i--) {
        content +=
            events[i].type +
            `on
            ${events[i].coordinate
                .toArray()
                .map((c: Coordinate) => {
                    return c.toFixed(5);
                })
                .join()}
            <br>`;
    }
    const ele = document.getElementById("events");
    if (ele) {
        ele.innerHTML = `<div>${content}</div>`;
    }
    // return false to stop event propagation
    return false;
}
const marker315 = new maptalks.Marker(map5.getCenter(), {
symbol: {
textFaceName: "sans-serif",
textName: "FLASH\nME",
textFill: "#34495e",
textSize: 40,
textHaloColor: "white",
textHaloRadius: 8,
},
});
new maptalks.VectorLayer("vector", marker315).addTo(map5);
// Flash the (module-level) marker 5 times at 200 ms intervals, alerting
// once the animation completes.
function flash() {
    marker.flash(
        200, // flash interval in ms
        5, // count
        () => {
            // callback when flash end
            alert("flash ended");
        },
    );
}
const point4 = new maptalks.Marker([-0.113049 - 0.018, 51.498568 + 0.003], {
symbol: {
textFaceName: "sans-serif",
textName: "MapTalks",
textFill: "#34495e",
textHorizontalAlignment: "right",
textSize: 40,
},
properties: {
foo: "marker",
},
});
const line16 = new maptalks.LineString(
[
[-0.131049, 51.499568],
[-0.107049, 51.499568],
],
{
symbol: {
lineColor: "#1bbc9b",
lineWidth: 3,
},
properties: {
foo: "linestring",
},
},
);
const polygon4 = new maptalks.Polygon(
[
[-0.131049, 51.498568],
[-0.107049, 51.498568],
[-0.107049, 51.493568],
[-0.131049, 51.493568],
[-0.131049, 51.498568],
],
{
symbol: {
lineColor: "#34495e",
lineWidth: 2,
polygonFill: "rgb(135,196,240)",
polygonOpacity: 0.6,
},
properties: {
foo: "polygon",
},
},
);
const collection = new maptalks.GeometryCollection([line16, polygon4, point4], {
visible: true,
editable: true,
cursor: null,
shadowBlur: 0,
shadowColor: "black",
draggable: false,
dragShadow: false,
drawOnAxis: null,
});
new maptalks.VectorLayer("vector", collection).addTo(map5);
// filter
// Select members of the outer `collection` whose property foo equals
// "polygon" (mapbox-style filter expression) and repaint them red.
function filter() {
    // condition can be a mapbox filter or a function
    const filtered = collection.filter(["==", "foo", "polygon"]);
    filtered.forEach((polygon: Geometry) => {
        polygon.updateSymbol({
            polygonFill: "#f00",
        });
    });
}
// point with altitude
const point41 = new maptalks.Marker([-0.113049, 51.498568], {
properties: {
altitude: 400,
},
});
// same point without altitude
const point0 = new maptalks.Marker([-0.113049, 51.498568]).updateSymbol({
markerOpacity: 0.5,
markerFill: "#bbb",
});
new maptalks.VectorLayer("vector", [point0, point41], {
enableAltitude: true, // enable altitude
altitudeProperty: "altitude", // altitude property in properties, default by 'altitude'
}).addTo(map5);
new maptalks.VectorLayer("vector", null, {
enableAltitude: true,
// draw altitude
drawAltitude: {
lineWidth: 1,
lineColor: "#000",
},
}).addTo(map5);
map5.setPitch(60);
// line with one altitude
const line431 = new maptalks.LineString(
[
[-0.131049, 51.498568],
[-0.107049, 51.498568],
[-0.093049, 51.498568],
],
{
symbol: {
lineColor: "#1bbc9b",
lineWidth: 3,
textName: "{altitude}",
textPlacement: "vertex",
},
properties: {
altitude: 200, // altitude for all vertexes
},
},
);
// line with separate altitudes
const line432 = new maptalks.LineString(
[
[-0.131049, 51.498568],
[-0.107049, 51.498568],
[-0.093049, 51.498568],
],
{
properties: {
altitude: [400, 600, 1200], // seperate altitude for each vertex
},
symbol: {
lineColor: "rgb(135,196,240)",
lineWidth: 3,
textName: "{altitude}",
textPlacement: "vertex",
},
},
);
// line without altitude
const line433 = new maptalks.LineString(
[
[-0.131049, 51.498568],
[-0.107049, 51.498568],
[-0.093049, 51.498568],
],
{
symbol: {
lineColor: "#000",
lineDasharray: [10, 5, 5],
lineWidth: 3,
textName: "0",
textPlacement: "vertex",
},
},
);
new maptalks.VectorLayer("vector", [line433, line431, line432], { enableAltitude: true }).addTo(map5);
const line44 = new maptalks.LineString(
[
[-0.131049, 51.498568],
[-0.107049, 51.498568],
[-0.101049, 51.498568],
],
{
symbol: {
lineColor: "#1bbc9b",
lineWidth: 3,
},
properties: {
altitude: [100, 400, 1200],
},
},
);
// same line without altitude
const line440 = new maptalks.LineString(
[
[-0.131049, 51.498568],
[-0.107049, 51.498568],
],
{
symbol: {
lineColor: "#000",
lineDasharray: [10, 5, 5],
lineWidth: 3,
},
},
);
new maptalks.VectorLayer("vector", [line44], {
enableAltitude: true,
drawAltitude: {
polygonFill: "#1bbc9b",
polygonOpacity: 0.3,
lineWidth: 0,
},
}).addTo(map5);
const rectangle2 = new maptalks.Rectangle(center.add(-0.018, 0.012), 800, 700, {
symbol: {
lineColor: "#34495e",
lineWidth: 2,
polygonFill: "#34495e",
polygonOpacity: 0.4,
},
properties: {
altitude: 100,
},
});
const circle2 = new maptalks.Circle(center.add(0.002, 0.008), 500, {
symbol: {
lineColor: "#34495e",
lineWidth: 2,
polygonFill: "#1bbc9b",
polygonOpacity: 0.4,
},
properties: {
altitude: 800,
},
});
const sector2 = new maptalks.Sector(center.add(-0.013, -0.001), 900, 240, 300, {
symbol: {
lineColor: "#34495e",
lineWidth: 2,
polygonFill: "rgb(135,196,240)",
polygonOpacity: 0.4,
},
properties: {
altitude: 600,
},
});
const ellipse2 = new maptalks.Ellipse(center.add(0.003, -0.005), 1000, 600, {
symbol: {
lineColor: "#34495e",
lineWidth: 2,
polygonFill: "rgb(216,115,149)",
polygonOpacity: 0.4,
},
properties: {
altitude: 400,
},
});
const layer = new maptalks.VectorLayer("vector", null, { enableAltitude: true })
.addGeometry([rectangle, circle, sector, ellipse])
.addTo(map5);
// draw shadows
const shadowSymbol = {
lineColor: "#bbb",
lineDasharray: [10, 5, 5],
lineWidth: 2,
polygonFill: "#bbb",
polygonOpacity: 0.4,
};
const shadows: any = [];
layer.forEach((geo: Geometry) => {
shadows.push(geo.copy().setSymbol(shadowSymbol));
});
new maptalks.VectorLayer("shadows", shadows).addTo(map5).bringToBack();
new maptalks.Marker(center.sub(0.009, 0), {
symbol: {
markerFile: "1.png",
markerWidth: 28,
markerHeight: 40,
markerDx: 0,
markerDy: 0,
markerOpacity: 1,
},
}).addTo(layer);
const marker2 = new maptalks.Marker(center.sub(0.006, 0), {
symbol: {
markerFile: "2.png",
markerWidth: 28,
markerHeight: 40,
markerDx: 0,
markerDy: 0,
markerOpacity: 1,
},
}).addTo(layer);
const marker3 = new maptalks.Marker(center.sub(0.003, 0), {
symbol: {
markerFile: "3.png",
markerWidth: 28,
markerHeight: 40,
markerDx: 0,
markerDy: 0,
markerOpacity: 1,
},
}).addTo(layer);
const marker4 = new maptalks.Marker(center, {
symbol: {
markerFile: "4.png",
markerWidth: 28,
markerHeight: 40,
markerDx: 0,
markerDy: 0,
markerOpacity: 1,
},
}).addTo(layer);
const marker5 = new maptalks.Marker(center.add(0.003, 0), {
symbol: {
markerFile: "5.png",
markerWidth: 28,
markerHeight: 40,
markerDx: 0,
markerDy: 0,
markerOpacity: 1,
},
}).addTo(layer);
const marker6 = new maptalks.Marker(center.add(0.006, 0), {
symbol: {
markerFile: "6.png",
markerWidth: 28,
markerHeight: 40,
markerDx: 0,
markerDy: 0,
markerOpacity: 1,
},
}).addTo(layer);
new maptalks.Marker([-0.113049, 51.49856], {
symbol: [
{
markerFile: "avatar.jpg",
markerWidth: 29,
markerHeight: 29,
markerDy: -20,
},
{
markerFile: "marker.png",
},
],
}).addTo(layer);
new maptalks.Marker(c.sub(0.02, 0), {
symbol: {
markerType: "ellipse",
markerFill: "rgb(135,196,240)",
markerFillOpacity: 1,
markerLineColor: "#34495e",
markerLineWidth: 3,
markerLineOpacity: 1,
markerLineDasharray: [],
markerWidth: 40,
markerHeight: 40,
markerDx: 0,
markerDy: 0,
markerOpacity: 1,
},
}).addTo(layer);
new maptalks.Marker(c.sub(0.015, 0), {
symbol: {
markerType: "cross",
markerFill: "rgb(135,196,240)",
markerFillOpacity: 1,
markerLineColor: "#34495e",
markerLineWidth: 3,
markerLineOpacity: 1,
markerLineDasharray: [],
markerWidth: 40,
markerHeight: 40,
markerDx: 0,
markerDy: 0,
markerOpacity: 1,
},
}).addTo(layer);
new maptalks.Marker([-0.109049, 51.49856], {
symbol: {
markerType: "ellipse",
markerFill: {
type: "linear",
places: [0, 0, 1, 1],
colorStops: [
[0.0, "#fff"],
[0.5, "#fff27e"],
[1, "#f87e4b"],
],
},
markerLineWidth: 0,
markerWidth: 100,
markerHeight: 100,
},
}).addTo(layer);
new maptalks.Marker([-0.113049, 51.49856], {
symbol: {
markerType: "path",
markerPath: getTigerPath(),
markerPathWidth: 540,
markerPathHeight: 580,
// 'markerFill': '#6fa8dc', // will override tiger path's style properties
// 'markerLineColor' : 12,
markerWidth: 400,
markerHeight: 400,
markerDy: 200,
markerDx: 0,
},
}).addTo(layer);
/**
 * Placeholder for the tiger SVG path string used by the path-marker demo.
 * The real sample embeds a long SVG path; this stub supplies an empty one.
 */
function getTigerPath(): string {
    const svgPath = "";
    return svgPath;
}
new maptalks.Marker(center.add(0.01, 0), {
symbol: {
textName: "m4",
textSize: 14,
markerFile: "m4.png",
markerHorizontalAlignment: "middle", // left, middle(default), right
markerVerticalAlignment: "middle", // top, middle, bottom(default)
},
}).addTo(layer);
new maptalks.Marker([-0.113049, 51.49856], {
properties: {
name: "Hello\nMapTalks",
},
symbol: {
textFaceName: "sans-serif",
textName: "{name}", // value from name in geometry's properties
textWeight: "normal", // 'bold', 'bolder'
textStyle: "normal", // 'italic', 'oblique'
textSize: 40,
textFont: null, // same as CanvasRenderingContext2D.font, override textName, textWeight and textStyle
textFill: "#34495e",
textOpacity: 1,
textHaloFill: "#fff",
textHaloRadius: 5,
textWrapWidth: null,
textWrapCharacter: "\n",
textLineSpacing: 0,
textDx: 0,
textDy: 0,
textHorizontalAlignment: "middle", // left | middle | right | auto
textVerticalAlignment: "middle", // top | middle | bottom | auto
textAlign: "center", // left | right | center | auto
},
}).addTo(layer);
new maptalks.LineString([map5.getCenter().sub(0.1, 0), map5.getCenter().add(0.1, 0)], {
symbol: {
linePatternFile: "line-pattern.png",
lineWidth: 20,
},
}).addTo(layer);
new maptalks.LineString([map5.getCenter().sub(0.1, 0), map5.getCenter().add(0.1, 0), map5.getCenter().add(0.1, -0.1)], {
arrowStyle: "classic", // we only have one arrow style now
arrowPlacement: "vertex-firstlast", // vertex-first, vertex-last, vertex-firstlast, point
symbol: {
lineColor: "#1bbc9b",
lineWidth: 8,
},
}).addTo(layer);
new maptalks.LineString(
[
c.add(-0.0202, 0.0081),
c.add(-0.0269, 0.0069),
c.add(-0.0369, 0.0032),
c.add(-0.0314, -0.003),
c.add(-0.0278, -0.008),
c.add(-0.022, -0.009),
],
{
symbol: {
lineColor: "#f00",
shadowBlur: 10,
shadowOffsetX: 10,
shadowOffsetY: 10,
},
},
)
.translate(0.04, 0)
.addTo(layer);
new maptalks.Marker(map5.getCenter(), {
symbol: [
{
markerType: "ellipse",
markerFill: "#fff",
markerFillOpacity: 1,
markerWidth: 20,
markerHeight: 20,
markerLineWidth: 0,
},
{
markerType: "ellipse",
markerFill: "#1bc8ff",
markerFillOpacity: 0.9,
markerWidth: 55,
markerHeight: 55,
markerLineWidth: 0,
},
{
markerType: "ellipse",
markerFill: "#0096cd",
markerFillOpacity: 0.8,
markerWidth: 91,
markerHeight: 91,
markerLineWidth: 0,
},
{
markerType: "ellipse",
markerFill: "#0096cd",
markerFillOpacity: 0.3,
markerWidth: 130,
markerHeight: 130,
markerLineWidth: 0,
},
{
markerType: "ellipse",
markerFill: "#0096cd",
markerFillOpacity: 0.2,
markerWidth: 172,
markerHeight: 172,
markerLineWidth: 0,
},
],
}).addTo(layer);
new maptalks.ui.UIMarker([-0.113049, 51.49856], {
draggable: true,
single: false,
content: '<div class="text_marker">HTML Marker</div>',
});
marker2.addTo(map5).show();
new maptalks.Marker(map5.getCenter(), {
symbol: {
textName: "Layer is added.",
textWeight: "bold",
textSize: 50,
textFill: "#1bbc9b",
textHaloFill: "#fff",
textHaloRadius: 5,
},
});
new maptalks.VectorLayer("vector", [marker2]).addTo(map5);
map5.addLayer(layer);
map5.removeLayer(layer);
layer.show();
layer.hide();
const rect11 = new maptalks.Rectangle(map5.getCenter().add(-0.025, 0.005), 1600, 1000, {
symbol: [
{
lineColor: "#34495e",
lineWidth: 3,
polygonFill: "#1bbc9b",
},
{
textName: "70%",
textWeight: "bold",
textSize: 30,
textFill: "#fff",
},
],
});
const rect2 = rect11
.copy()
.translate([0.03, 0])
.updateSymbol([{}, { textName: "40%" }]);
const layer1 = new maptalks.VectorLayer("vector1", [rect11], {
opacity: 0.7,
}).addTo(map5);
const layer2 = new maptalks.VectorLayer("vector2", [rect2], {
opacity: 0.4,
}).addTo(map5);
layer1.bringToBack();
layer2.bringToFront();
map5.on("mousemove", (e: any) => {
if (!layer.getMask()) {
layer.setMask(
new maptalks.Marker(e.coordinate, {
symbol: {
markerType: "ellipse",
markerWidth: 200,
markerHeight: 200,
},
}),
);
} else {
(<maptalks.Marker> layer.getMask()).setCoordinates(e.coordinate);
}
});
const marker55 = new maptalks.Marker(
center, // .add(-0.018,0.007).toArray(),
{
symbol: {
textFaceName: '"microsoft yahei",arial,sans-serif',
textName: "MapTalks",
textFill: "#34495e",
textHorizontalAlignment: "right",
textSize: 40,
},
},
);
const polyline55 = new maptalks.LineString(
[
center, // .add(-0.018,0.005).toArray(),
center.add(0.006, 0.005).toArray(),
],
{
symbol: {
lineColor: "#1bbc9b",
lineWidth: 3,
},
},
);
const polygon55 = new maptalks.Polygon(
[
center.add(-0.018, 0.004).toArray(),
center.add(0.006, 0.004).toArray(),
center.add(0.006, -0.001).toArray(),
center.add(-0.018, -0.001).toArray(),
center.add(-0.018, 0.004).toArray(),
],
{
id: "cc",
symbol: {
lineColor: "#34495e",
lineWidth: 2,
polygonFill: "rgb(135,196,240)",
polygonOpacity: 0.6,
},
},
);
new maptalks.VectorLayer("vector").addGeometry([marker55, polyline55, polygon55]).addTo(map5);
new maptalks.VectorLayer("vector")
.setStyle({
filter: ["count", ">=", 0],
symbol: getSymbol("#747474"),
})
.addTo(map5);
const rect3 = new maptalks.Rectangle(map5.getCenter().sub(0.025, 0.0035), 1200, 1000, {
symbol: [
{
lineColor: "#34495e",
lineWidth: 3,
polygonFill: "#1bbc9b",
polygonOpacity: 1,
},
{
textName: "3",
textWeight: "bold",
textSize: 30,
textFill: "#fff",
},
],
});
const rect22 = rect3
.copy()
.translate([0.006, 0.006])
.updateSymbol([{ polygonFill: "rgb(216,115,149)" }, { textName: "2" }]);
const rect1 = rect22
.copy()
.translate([0.006, 0.006])
.updateSymbol([{ polygonFill: "rgb(135,196,240)" }, { textName: "1" }]);
// sort to 3,2,1
// Reorder by relative stacking calls: raise rect3 above its siblings and
// push rect1 behind; rect2 keeps its current slot.
function sort1() {
    rect3.bringToFront();
    rect1.bringToBack();
}
// sort to 1,2,3
// Reorder with explicit z-indexes instead; a higher zIndex renders on top,
// so rect1 ends up above rect2, which is above rect3.
function sort2() {
    rect1.setZIndex(3);
    rect2.setZIndex(2);
    rect3.setZIndex(1);
}
(<VectorLayer> map5.getLayer("v")).addGeometry([rect3, rect2, rect1]);
const canvasLayer = new maptalks.CanvasLayer("c", {
forceRenderOnMoving: true,
forceRenderOnZooming: true,
});
canvasLayer.prepareToDraw = (/* context */) => {
return ["foo", "bar"];
};
// param1 and param2 are prepareToDraw's return values.
canvasLayer.draw = function(context, view, param1, param2) {
const size = map5.getSize();
const str222 = `${param1},${param2}`;
context.fillStyle = "#f00";
context.font = "bolder 50px sans-serif";
const len = context.measureText(str222);
context.fillText(str222, size.width / 2 - len.width / 2, size.height / 2);
this.completeRender();
};
// draw when map is interacting
canvasLayer.drawOnInteracting = function(context, view, param1, param2) {
this.draw(context, view, param1, param2);
};
map5.addLayer(canvasLayer);
// An animated particle circle
const particles = new maptalks.ParticleLayer("c", {
forceRenderOnMoving: true,
});
// circle's radius in meters
const radius = 1000;
particles.getParticles = (t: number) => {
map5.coordinateToContainerPoint(center);
};
map5.addLayer(particles);
new maptalks.Marker(center, {
symbol: {
markerType: "cross",
markerWidth: 10,
markerHeight: 10,
markerLineWidth: 2,
},
}).addTo(map5.getLayer("v"));
new maptalks.Circle(center, 1000, {
symbol: {
lineColor: "#fff",
lineWidth: 6,
lineOpacity: 0.2,
polygonOpacity: 0,
},
}).addTo(map5.getLayer("v"));
const layerOrder = ["earth", "landuse", "water", "roads", "building"];
// draw mapzen's geojson vector tile with CanvasTileLayer
const canvasTile = new maptalks.CanvasTileLayer("tile", {
urlTemplate: "https:// tile.mapzen.com/mapzen/vector/v1/all/{z}/{x}/{y}.json?api_key=mapzen-cGRKZj",
attribution: '© <a href="https:// mapzen.com/" target="_blank">mapzen</a>',
});
canvasTile.drawTile = (canvas, tileContext, onComplete) => {
maptalks.Ajax.getJSON(tileContext.url, (err: any, data: any) => {
if (err) {
throw err;
}
const layers = [];
let loaded = 0;
function onLayerLoaded() {
loaded++;
if (loaded === layers.length) {
onComplete(null);
}
}
const mapzenStyle = getMapZenStyle();
// prepare a VectorLayer per mapzen's layer
for (let i = 0, l = layerOrder.length; i < l; i++) {
const name = layerOrder[i];
if (!data[name]) {
continue;
}
const style = mapzenStyle[name];
layers.push(
new maptalks.VectorLayer(name, maptalks.GeoJSON.toGeometry(data[name]), {
style,
enableSimplify: false,
geometryEvents: false,
}).on("layerload", onLayerLoaded),
);
}
// create a map instance on tile's canvas
new maptalks.Map(canvas, {
center: tileContext.center,
zoom: tileContext.z,
layers,
});
});
};
new maptalks.Map("map", {
center: [-122.12258202067433, 38.080679835385574],
zoom: 9,
centerCross: true,
baseLayer: canvasTile,
});
/**
 * Builds a simple style sheet for mapzen's GeoJSON vector tiles, keyed by
 * tile layer name. Each entry is a list of { filter, symbol } rules.
 *
 * NOTE(review): the keys here include "buildings" while the `layerOrder`
 * list above uses "building" — confirm which name the tile source emits.
 */
function getMapZenStyle(): any {
    // water/earth/landuse share one shape: hide point features (transparent
    // 4px ellipse marker) and draw lines/polygons in a single flat color.
    const flatFill = (color: string) => [
        {
            filter: ["==", "$type", "Point"],
            symbol: {
                markerOpacity: 0,
                markerType: "ellipse",
                markerFill: color,
                markerWidth: 4,
                markerHeight: 4,
            },
        },
        {
            filter: true,
            symbol: {
                lineColor: color,
                polygonFill: color,
            },
        },
    ];
    return {
        roads: [
            {
                // Highways: grey casing with a red center line.
                filter: ["==", "kind", "highway"],
                symbol: [
                    {
                        lineColor: "grey",
                        lineWidth: 7,
                    },
                    {
                        lineColor: "#cc6666",
                        lineWidth: 4,
                    },
                ],
            },
            {
                filter: ["==", "kind", "minor_road"],
                symbol: {
                    lineColor: "lightgrey",
                    lineWidth: 3,
                },
            },
            {
                // Fallback for every other road kind.
                filter: true,
                symbol: {
                    lineColor: "lightgrey",
                    lineWidth: 2,
                },
            },
        ],
        buildings: [
            {
                filter: true,
                symbol: {
                    lineColor: "#000",
                    polygonFill: "#fff",
                },
            },
        ],
        water: flatFill("#88bbee"),
        earth: flatFill("#ddeeee"),
        landuse: flatFill("#aaffaa"),
    };
}
const layer616 = new maptalks.TileLayer("light", {
urlTemplate: "https:// {s}.basemaps.cartocdn.com/light_all/{z}/{x}/{y}.png",
subdomains: ["a", "b", "c", "d"],
attribution:
'© <a href="http:// osm.org">OpenStreetMap</a> contributors, © <a href="https:// carto.com/">CARTO</a>',
// force layer to render when map is zooming and moving
forceRenderOnMoving: true,
forceRenderOnZooming: true,
});
new maptalks.Map("map", {
center: [121.4, 37.5],
zoom: 13,
baseLayer: new maptalks.TileLayer("base", {
urlTemplate: "https:// {s}.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}.png",
subdomains: ["a", "b", "c", "d"],
}),
layers: [layer616],
});
const swipe = document.getElementById("swipe");
const renderer = layer616.getRenderer();
const canvasGetter = renderer.getCanvasImage;
// override renderer's default method to get layer canvas image
renderer.getCanvasImage = () => {
const dpr = map5.getDevicePixelRatio();
// original layer canvas image
const layerImage = canvasGetter.call(renderer);
if (!layerImage || !layerImage.image) {
return layerImage;
}
// drawn width after layer is erased by swipper
const ctx = renderer.context;
// const width = renderer.canvas.width * (swipe.value / 100);
const width = renderer.canvas.width;
const height = ctx.canvas.height;
// copy drawn rect of original layer canvas
const drawnRect = document.createElement("canvas");
drawnRect.width = width;
drawnRect.height = ctx.canvas.height;
const drawnRect2dContext = drawnRect.getContext("2d");
if (drawnRect2dContext) {
drawnRect2dContext.drawImage(layerImage.image, 0, 0);
}
// clear the erased part
ctx.clearRect(0, 0, ctx.canvas.width, ctx.canvas.height);
// draw a white background to cover the bottom layers when zooming
ctx.beginPath();
ctx.rect(0, 0, width / dpr, height / dpr);
ctx.fillStyle = "#fff";
ctx.fill();
// draw the drawn part on layer's canvas
ctx.drawImage(drawnRect, 0, 0, width / dpr, height / dpr);
layerImage.image = ctx.canvas;
return layerImage;
};
// swipe.addEventListener('input', function () {
// // const layer redraw self in the next frame
// layer.getRenderer().setToRedraw();
// });
const imageLayer = new maptalks.ImageLayer("images", [
{
url: "1.png",
extent: [-0.11854216406254636, 51.50043810048564, -0.09081885168461667, 51.50994770979011],
opacity: 1,
},
{
url: "2.png",
extent: [-0.10343596289067136, 51.50797115663946, -0.07897421667485105, 51.51876102463089],
opacity: 1,
},
]);
map5.addLayer(imageLayer);
const map71 = new maptalks.Map("map", {
center: [-0.113049, 51.498568],
zoom: 14,
draggable: false, // disable drag
dragPan: false, // disable drag panning
dragRotate: false, // disable drag rotation
dragPitch: false, // disable drag pitch
scrollWheelZoom: false, // disable wheel zoom
touchZoom: false, // disable touchzoom
doubleClickZoom: false, // disable doubleclick zoom
baseLayer: new maptalks.TileLayer("base", {
urlTemplate: "https:// {s}.basemaps.cartocdn.com/light_all/{z}/{x}/{y}.png",
subdomains: ["a", "b", "c", "d"],
attribution:
'© <a href="http:// osm.org">OpenStreetMap</a> contributors, © <a href="https:// carto.com/">CARTO</a>',
}),
});
// Toolbar callbacks: each on/off pair toggles a single interaction option
// on map5 via Map#config.
function dragOn() {
    map5.config("draggable", true);
}
function dragOff() {
    map5.config("draggable", false);
}
function zoomOn() {
    map5.config("zoomable", true);
}
function zoomOff() {
    map5.config("zoomable", false);
}
function scrollOn() {
    map5.config("scrollWheelZoom", true);
}
function scrollOff() {
    map5.config("scrollWheelZoom", false);
}
function touchZoomOn() {
    map5.config("touchZoom", true);
}
function touchZoomOff() {
    map5.config("touchZoom", false);
}
function dblClickOn() {
    map5.config("doubleClickZoom", true);
}
function dblClickOff() {
    map5.config("doubleClickZoom", false);
}
const items = [
["Drag", dragOn, dragOff],
["Zoom", zoomOn, zoomOff],
["ScrollWheel", scrollOn, scrollOff],
["TouchZoom", touchZoomOn, touchZoomOff],
["DblClick", dblClickOn, dblClickOff],
].map(value => {
return {
item: value[0],
children: [
{
item: "ON",
click: value[1],
},
{
item: "OFF",
click: value[2],
},
],
};
});
new maptalks.control.Toolbar({
items,
}).addTo(map5);
const distanceTool = new maptalks.DistanceTool({
symbol: {
lineColor: "#34495e",
lineWidth: 2,
},
vertexSymbol: {
markerType: "ellipse",
markerFill: "#1bbc9b",
markerLineColor: "#000",
markerLineWidth: 3,
markerWidth: 10,
markerHeight: 10,
},
labelOptions: {
textSymbol: {
textFaceName: "monospace",
textFill: "#fff",
textLineSpacing: 1,
textHorizontalAlignment: "right",
textDx: 15,
markerLineColor: "#b4b3b3",
markerFill: "#000",
},
boxStyle: {
padding: [6, 2],
symbol: {
markerType: "square",
markerFill: "#000",
markerFillOpacity: 0.9,
markerLineColor: "#b4b3b3",
},
},
},
clearButtonSymbol: [
{
markerType: "square",
markerFill: "#000",
markerLineColor: "#b4b3b3",
markerLineWidth: 2,
markerWidth: 15,
markerHeight: 15,
markerDx: 20,
},
{
markerType: "x",
markerWidth: 10,
markerHeight: 10,
markerLineColor: "#fff",
markerDx: 20,
},
],
language: "en-US",
}).addTo(map5);
const areaTool = new maptalks.AreaTool({
symbol: {
lineColor: "#1bbc9b",
lineWidth: 2,
polygonFill: "#fff",
polygonOpacity: 0.3,
},
vertexSymbol: {
markerType: "ellipse",
markerFill: "#34495e",
markerLineColor: "#1bbc9b",
markerLineWidth: 3,
markerWidth: 10,
markerHeight: 10,
},
labelOptions: {
textSymbol: {
textFaceName: "monospace",
textFill: "#fff",
textLineSpacing: 1,
textHorizontalAlignment: "right",
textDx: 15,
},
boxStyle: {
padding: [6, 2],
symbol: {
markerType: "square",
markerFill: "#000",
markerFillOpacity: 0.9,
markerLineColor: "#b4b3b3",
},
},
},
clearButtonSymbol: [
{
markerType: "square",
markerFill: "#000",
markerLineColor: "#b4b3b3",
markerLineWidth: 2,
markerWidth: 15,
markerHeight: 15,
markerDx: 22,
},
{
markerType: "x",
markerWidth: 10,
markerHeight: 10,
markerLineColor: "#fff",
markerDx: 22,
},
],
language: "",
}).addTo(map5);
const drawTool = new maptalks.DrawTool({
mode: "Point",
})
.addTo(map5)
.disable();
drawTool.on("drawend", (param: any) => {
console.log(param.geometry);
layer.addGeometry(param.geometry);
});
const itemsc = [
"Point",
"LineString",
"Polygon",
"Circle",
"Ellipse",
"Rectangle",
"FreeHandLineString",
"FreeHandPolygon",
].map(value => {
return {
item: value,
click() {
drawTool.setMode(value).enable();
},
};
});
const toolbar = new maptalks.control.Toolbar({
items: [
{
item: "Shape",
children: itemsc,
},
{
item: "Disable",
click() {
drawTool.disable();
},
},
{
item: "Clear",
click() {
layer.clear();
},
},
],
}).addTo(map5);
const marker56 = new maptalks.Marker(center.add(-0.018, 0.007).toArray(), {
draggable: true,
symbol: {
textFaceName: '"microsoft yahei",arial,sans-serif',
textName: "Try to Drag Us",
textFill: "#34495e",
textHorizontalAlignment: "right",
textSize: 40,
},
});
const polyline = new maptalks.LineString([center.add(-0.018, 0.005).toArray(), center.add(0.006, 0.005).toArray()], {
draggable: true,
symbol: {
lineColor: "#1bbc9b",
lineWidth: 5,
},
});
const polygon56 = new maptalks.Polygon(
[
center.add(-0.018, 0.004).toArray(),
center.add(0.006, 0.004).toArray(),
center.add(0.006, -0.001).toArray(),
center.add(-0.018, -0.001).toArray(),
center.add(-0.018, 0.004).toArray(),
],
{
draggable: true,
symbol: {
lineColor: "#34495e",
lineWidth: 2,
polygonFill: "rgb(135,196,240)",
polygonOpacity: 0.6,
},
},
);
const geometriesc = [marker56, polyline, polygon56];
new maptalks.VectorLayer("vector").addGeometry(geometriesc).addTo(map5);
const point = new maptalks.Marker([-0.113049, 51.498568], {
visible: true,
editable: true,
cursor: "pointer",
shadowBlur: 0,
shadowColor: "black",
draggable: false,
dragShadow: false, // display a shadow during dragging
drawOnAxis: null, // force dragging stick on a axis, can be: x, y
symbol: {
markerType: "ellipse",
markerWidth: 40,
markerHeight: 40,
markerFill: "rgb(216,115,149)",
markerLineColo: "#fff",
},
});
new maptalks.VectorLayer("vector", point).addTo(map5);
startEdit();
// Enter interactive edit mode on the marker created above.
function startEdit() {
    point.startEdit();
}
// Leave interactive edit mode on the marker.
function endEdit() {
    point.endEdit();
}
const line = new maptalks.LineString(
[
[-0.131049, 51.498568],
[-0.107049, 51.498568],
],
{
arrowStyle: null, // arrow-style : now we only have classic
arrowPlacement: "vertex-last", // arrow's placement: vertex-first, vertex-last, vertex-firstlast, point
visible: true,
editable: true,
cursor: null,
shadowBlur: 0,
shadowColor: "black",
draggable: false,
dragShadow: false, // display a shadow during dragging
drawOnAxis: null, // force dragging stick on a axis, can be: x, y
symbol: {
lineColor: "#1bbc9b",
lineWidth: 3,
},
},
);
new maptalks.VectorLayer("vector", line).addTo(map5);
startEditLine();
// Enter interactive edit mode on the line string created above.
function startEditLine() {
    line.startEdit();
}
// Leave interactive edit mode on the line string.
function endEditLine() {
    line.endEdit();
}
const polygon57 = new maptalks.Polygon(
    [
        [
            [-0.131049, 51.498568],
            [-0.107049, 51.498568],
            [-0.107049, 51.493568],
            [-0.131049, 51.493568],
            [-0.131049, 51.498568],
        ],
    ],
    {
        visible: true,
        editable: true,
        cursor: "pointer",
        shadowBlur: 0,
        shadowColor: "black",
        draggable: false,
        dragShadow: false, // display a shadow during dragging
        drawOnAxis: null, // force dragging stick on a axis, can be: x, y
        symbol: {
            lineColor: "#34495e",
            lineWidth: 2,
            polygonFill: "rgb(135,196,240)",
            polygonOpacity: 0.6,
        },
    },
);
new maptalks.VectorLayer("vector", polygon57).addTo(map5);
startEditPolygon();
// Enter interactive edit mode on the polygon created above.
// Fix: these two functions previously referenced `polygon`, a const that is
// declared much later in the file — calling startEditPolygon() here threw a
// temporal-dead-zone ReferenceError. They now edit `polygon57`.
function startEditPolygon() {
    polygon57.startEdit();
}
// Leave interactive edit mode on the same polygon.
function endEditPolygon() {
    polygon57.endEdit();
}
const rectangle3 = new maptalks.Rectangle(center.add(-0.018, 0.012), 800, 700, {
symbol: {
lineColor: "#34495e",
lineWidth: 2,
polygonFill: "#34495e",
polygonOpacity: 0.4,
},
});
const circle3 = new maptalks.Circle(center.add(0.002, 0.008), 500, {
symbol: {
lineColor: "#34495e",
lineWidth: 2,
polygonFill: "#1bbc9b",
polygonOpacity: 0.4,
},
});
const ellipse3 = new maptalks.Ellipse(center.add(0.003, -0.005), 1000, 600, {
symbol: {
lineColor: "#34495e",
lineWidth: 2,
polygonFill: "rgb(216,115,149)",
polygonOpacity: 0.4,
},
});
new maptalks.VectorLayer("vector").addGeometry([rectangle, circle, ellipse]).addTo(map5);
startEditcc();
function startEditcc() {
rectangle.startEdit();
circle.startEdit();
ellipse.startEdit();
}
function endEditcc() {
rectangle.endEdit();
circle.endEdit();
ellipse.endEdit();
}
const textbox = new maptalks.TextBox(
"This is a textbox, with very long content", // content
[-0.113049, 51.498568], // coordinate
200, // width
90, // height
{
draggable: true,
textStyle: {
wrap: true, // auto wrap text
padding: [12, 8], // padding of textbox
verticalAlignment: "top",
horizontalAlignment: "right",
symbol: {
textFaceName: "monospace",
textFill: "#34495e",
textHaloFill: "#fff",
textHaloRadius: 4,
textSize: 18,
textWeight: "bold",
},
},
boxSymbol: {
// box's symbol
markerType: "square",
markerFill: "rgb(135,196,240)",
markerFillOpacity: 0.9,
markerLineColor: "#34495e",
markerLineWidth: 1,
},
},
);
new maptalks.VectorLayer("vector", textbox).addTo(map5);
startEditTextBox();
function startEditTextBox() {
textbox.startEdit();
}
function endEditTextBox() {
textbox.endEdit();
}
const label = new maptalks.Label("label with box", [-0.117, 51.496], {
textSymbol: {
textFaceName: "sans-serif",
textFill: "#fff",
textSize: 18,
},
boxStyle: {
padding: [12, 8],
symbol: {
markerType: "square",
markerFillOpacity: 0.9,
markerLineColor: "#34495e",
markerFill: "#34495e",
markerLineWidth: 1,
},
},
}).addTo(layer);
label.startEditText();
label.endEditText();
map5.on("click", (e: any) => {
// reset colors
layer.forEach((g: Geometry) => {
g.updateSymbol({
markerFill: "#0e595e",
});
});
// identify
map5.identify(
{
coordinate: e.coordinate,
layers: [layer],
},
(geos: Geometry[]) => {
if (geos.length === 0) {
return;
}
geos.forEach(g => {
g.updateSymbol({
markerFill: "#f00",
});
});
},
);
});
// prepare data
map5.animateTo(
{
center: [-74.10704772446428, 40.66032606133018],
zoom: 18,
pitch: 65,
bearing: 360,
},
{
duration: 7000,
},
);
const polygon = new maptalks.Polygon(
[
[
[-0.131049, 51.498568],
[-0.107049, 51.498568],
[-0.107049, 51.493568],
[-0.131049, 51.493568],
[-0.131049, 51.498568],
],
],
{
visible: false,
symbol: {
lineColor: "#34495e",
lineWidth: 2,
polygonFill: "rgb(135,196,240)",
polygonOpacity: 0.6,
},
},
);
new maptalks.VectorLayer("vector", polygon).addTo(map5);
replay();
// Hide the polygon, then replay its show animation over 1.5s; logs once the
// animation reports a "finished" play state.
function replay() {
    polygon.hide();
    // polygon's animateShow
    polygon.animateShow(
        {
            duration: 1500,
            easing: "out",
        },
        (frame: maptalks.animation.Frame) => {
            if (frame.state.playState === "finished") {
                console.log("finished");
            }
        },
    );
}
const targetStyles = {
symbol: {
markerWidth: 200,
markerHeight: 200,
},
};
// animate by maptalks.animation.Animation
const player = maptalks.animation.Animation.animate(
targetStyles,
{
duration: 1000,
easing: "out",
},
// callback of each frame
(frame: maptalks.animation.Frame) => {
if (frame.state.playState === "running") {
marker.updateSymbol(frame.styles.symbol);
}
},
);
setTimeout(() => {
player.play();
}, 600);
const json = {
type: "Feature",
geometry: {
type: "Point",
coordinates: [-0.113049, 51.498568],
},
properties: {
name: "point marker",
},
};
maptalks.SpatialReference.getProjectionInstance("");
new maptalks.CRS("", {});
maptalks.GeoJSON.toGeometry(json).addTo(map5.getLayer("v"));
const marker = new maptalks.Marker([-0.113049, 51.498568], {
properties: {
name: "point marker",
},
}).addTo(map5.getLayer("v"));
JSON.stringify(marker.toGeoJSON());
map5.toJSON();
const cc = [-0.113049, 51.498568];
new maptalks.Map("map", {
center: c,
zoom: 13,
baseLayer: new maptalks.TileLayer("base", {
urlTemplate: "https:// {s}.basemaps.cartocdn.com/light_all/{z}/{x}/{y}.png",
subdomains: ["a", "b", "c", "d"],
attribution:
'© <a href="http:// osm.org">OpenStreetMap</a> contributors, © <a href="https:// carto.com/">CARTO</a>',
}),
layers: [
new maptalks.VectorLayer("v0", [new maptalks.Marker(cc)]),
new maptalks.VectorLayer("v1", [new maptalks.Rectangle(cc, 1000, 800)]),
],
});
new maptalks.Marker(c);
new maptalks.Rectangle(c, 1000, 800);
(<VectorLayer> map5.getLayer("v")).addGeometry(marker, rect);
const map1 = new maptalks.Map("map1", {
center: c,
zoom: 13,
baseLayer: new maptalks.TileLayer("base", {
urlTemplate: "https:// {s}.basemaps.cartocdn.com/light_all/{z}/{x}/{y}.png",
subdomains: ["a", "b", "c", "d"],
attribution:
'© <a href="http:// osm.org">OpenStreetMap</a> contributors, © <a href="https:// carto.com/">CARTO</a>',
}),
});
const newLayer = new maptalks.VectorLayer("v").addTo(map1);
// copy geometry by JSON
maptalks.Geometry.fromJSON(rect.toJSON()).addTo(newLayer);
const optionscc = {
content: "",
// override parent's animationOnHide option
animationOnHide: false,
};
// Custom UI component: renders a text div anchored at a coordinate and
// briefly hides/re-shows itself ("flashes") after every zoom.
class MyUI extends maptalks.ui.UIComponent {
    // Anchor coordinate the UI is shown at.
    // Fix: these two fields were assigned without being declared, which is a
    // TS2339 error under strict compilation.
    private _coordinate: Coordinate;
    // Pending timer for the post-zoom flash; cleared on removal.
    private _flashTimeout?: ReturnType<typeof setTimeout>;

    constructor(coordinate: Coordinate, options: object) {
        super(options);
        this._coordinate = coordinate;
    }
    // Builds the DOM rendered by this component from the `content` option.
    buildOn(map: maptalks.Map) {
        const dom = document.createElement("div");
        dom.className = "my-ui";
        dom.innerText = this.options["content"];
        return dom;
    }
    getOffset() {
        const size = this.getSize();
        // move anchor to center of UI
        return new maptalks.Point(-size.width / 2, -size.height / 2);
    }
    // Map events this component listens to while added.
    getEvents() {
        return {
            zoomend: this._flash,
        };
    }
    // Cancel any pending flash so the timer cannot fire after removal.
    onRemove() {
        if (this._flashTimeout) {
            clearTimeout(this._flashTimeout);
        }
    }
    _flash() {
        // flash after zooming.
        this.hide();
        this._flashTimeout = setTimeout(() => {
            this.show(this._coordinate);
        }, 200);
    }
}
MyUI.mergeOptions(optionscc);
const map = new maptalks.Map("map", {
center: [-0.113049, 51.49856],
zoom: 14,
baseLayer: new maptalks.TileLayer("base", {
urlTemplate: "https:// {s}.basemaps.cartocdn.com/light_all/{z}/{x}/{y}.png",
subdomains: ["a", "b", "c", "d"],
attribution:
'© <a href="http:// osm.org">OpenStreetMap</a> contributors, © <a href="https:// carto.com/">CARTO</a>',
}),
});
const ui = new MyUI(map5.getCenter(), {
content: "Hello, MyUI",
});
ui.addTo(map5).show();
/**
 * Builds a top-right Toolbar control containing a single item whose label is
 * the given (HTML) text and whose click handler does nothing.
 */
function toolbarc(text: string) {
    const singleItem = {
        item: text,
        click() {},
    };
    return new maptalks.control.Toolbar({
        position: "top-right",
        items: [singleItem],
    });
}
toolbarc('<div class="attr">Click to add Marker, right click to clear</div>').addTo(map5);
class CustomTool extends maptalks.MapTool {
onEnable() {
this._markerLayer = new maptalks.VectorLayer("CustomTool_layer").addTo(this.getMap());
}
onDisable() {
if (this._markerLayer) {
this._markerLayer.remove();
}
}
getEvents() {
return {
click: this._onClick,
contextmenu: this._onRightClick,
};
}
_onClick(param: any) {
(<VectorLayer> this._markerLayer).addGeometry(new maptalks.Marker(param.coordinate));
}
_onRightClick(param: any) {
(<VectorLayer> this._markerLayer).clear();
}
}
const customTool = new CustomTool().addTo(map5);
const options = {
// 默认颜色
color: "Red",
// 默认字体
font: "30px san-serif",
};
// Minimal custom layer: carries an arbitrary data payload that its renderer
// (HelloLayerRenderer, registered below) draws as text.
class HelloLayer extends maptalks.Layer {
    // Payload drawn by the renderer; see setData/getData.
    data: any;
    // Constructor: store the optional payload alongside the base Layer state.
    constructor(id: string | number, data?: any, options?: maptalks.LayerOptions) {
        super(id, options);
        this.data = data;
    }
    // Replace the layer's payload; returns this for chaining.
    setData(data: any) {
        this.data = data;
        return this;
    }
    // Current payload.
    getData() {
        return this.data;
    }
}
// 定义默认的图层配置属性
HelloLayer.mergeOptions(options);
// Canvas renderer for HelloLayer: draws each data entry's text centered at
// its coordinate, using the layer's `color`/`font` options.
class HelloLayerRenderer extends maptalks.renderer.CanvasRenderer {
    // Entries drawn by the last full draw(); replayed while interacting.
    _drawnData: any;
    checkResources() {
        // HelloLayer only draws text and loads no external images, so there
        // are no resources to report.
        return [];
    }
    draw() {
        const drawn = this._drawData(this.layer.getData(), this.layer.options.color);
        // Remember what was actually drawn so drawOnInteracting can replay it.
        this._drawnData = drawn;
        // Finish the render pass:
        // 1. fires the necessary events
        // 2. marks the renderer's canvas as updated so the map composites it
        this.completeRender();
    }
    // Redraw the previously drawn entries while the map is being interacted with.
    drawOnInteracting(evtParam: any) {
        if (!this._drawnData || this._drawnData.length === 0) {
            return;
        }
        this._drawData(this._drawnData, this.layer.options.color);
    }
    // Callback for frames where drawOnInteracting is skipped.
    onSkipDrawOnInteracting() {}
    // Treat the layer as animated (redraw every frame) when options.animation is set.
    needToRedraw() {
        if (this.layer.options["animation"]) {
            return true;
        }
        return super.needToRedraw();
    }
    /**
     * Draws the data entries onto the renderer's canvas and returns the
     * subset that was actually on screen.
     */
    _drawData(data: any, color: string) {
        if (!Array.isArray(data)) {
            return;
        }
        // Fix: use the map this renderer belongs to — previously this local
        // was computed but the code below used the outer `map5` global.
        const map = this.getMap();
        // prepareCanvas (inherited from CanvasRenderer) readies the canvas:
        // creates it when missing, clears it otherwise.
        this.prepareCanvas();
        // this.context is the renderer canvas's CanvasRenderingContext2D
        const ctx = this.context;
        // Apply the layer's configured style.
        ctx.fillStyle = color;
        ctx.font = this.layer.options["font"];
        const containerExtent = map.getContainerExtent();
        const drawn: any = [];
        data.forEach(d => {
            // Convert the entry's lng/lat to a container point, i.e. the pixel
            // position relative to the map container's top-left corner.
            const point = map.coordinateToContainerPoint(new maptalks.Coordinate(d.coord));
            // Skip entries outside the viewport to save work.
            if (!containerExtent.contains(point)) {
                return;
            }
            const text = d.text;
            const len = ctx.measureText(text);
            ctx.fillText(text, point.x - len.width / 2, point.y);
            drawn.push(d);
        });
        return drawn;
    }
}
HelloLayer.registerRenderer("canvas", HelloLayerRenderer);
const layerccc = new HelloLayer("hello");
layerccc.setData([
{
coord: map5.getCenter().toArray(),
text: "Hello World",
},
{
coord: map5.getCenter().add(0.01, 0.01).toArray(),
text: "Hello World 2",
},
]);
layerccc.addTo(map5);
maptalks.INTERNAL_LAYER_PREFIX; | the_stack |
import { Callback } from "@siteimprove/alfa-callback";
import { Clone } from "@siteimprove/alfa-clone";
import { Comparable, Comparer, Comparison } from "@siteimprove/alfa-comparable";
import { Equatable } from "@siteimprove/alfa-equatable";
import { Hash } from "@siteimprove/alfa-hash";
import { Iterable } from "@siteimprove/alfa-iterable";
import { Serializable } from "@siteimprove/alfa-json";
import { Mapper } from "@siteimprove/alfa-mapper";
import { None, Option } from "@siteimprove/alfa-option";
import { Predicate } from "@siteimprove/alfa-predicate";
import { Reducer } from "@siteimprove/alfa-reducer";
import { Refinement } from "@siteimprove/alfa-refinement";
import * as builtin from "./builtin";
const { not } = Predicate;
const { compareComparable } = Comparable;
/**
* @remarks
* This is a re-export of the global `Array` interface to ensure that it merges
* with the `Array` namespace.
*
* @public
*/
export type Array<T> = globalThis.Array<T>;
/**
* @public
*/
export namespace Array {
  /**
   * Checks whether a value is a built-in `Array`, refining `Iterable<T>` or
   * `unknown` inputs accordingly.
   */
  export function isArray<T>(value: Iterable<T>): value is Array<T>;
  export function isArray<T>(value: unknown): value is Array<T>;
  export function isArray<T>(value: unknown): value is Array<T> {
    return builtin.Array.isArray(value);
  }
  /**
   * Constructs an array of the given values.
   */
  export function of<T>(...values: Array<T>): Array<T> {
    return values;
  }
  /**
   * Constructs a new, empty array.
   */
  export function empty<T = never>(): Array<T> {
    return [];
  }
  /**
   * Allocates a sparse array with room for `capacity` elements.
   */
  export function allocate<T>(capacity: number): Array<T> {
    return new builtin.Array<T>(capacity);
  }
  /**
   * Converts an iterable to an array.
   *
   * @remarks
   * Unlike the built-in function of the same name, this function will pass
   * along existing arrays as-is instead of returning a copy.
   */
  export function from<T>(iterable: Iterable<T>): Array<T> {
    if (isArray(iterable)) {
      return iterable;
    }

    return [...iterable];
  }
  /**
   * Returns the number of elements in the array.
   */
  export function size<T>(array: ReadonlyArray<T>): number {
    return array.length;
  }
  /**
   * Checks whether the array has no elements, narrowing it to `Array<never>`.
   */
  export function isEmpty<T>(array: ReadonlyArray<T>): array is Array<never> {
    return array.length === 0;
  }
export function copy<T>(array: ReadonlyArray<T>): Array<T> {
return array.slice(0);
}
  /**
   * Returns a new array with every element cloned via its `Clone`
   * implementation.
   */
  export function clone<T extends Clone<T>>(array: ReadonlyArray<T>): Array<T> {
    return array.map(Clone.clone);
  }
export function forEach<T>(
array: ReadonlyArray<T>,
callback: Callback<T, void, [index: number]>
): void {
for (let i = 0, n = array.length; i < n; i++) {
callback(array[i], i);
}
}
export function map<T, U = T>(
array: ReadonlyArray<T>,
mapper: Mapper<T, U, [index: number]>
): Array<U> {
const result = new builtin.Array<U>(array.length);
for (let i = 0, n = array.length; i < n; i++) {
result[i] = mapper(array[i], i);
}
return result;
}
export function flatMap<T, U = T>(
array: ReadonlyArray<T>,
mapper: Mapper<T, ReadonlyArray<U>, [index: number]>
): Array<U> {
const result = empty<U>();
for (let i = 0, n = array.length; i < n; i++) {
result.push(...mapper(array[i], i));
}
return result;
}
export function flatten<T>(array: ReadonlyArray<ReadonlyArray<T>>): Array<T> {
return flatMap(array, (array) => array);
}
export function reduce<T, U = T>(
array: ReadonlyArray<T>,
reducer: Reducer<T, U, [index: number]>,
accumulator: U
): U {
for (let i = 0, n = array.length; i < n; i++) {
accumulator = reducer(accumulator, array[i], i);
}
return accumulator;
}
export function reduceWhile<T, U = T>(
array: ReadonlyArray<T>,
predicate: Predicate<T, [index: number]>,
reducer: Reducer<T, U, [index: number]>,
accumulator: U
): U {
for (let i = 0, n = array.length; i < n; i++) {
const value = array[i];
if (predicate(value, i)) {
accumulator = reducer(accumulator, value, i);
} else {
break;
}
}
return accumulator;
}
export function reduceUntil<T, U = T>(
array: ReadonlyArray<T>,
predicate: Predicate<T, [index: number]>,
reducer: Reducer<T, U, [index: number]>,
accumulator: U
): U {
return reduceWhile(array, not(predicate), reducer, accumulator);
}
export function apply<T, U>(
array: ReadonlyArray<T>,
mapper: ReadonlyArray<Mapper<T, U>>
): Array<U> {
return flatMap(mapper, (mapper) => map(array, mapper));
}
export function filter<T, U extends T>(
array: ReadonlyArray<T>,
refinement: Refinement<T, U, [index: number]>
): Array<U>;
export function filter<T>(
array: ReadonlyArray<T>,
predicate: Predicate<T, [index: number]>
): Array<T>;
export function filter<T>(
array: ReadonlyArray<T>,
predicate: Predicate<T, [index: number]>
): Array<T> {
const result = empty<T>();
for (let i = 0, n = array.length; i < n; i++) {
const value = array[i];
if (predicate(value, i)) {
result.push(value);
}
}
return result;
}
export function reject<T, U extends T>(
array: ReadonlyArray<T>,
refinement: Refinement<T, U, [index: number]>
): Array<Exclude<T, U>>;
export function reject<T>(
array: ReadonlyArray<T>,
predicate: Predicate<T, [index: number]>
): Array<T>;
export function reject<T>(
array: ReadonlyArray<T>,
predicate: Predicate<T, [index: number]>
): Array<T> {
return filter(array, not(predicate));
}
export function find<T, U extends T>(
array: ReadonlyArray<T>,
refinement: Refinement<T, U, [index: number]>
): Option<U>;
export function find<T>(
array: ReadonlyArray<T>,
predicate: Predicate<T, [index: number]>
): Option<T>;
export function find<T>(
array: ReadonlyArray<T>,
predicate: Predicate<T, [index: number]>
): Option<T> {
for (let i = 0, n = array.length; i < n; i++) {
const value = array[i];
if (predicate(value, i)) {
return Option.of(value);
}
}
return None;
}
export function findLast<T, U extends T>(
array: ReadonlyArray<T>,
refinement: Refinement<T, U, [index: number]>
): Option<U>;
export function findLast<T>(
array: ReadonlyArray<T>,
predicate: Predicate<T, [index: number]>
): Option<T>;
export function findLast<T>(
array: ReadonlyArray<T>,
predicate: Predicate<T, [index: number]>
): Option<T> {
for (let i = array.length - 1; i >= 0; i--) {
const value = array[i];
if (predicate(value, i)) {
return Option.of(value);
}
}
return None;
}
export function includes<T>(array: ReadonlyArray<T>, value: T): boolean {
return some(array, Predicate.equals(value));
}
export function collect<T, U>(
array: ReadonlyArray<T>,
mapper: Mapper<T, Option<U>, [index: number]>
): Array<U> {
const result = empty<U>();
for (let i = 0, n = array.length; i < n; i++) {
for (const value of mapper(array[i], i)) {
result.push(value);
}
}
return result;
}
export function collectFirst<T, U>(
array: ReadonlyArray<T>,
mapper: Mapper<T, Option<U>, [index: number]>
): Option<U> {
for (let i = 0, n = array.length; i < n; i++) {
const value = mapper(array[i], i);
if (value.isSome()) {
return value;
}
}
return None;
}
export function some<T>(
array: ReadonlyArray<T>,
predicate: Predicate<T, [index: number]>
): boolean {
for (let i = 0, n = array.length; i < n; i++) {
if (predicate(array[i], i)) {
return true;
}
}
return false;
}
export function none<T>(
array: ReadonlyArray<T>,
predicate: Predicate<T, [index: number]>
): boolean {
return every(array, not(predicate));
}
export function every<T>(
array: ReadonlyArray<T>,
predicate: Predicate<T, [index: number]>
): boolean {
for (let i = 0, n = array.length; i < n; i++) {
if (!predicate(array[i], i)) {
return false;
}
}
return true;
}
export function count<T>(
array: ReadonlyArray<T>,
predicate: Predicate<T, [index: number]>
): number {
return reduce(
array,
(count, value, index) => (predicate(value, index) ? count + 1 : count),
0
);
}
export function distinct<T>(array: ReadonlyArray<T>): Array<T> {
const result = empty<T>();
for (let i = 0, n = array.length; i < n; i++) {
const value = array[i];
if (result.some(Predicate.equals(value))) {
continue;
}
result.push(value);
}
return result;
}
export function get<T>(array: ReadonlyArray<T>, index: number): Option<T> {
return index < array.length ? Option.of(array[index]) : None;
}
export function has<T>(array: ReadonlyArray<T>, index: number): boolean {
return index < array.length;
}
export function set<T>(array: Array<T>, index: number, value: T): Array<T> {
if (index < array.length) {
array[index] = value;
}
return array;
}
export function insert<T>(
array: Array<T>,
index: number,
value: T
): Array<T> {
if (index <= array.length) {
array.splice(index, 0, value);
}
return array;
}
export function append<T>(array: Array<T>, value: T): Array<T> {
array.push(value);
return array;
}
export function prepend<T>(array: Array<T>, value: T): Array<T> {
array.unshift(value);
return array;
}
export function concat<T>(
array: ReadonlyArray<T>,
...iterables: Array<Iterable<T>>
): Array<T> {
return [...Iterable.concat(array, ...iterables)];
}
export function subtract<T>(
array: ReadonlyArray<T>,
...iterables: Array<Iterable<T>>
): Array<T> {
return [...Iterable.subtract(array, ...iterables)];
}
export function intersect<T>(
array: ReadonlyArray<T>,
...iterables: Array<Iterable<T>>
): Array<T> {
return [...Iterable.intersect(array, ...iterables)];
}
export function zip<T, U = T>(
array: ReadonlyArray<T>,
iterable: Iterable<U>
): Array<[T, U]> {
const result = empty<[T, U]>();
const it = Iterable.iterator(iterable);
for (let i = 0, n = array.length; i < n; i++) {
const next = it.next();
if (next.done === true) {
break;
}
result.push([array[i], next.value]);
}
return result;
}
export function first<T>(array: ReadonlyArray<T>): Option<T> {
return array.length > 0 ? Option.of(array[0]) : None;
}
export function last<T>(array: ReadonlyArray<T>): Option<T> {
return array.length > 0 ? Option.of(array[array.length - 1]) : None;
}
export function sort<T extends Comparable<T>>(array: Array<T>): Array<T> {
return sortWith(array, compareComparable);
}
export function sortWith<T>(
array: Array<T>,
comparer: Comparer<T>
): Array<T> {
return array.sort(comparer);
}
export function compare<T extends Comparable<U>, U = T>(
a: ReadonlyArray<T>,
b: Iterable<U>
): Comparison {
return compareWith(a, b, compareComparable);
}
export function compareWith<T, U = T>(
a: ReadonlyArray<T>,
b: Iterable<U>,
comparer: Comparer<T, U, [index: number]>
): Comparison {
return Iterable.compareWith(a, b, comparer);
}
export function search<T>(
array: ReadonlyArray<T>,
value: T,
comparer: Comparer<T>
): number {
let lower = 0;
let upper = array.length - 1;
while (lower <= upper) {
const middle = (lower + (upper - lower) / 2) >>> 0;
switch (comparer(value, array[middle]!)) {
case Comparison.Greater:
lower = middle + 1;
break;
case Comparison.Less:
upper = middle - 1;
break;
case Comparison.Equal:
return middle;
}
}
return lower;
}
export function equals<T>(a: ReadonlyArray<T>, b: ReadonlyArray<T>): boolean {
if (a.length !== b.length) {
return false;
}
for (let i = 0, n = a.length; i < n; i++) {
if (!Equatable.equals(a[i], b[i])) {
return false;
}
}
return true;
}
export function hash<T>(array: ReadonlyArray<T>, hash: Hash): void {
for (let i = 0, n = array.length; i < n; i++) {
hash.writeUnknown(array[i]);
}
hash.writeUint32(array.length);
}
export function iterator<T>(array: ReadonlyArray<T>): Iterator<T> {
return array[Symbol.iterator]();
}
export function toJSON<T>(
array: ReadonlyArray<T>
): Array<Serializable.ToJSON<T>> {
return array.map((value) => Serializable.toJSON(value));
}
} | the_stack |
import { Component, ViewChild, ElementRef, ChangeDetectorRef, NgZone } from '@angular/core';
import {
IonicPage, ViewController, AlertController, MenuController, ActionSheetController,
LoadingController, NavController, ToastController
} from 'ionic-angular';
import marked from 'marked';
import { Storage } from '@ionic/storage';
import { FormBuilder, FormGroup, Validators, FormControl } from '@angular/forms';
import { SteeemActionsProvider } from 'providers/steeem-actions/steeem-actions';
import { AlertsProvider } from 'providers/alerts/alerts';
import { CameraProvider } from 'providers/camera/camera';
import { TranslateService } from '@ngx-translate/core';
@IonicPage()
@Component({
selector: 'page-post',
templateUrl: 'post.html',
})
export class PostPage {
@ViewChild('myInput') myInput: ElementRef;
private caret: number = 0;
private is_preview: boolean = false;
private markdowntext;
private rewards: string = '50%'
private storyForm: FormGroup;
private upvote: boolean = false;
constructor(private viewCtrl: ViewController,
private actionSheetCtrl: ActionSheetController,
private formBuilder: FormBuilder,
private cdr: ChangeDetectorRef,
private zone: NgZone,
private steemActions: SteeemActionsProvider,
private navCtrl: NavController,
public menu: MenuController,
private translate: TranslateService,
private alerts: AlertsProvider,
private camera: CameraProvider,
public storage: Storage,
public alertCtrl: AlertController,
public loadingCtrl: LoadingController,
public toastCtrl: ToastController) {
this.storyForm = this.formBuilder.group({
title: ['', Validators.required],
description: ['', Validators.required],
tags: ['', Validators.required]
});
}
ionViewCanLeave(): boolean {
if (this.is_preview === true) {
this.showPreview();
this.cdr.detectChanges();
return false;
}
else {
return true;
}
}
ionViewDidLoad(): void {
this.storage.get('title').then((title) => {
if (title) {
this.insertTitle(title);
}
});
this.storage.get('description').then((description) => {
if (description) {
this.insertText(description);
}
});
this.storage.get('tags').then((tags) => {
if (tags) {
this.insertTags(tags);
}
});
}
ionViewDidLeave(): void {
this.storage.set('title', this.storyForm.controls['title'].value).then(() => { });
this.storage.set('description', this.storyForm.controls['description'].value).then(() => { });
this.storage.set('tags', this.storyForm.controls['tags'].value).then(() => { });
}
ionViewDidEnter(): void {
this.menu.enable(false);
}
ionViewWillLeave(): void {
this.menu.enable(true);
}
public deleteDraft() {
this.storage.ready().then(() => {
this.storage.remove('title').then(() => { });
this.storage.remove('description').then(() => { });
this.storage.remove('tags').then((res) => { });
});
}
/**
* Method to insert text at current pointer
* @param {String} text: Text to insert
*/
insertText(text: string): void {
const current = this.storyForm.value.description.toString();
let final = current.substr(0, this.caret) + text + current.substr(this.caret);
this.storyForm.controls["description"].setValue(final);
}
insertTitle(text) {
const current = this.storyForm.value.title.toString();
let final = current.substr(0, this.caret) + text + current.substr(this.caret);
this.storyForm.controls["title"].setValue(final);
}
insertTags(text) {
const current = this.storyForm.value.tags.toString();
let final = current.substr(0, this.caret) + text + current.substr(this.caret);
this.storyForm.controls["tags"].setValue(final);
}
/**
* Method to switch view to preview mode
*/
showPreview(): void {
this.zone.run(() => {
if (this.is_preview == false) {
let plainText = this.storyForm.value.description;
this.markdowntext = marked(plainText.toString())
this.is_preview = true;
}
else {
this.is_preview = false;
}
this.cdr.detectChanges();
})
}
/**
* Method to show insert URL actionsheet
*/
presentInsertURL(): void {
let alert = this.alertCtrl.create({
title: this.translate.instant('general.insert_image.title'),
inputs: [
{
name: 'URL',
placeholder: this.translate.instant('general.insert_image.url'),
}
],
buttons: [
{
text: this.translate.instant('general.insert_image.cancel'),
role: 'cancel',
handler: data => {
}
},
{
text: 'OK',
handler: data => {
this.insertText('');
}
}
]
});
alert.present();
}
/**
* Method to get caret position in a textfield
* @param oField
*/
getCaretPos(oField): void {
let node = oField._elementRef.nativeElement.children[0];
if (node.selectionStart || node.selectionStart == '0') {
this.caret = node.selectionStart;
}
}
/**
* Method to show a toast message
* @param {String} msg: message to show in the toast
*/
presentToast(msg) {
let toast = this.toastCtrl.create({
message: msg,
duration: 1500,
position: 'bottom'
});
toast.present();
}
/**
* Method to post the article
*/
private post(): void {
if (this.storyForm.valid) {
if (this.storyForm.controls.tags.value.match(/[^,\s][^\,]*[^,\s]*/g)) {
let loading = this.loadingCtrl.create({
content: this.translate.instant('generic_messages.creating_post')
});
loading.present();
let tags;
if (this.storyForm.controls.tags.value.indexOf(',') > -1) {
tags = this.storyForm.controls.tags.value.trim().split(',')
}
else if (this.storyForm.controls.tags.value.indexOf(' ') > -1) {
tags = this.storyForm.controls.tags.value.trim().split(' ');
}
else if (this.storyForm.controls.tags.value.trim() === '') {
this.alerts.display_alert('NO_TAGS');
return;
}
else {
tags = [this.storyForm.controls.tags.value.trim()]
}
tags = tags.map(v => v.toLowerCase());
this.steemActions.dispatch_post(
this.storyForm.controls.title.value,
this.storyForm.controls.description.value,
tags, this.upvote, this.rewards).then(res => {
if (res === 'not-logged-in') {
// Show alert telling the user that needs to login
loading.dismiss();
}
else if (res === 'Correct') {
loading.dismiss();
this.presentToast(this.translate.instant('generic_messages.posted_correctly'));
this.navCtrl.pop().then(() => {
this.deleteDraft();
});
}
else if (res === 'POST_INTERVAL') {
this.show_prompt(loading, 'POST_INTERVAL');
}
else {
loading.dismiss();
}
});
}
else {
this.alerts.display_alert('ALL_FIELDS');
}
}
else {
this.alerts.display_alert('ALL_FIELDS');
}
}
private show_prompt(loader, msg) {
loader.dismiss();
setTimeout(() => {
this.alerts.display_alert(msg);
}, 500);
}
/**
* function to adjust the height of the message textarea
* @param {any} event - the event, which is provided by the textarea input
* @return {void}
*/
protected adjustTextarea(event: any): void {
let textarea: any = event.target;
textarea.style.overflow = 'hidden';
textarea.style.height = 'auto';
textarea.style.height = textarea.scrollHeight + 'px';
return;
}
/**
* Method to prevent default behavior of an object.
* @param event
*/
protected preventEnter(event: any): void {
event.preventDefault();
}
insertLink() {
let alert = this.alertCtrl.create({
title: this.translate.instant('modals.edit_post.insert_url'),
inputs: [
{
name: 'URL',
placeholder: this.translate.instant('modals.edit_post.url_placeholder'),
},
{
name: 'Text',
placeholder: this.translate.instant('modals.edit_post.mask_url'),
}
],
buttons: [
{
text: this.translate.instant('generic_messages.cancel'),
role: 'cancel',
handler: data => {
}
},
{
text: 'OK',
handler: data => {
this.insertText('[' + data.Text + '](' + data.URL + ')');
}
}
]
});
alert.present();
}
/**
* Method to present actionsheet with options
*/
presentActionSheet(): void {
let actionSheet = this.actionSheetCtrl.create({
title: this.translate.instant('general.camera_options.title'),
buttons: [
{
text: this.translate.instant('general.camera_options.camera'),
icon: 'camera',
handler: () => {
this.camera.choose_image(this.camera.FROM_CAMERA, false, 'post').then((image: any) => {
this.insertText(image);
});
}
},
{
text: this.translate.instant('general.camera_options.gallery'),
icon: 'albums',
handler: () => {
this.camera.choose_image(this.camera.FROM_GALLERY, true, 'post').then((image: any) => {
console.log(image);
this.insertText(image);
});
}
},
{
text: this.translate.instant('general.camera_options.custom_url'),
icon: 'md-globe',
handler: () => {
this.presentInsertURL()
}
},
{
text: this.translate.instant('generic_messages.cancel'),
icon: 'close',
role: 'cancel',
handler: () => {
}
}
]
});
actionSheet.present();
}
/**
* Method to prevent focus change of an element
* @param e
*/
preventFocusChange(e) {
e.preventDefault();
}
/**
* Method to force angular to detect changes in the component
*/
protected updateChanges(): void {
this.cdr.detectChanges();
}
} | the_stack |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.