// (removed: stray dataset-extraction table header that was not part of the original source file)
import {
TAbstractFile,
TFile,
TFolder,
Vault,
requireApiVersion,
} from "obsidian";
import AggregateError from "aggregate-error";
import PQueue from "p-queue";
import type {
RemoteItem,
SyncTriggerSourceType,
DecisionType,
FileOrFolderMixedState,
SUPPORTED_SERVICES_TYPE,
} from "./baseTypes";
import { API_VER_STAT_FOLDER } from "./baseTypes";
import {
decryptBase32ToString,
decryptBase64urlToString,
encryptStringToBase64url,
getSizeFromOrigToEnc,
MAGIC_ENCRYPTED_PREFIX_BASE32,
MAGIC_ENCRYPTED_PREFIX_BASE64URL,
} from "./encrypt";
import type { FileFolderHistoryRecord, InternalDBs } from "./localdb";
import {
clearDeleteRenameHistoryOfKeyAndVault,
getSyncMetaMappingByRemoteKeyAndVault,
upsertSyncMetaMappingDataByVault,
} from "./localdb";
import {
isHiddenPath,
isVaildText,
mkdirpInVault,
getFolderLevels,
getParentFolder,
atWhichLevel,
unixTimeToStr,
statFix,
} from "./misc";
import { RemoteClient } from "./remote";
import {
MetadataOnRemote,
DeletionOnRemote,
serializeMetadataOnRemote,
deserializeMetadataOnRemote,
DEFAULT_FILE_NAME_FOR_METADATAONREMOTE,
DEFAULT_FILE_NAME_FOR_METADATAONREMOTE2,
isEqualMetadataOnRemote,
} from "./metadataOnRemote";
import { isInsideObsFolder, ObsConfigDirFileType } from "./obsFolderLister";
import { log } from "./moreOnLog";
// Lifecycle phases of a sync run, roughly in chronological order;
// used for progress reporting.
export type SyncStatusType =
  | "idle"
  | "preparing"
  | "getting_remote_files_list"
  | "getting_remote_extra_meta"
  | "getting_local_meta"
  | "checking_password"
  | "generating_plan"
  | "syncing"
  | "cleaning"
  | "finish";

// A full sync plan: when it was generated, what triggered it, which remote
// service it targets, and the merged per-path states with their decisions.
export interface SyncPlanType {
  ts: number; // plan generation time (unix milliseconds, from Date.now())
  tsFmt?: string; // human-readable form of ts
  syncTriggerSource?: SyncTriggerSourceType;
  remoteType: SUPPORTED_SERVICES_TYPE;
  mixedStates: Record<string, FileOrFolderMixedState>;
}

// Result of checking the user's password against the remote content.
export interface PasswordCheckType {
  ok: boolean;
  reason:
    | "ok"
    | "empty_remote"
    | "remote_encrypted_local_no_password"
    | "password_matched"
    | "password_not_matched"
    | "invalid_text_after_decryption"
    | "remote_not_encrypted_local_has_password"
    | "no_password_both_sides";
}
/**
 * Try decrypting a sanity-check key with the given decrypt function and
 * password, and translate the outcome into a PasswordCheckType.
 * A successful decryption is additionally validated with isVaildText,
 * because iOS Safari may "succeed" decryption with a wrong password.
 */
const checkDecryptionWith = async (
  decryptFunc: (input: string, password: string) => Promise<string>,
  santyCheckKey: string,
  password: string
): Promise<PasswordCheckType> => {
  if (password === "") {
    return {
      ok: false,
      reason: "remote_encrypted_local_no_password",
    };
  }
  try {
    const res = await decryptFunc(santyCheckKey, password);
    if (isVaildText(res)) {
      return {
        ok: true,
        reason: "password_matched",
      };
    }
    return {
      ok: false,
      reason: "invalid_text_after_decryption",
    };
  } catch (error) {
    return {
      ok: false,
      reason: "password_not_matched",
    };
  }
};

/**
 * Check whether the local password matches the state of the remote:
 * - empty remote: anything goes;
 * - remote encrypted (old base32 or new base64url key prefix): the
 *   password must decrypt the first remote key to valid text;
 * - remote not encrypted: the local password must be empty.
 * Only the first remote key is inspected as a sanity check.
 */
export const isPasswordOk = async (
  remote: RemoteItem[],
  password: string = ""
): Promise<PasswordCheckType> => {
  if (remote === undefined || remote.length === 0) {
    // remote empty, nothing to contradict the password
    return {
      ok: true,
      reason: "empty_remote",
    };
  }
  const santyCheckKey = remote[0].key;
  if (santyCheckKey.startsWith(MAGIC_ENCRYPTED_PREFIX_BASE32)) {
    // encrypted using the old base32 scheme
    return await checkDecryptionWith(
      decryptBase32ToString,
      santyCheckKey,
      password
    );
  }
  if (santyCheckKey.startsWith(MAGIC_ENCRYPTED_PREFIX_BASE64URL)) {
    // encrypted using the new base64url scheme
    return await checkDecryptionWith(
      decryptBase64urlToString,
      santyCheckKey,
      password
    );
  }
  // remote is not encrypted at all
  if (password !== "") {
    return {
      ok: false,
      reason: "remote_not_encrypted_local_has_password",
    };
  }
  return {
    ok: true,
    reason: "no_password_both_sides",
  };
};
/**
 * Convert the raw remote listing into FileOrFolderMixedState records.
 * When a password is set, every remote key is decrypted first (base32
 * keys come from older plugin versions, base64url from newer ones); any
 * other key format throws.
 * If a sync-meta mapping exists in the local DB for the remote key, the
 * locally recorded key/mtime/size override the remote values (the remote
 * size is the size AFTER encryption and is kept separately).
 * Also picks out the remote metadata file record, and aborts when the
 * reserved v2 metadata file name is found.
 */
export const parseRemoteItems = async (
  remote: RemoteItem[],
  db: InternalDBs,
  vaultRandomID: string,
  remoteType: SUPPORTED_SERVICES_TYPE,
  password: string = ""
) => {
  const remoteStates = [] as FileOrFolderMixedState[];
  let metadataFile: FileOrFolderMixedState = undefined;
  if (remote === undefined) {
    return {
      remoteStates: remoteStates,
      metadataFile: metadataFile,
    };
  }
  for (const entry of remote) {
    const remoteEncryptedKey = entry.key;
    let key = remoteEncryptedKey;
    if (password !== "") {
      // encrypted remote: recover the plain-text key first
      if (remoteEncryptedKey.startsWith(MAGIC_ENCRYPTED_PREFIX_BASE32)) {
        key = await decryptBase32ToString(remoteEncryptedKey, password);
      } else if (
        remoteEncryptedKey.startsWith(MAGIC_ENCRYPTED_PREFIX_BASE64URL)
      ) {
        key = await decryptBase64urlToString(remoteEncryptedKey, password);
      } else {
        throw Error(`unexpected key=${remoteEncryptedKey}`);
      }
    }
    const backwardMapping = await getSyncMetaMappingByRemoteKeyAndVault(
      remoteType,
      db,
      key,
      entry.lastModified,
      entry.etag,
      vaultRandomID
    );
    let r = {} as FileOrFolderMixedState;
    if (backwardMapping !== undefined) {
      key = backwardMapping.localKey;
      const mtimeRemote = backwardMapping.localMtime || entry.lastModified;
      // the backwardMapping.localSize is the file BEFORE encryption
      // we want to split two sizes for comparation later
      r = {
        key: key,
        existRemote: true,
        mtimeRemote: mtimeRemote,
        mtimeRemoteFmt: unixTimeToStr(mtimeRemote),
        sizeRemote: backwardMapping.localSize,
        sizeRemoteEnc: password === "" ? undefined : entry.size,
        remoteEncryptedKey: remoteEncryptedKey,
        changeRemoteMtimeUsingMapping: true,
      };
    } else {
      // do not have backwardMapping
      r = {
        key: key,
        existRemote: true,
        mtimeRemote: entry.lastModified,
        mtimeRemoteFmt: unixTimeToStr(entry.lastModified),
        sizeRemote: password === "" ? entry.size : undefined,
        sizeRemoteEnc: password === "" ? undefined : entry.size,
        remoteEncryptedKey: remoteEncryptedKey,
        changeRemoteMtimeUsingMapping: false,
      };
    }
    if (r.key === DEFAULT_FILE_NAME_FOR_METADATAONREMOTE) {
      // remember a copy of the metadata-file record for fetching later
      metadataFile = Object.assign({}, r);
    }
    if (r.key === DEFAULT_FILE_NAME_FOR_METADATAONREMOTE2) {
      throw Error(
        `A reserved file name ${r.key} has been found. You may upgrade the plugin to latest version to try to deal with it.`
      );
    }
    remoteStates.push(r);
  }
  return {
    remoteStates: remoteStates,
    metadataFile: metadataFile,
  };
};
/**
 * Download and deserialize the remote metadata file (deletion records).
 * Returns an empty MetadataOnRemote when there is no metadata file on
 * the remote side.
 */
export const fetchMetadataFile = async (
  metadataFile: FileOrFolderMixedState,
  client: RemoteClient,
  vault: Vault,
  password: string = ""
) => {
  if (metadataFile === undefined) {
    log.debug("no metadata file, so no fetch");
    const emptyMetadata = {
      deletions: [],
    } as MetadataOnRemote;
    return emptyMetadata;
  }
  // download the raw (possibly encrypted) content, then parse it
  const rawContent = await client.downloadFromRemote(
    metadataFile.key,
    vault,
    metadataFile.mtimeRemote,
    password,
    metadataFile.remoteEncryptedKey,
    true
  );
  return deserializeMetadataOnRemote(rawContent);
};
/**
 * Decide whether a path should be excluded from syncing.
 * Paths inside the Obsidian config folder are never skipped when the
 * user opted into syncing the config dir; otherwise hidden paths,
 * (optionally) underscore-style hidden paths, and the reserved metadata
 * file names are skipped.
 */
const isSkipItem = (
  key: string,
  syncConfigDir: boolean,
  syncUnderscoreItems: boolean,
  configDir: string
) => {
  if (syncConfigDir && isInsideObsFolder(key, configDir)) {
    // config-dir syncing is enabled: keep everything under it
    return false;
  }
  if (isHiddenPath(key, true, false)) {
    return true;
  }
  if (!syncUnderscoreItems && isHiddenPath(key, false, true)) {
    return true;
  }
  // the metadata files are managed by the plugin itself, never synced directly
  return (
    key === DEFAULT_FILE_NAME_FOR_METADATAONREMOTE ||
    key === DEFAULT_FILE_NAME_FOR_METADATAONREMOTE2
  );
};
/**
 * Merge five sources of truth into a single record keyed by path
 * (folders are keyed with a trailing "/"):
 *   1. the parsed remote listing;
 *   2. the local vault files/folders;
 *   3. the local Obsidian config dir contents (only if syncConfigDir);
 *   4. the deletion history stored remotely;
 *   5. the local delete/rename history.
 * Items matching isSkipItem are filtered out of every source.  Each
 * later source only fills in its own fields on an existing record, so
 * remote and local information end up combined per key.
 */
const ensembleMixedStates = async (
  remoteStates: FileOrFolderMixedState[],
  local: TAbstractFile[],
  localConfigDirContents: ObsConfigDirFileType[] | undefined,
  remoteDeleteHistory: DeletionOnRemote[],
  localFileHistory: FileFolderHistoryRecord[],
  syncConfigDir: boolean,
  configDir: string,
  syncUnderscoreItems: boolean,
  password: string
) => {
  const results = {} as Record<string, FileOrFolderMixedState>;
  // 1. seed with the remote states
  for (const r of remoteStates) {
    const key = r.key;
    if (isSkipItem(key, syncConfigDir, syncUnderscoreItems, configDir)) {
      continue;
    }
    results[key] = r;
    results[key].existLocal = false;
  }
  // 2. overlay local vault files and folders
  for (const entry of local) {
    let r = {} as FileOrFolderMixedState;
    let key = entry.path;
    if (entry.path === "/") {
      // ignore the vault root itself
      continue;
    } else if (entry instanceof TFile) {
      // use the later of mtime/ctime as the effective local mtime
      const mtimeLocal = Math.max(entry.stat.mtime ?? 0, entry.stat.ctime ?? 0);
      r = {
        key: entry.path,
        existLocal: true,
        mtimeLocal: mtimeLocal,
        mtimeLocalFmt: unixTimeToStr(mtimeLocal),
        sizeLocal: entry.stat.size,
        sizeLocalEnc:
          password === "" ? undefined : getSizeFromOrigToEnc(entry.stat.size),
      };
    } else if (entry instanceof TFolder) {
      // folders are keyed with a trailing slash and have no meaningful mtime
      key = `${entry.path}/`;
      r = {
        key: key,
        existLocal: true,
        mtimeLocal: undefined,
        mtimeLocalFmt: undefined,
        sizeLocal: 0,
        sizeLocalEnc: password === "" ? undefined : getSizeFromOrigToEnc(0),
      };
    } else {
      throw Error(`unexpected ${entry}`);
    }
    if (isSkipItem(key, syncConfigDir, syncUnderscoreItems, configDir)) {
      continue;
    }
    if (results.hasOwnProperty(key)) {
      // remote record exists: fill in the local-side fields only
      results[key].key = r.key;
      results[key].existLocal = r.existLocal;
      results[key].mtimeLocal = r.mtimeLocal;
      results[key].mtimeLocalFmt = r.mtimeLocalFmt;
      results[key].sizeLocal = r.sizeLocal;
      results[key].sizeLocalEnc = r.sizeLocalEnc;
    } else {
      results[key] = r;
      results[key].existRemote = false;
    }
  }
  // 3. overlay the Obsidian config dir contents (enumerated separately,
  // since they are not part of the vault listing)
  if (syncConfigDir && localConfigDirContents !== undefined) {
    for (const entry of localConfigDirContents) {
      const key = entry.key;
      let mtimeLocal = Math.max(entry.mtime ?? 0, entry.ctime ?? 0);
      if (Number.isNaN(mtimeLocal) || mtimeLocal === 0) {
        mtimeLocal = undefined;
      }
      // NOTE(review): mtimeLocal may be undefined here and is still passed
      // to unixTimeToStr — presumably that helper tolerates undefined; confirm.
      const r: FileOrFolderMixedState = {
        key: key,
        existLocal: true,
        mtimeLocal: mtimeLocal,
        mtimeLocalFmt: unixTimeToStr(mtimeLocal),
        sizeLocal: entry.size,
        sizeLocalEnc:
          password === "" ? undefined : getSizeFromOrigToEnc(entry.size),
      };
      if (results.hasOwnProperty(key)) {
        results[key].key = r.key;
        results[key].existLocal = r.existLocal;
        results[key].mtimeLocal = r.mtimeLocal;
        results[key].mtimeLocalFmt = r.mtimeLocalFmt;
        results[key].sizeLocal = r.sizeLocal;
        results[key].sizeLocalEnc = r.sizeLocalEnc;
      } else {
        results[key] = r;
        results[key].existRemote = false;
      }
    }
  }
  // 4. overlay the deletion history recorded remotely
  for (const entry of remoteDeleteHistory) {
    const key = entry.key;
    const r = {
      key: key,
      deltimeRemote: entry.actionWhen,
      deltimeRemoteFmt: unixTimeToStr(entry.actionWhen),
    } as FileOrFolderMixedState;
    if (isSkipItem(key, syncConfigDir, syncUnderscoreItems, configDir)) {
      continue;
    }
    if (results.hasOwnProperty(key)) {
      results[key].key = r.key;
      results[key].deltimeRemote = r.deltimeRemote;
      results[key].deltimeRemoteFmt = r.deltimeRemoteFmt;
    } else {
      results[key] = r;
      results[key].existLocal = false;
      results[key].existRemote = false;
    }
  }
  // 5. overlay the local delete/rename history
  for (const entry of localFileHistory) {
    let key = entry.key;
    if (entry.keyType === "folder") {
      // normalize folder keys to always carry the trailing slash
      if (!entry.key.endsWith("/")) {
        key = `${entry.key}/`;
      }
    } else if (entry.keyType === "file") {
      // pass
    } else {
      throw Error(`unexpected ${entry}`);
    }
    if (isSkipItem(key, syncConfigDir, syncUnderscoreItems, configDir)) {
      continue;
    }
    if (entry.actionType === "delete" || entry.actionType === "rename") {
      // both delete and rename-away count as a local deletion of this key
      const r = {
        key: key,
        deltimeLocal: entry.actionWhen,
        deltimeLocalFmt: unixTimeToStr(entry.actionWhen),
      } as FileOrFolderMixedState;
      if (results.hasOwnProperty(key)) {
        results[key].deltimeLocal = r.deltimeLocal;
        results[key].deltimeLocalFmt = r.deltimeLocalFmt;
      } else {
        results[key] = r;
        results[key].existLocal = false; // we have already checked local
        results[key].existRemote = false; // we have already checked remote
      }
    } else if (entry.actionType === "renameDestination") {
      // a rename target: treat the rename time as a local modification time
      const r = {
        key: key,
        mtimeLocal: entry.actionWhen,
        mtimeLocalFmt: unixTimeToStr(entry.actionWhen),
        changeLocalMtimeUsingMapping: true,
      };
      if (results.hasOwnProperty(key)) {
        // keep the larger of the two candidate local mtimes
        let mtimeLocal = Math.max(
          r.mtimeLocal ?? 0,
          results[key].mtimeLocal ?? 0
        );
        if (Number.isNaN(mtimeLocal) || mtimeLocal === 0) {
          mtimeLocal = undefined;
        }
        results[key].mtimeLocal = mtimeLocal;
        results[key].mtimeLocalFmt = unixTimeToStr(mtimeLocal);
        results[key].changeLocalMtimeUsingMapping =
          r.changeLocalMtimeUsingMapping;
      } else {
        // So, the file doesn't exist,
        // except that it existed in the "renamed to" history records.
        // Most likely because that the user deleted the file while Obsidian was closed,
        // so Obsidian could not track the deletions.
        // We are not sure how to deal with this, so do not generate anything here!
        // // // The following 3 lines are of old logic, and have been removed:
        // // results[key] = r;
        // // results[key].existLocal = false; // we have already checked local
        // // results[key].existRemote = false; // we have already checked remote
      }
    } else {
      throw Error(
        `do not know how to deal with local file history ${entry.key} with ${entry.actionType}`
      );
    }
  }
  return results;
};
/**
 * Decide the sync action for a single FILE record in place (keys ending
 * with "/" are folders and are returned untouched).
 * The newest of four timestamps wins: local mtime, remote mtime, local
 * deletion time, remote deletion time.  When skipSizeLargerThan is
 * positive, items larger than the threshold (encrypted size when a
 * password is set, since that is what is stored remotely) are skipped
 * or flagged as error conflicts.
 * Mutates and returns origRecord; decisionBranch is a unique id of the
 * branch taken, useful for debugging a plan.
 * Parent folders of any kept file are added to keptFolder so the folder
 * pass can preserve them.
 */
const assignOperationToFileInplace = (
  origRecord: FileOrFolderMixedState,
  keptFolder: Set<string>,
  skipSizeLargerThan: number,
  password: string = ""
) => {
  let r = origRecord;
  // files and folders are treated differently
  // here we only check files
  if (r.key.endsWith("/")) {
    return r;
  }
  // we find the max date from four sources
  // 0. find anything inconsistent
  if (r.existLocal && (r.mtimeLocal === undefined || r.mtimeLocal <= 0)) {
    throw Error(
      `Error: Abnormal last modified time locally: ${JSON.stringify(
        r,
        null,
        2
      )}`
    );
  }
  if (r.existRemote && (r.mtimeRemote === undefined || r.mtimeRemote <= 0)) {
    throw Error(
      `Error: Abnormal last modified time remotely: ${JSON.stringify(
        r,
        null,
        2
      )}`
    );
  }
  if (r.deltimeLocal !== undefined && r.deltimeLocal <= 0) {
    throw Error(
      `Error: Abnormal deletion time locally: ${JSON.stringify(r, null, 2)}`
    );
  }
  if (r.deltimeRemote !== undefined && r.deltimeRemote <= 0) {
    throw Error(
      `Error: Abnormal deletion time remotely: ${JSON.stringify(r, null, 2)}`
    );
  }
  if (
    (r.existLocal && password !== "" && r.sizeLocalEnc === undefined) ||
    (r.existRemote && password !== "" && r.sizeRemoteEnc === undefined)
  ) {
    throw new Error(
      `Error: No encryption sizes: ${JSON.stringify(r, null, 2)}`
    );
  }
  // compare encrypted sizes when a password is set: the remote stores
  // the encrypted bytes, so only those sizes are comparable
  const sizeLocalComp = password === "" ? r.sizeLocal : r.sizeLocalEnc;
  const sizeRemoteComp = password === "" ? r.sizeRemote : r.sizeRemoteEnc;
  // 1. mtimeLocal is the newest of the four -> upload (or skip/conflict by size)
  if (r.existLocal) {
    const mtimeRemote = r.existRemote ? r.mtimeRemote : -1;
    const deltimeRemote = r.deltimeRemote !== undefined ? r.deltimeRemote : -1;
    const deltimeLocal = r.deltimeLocal !== undefined ? r.deltimeLocal : -1;
    if (
      r.mtimeLocal >= mtimeRemote &&
      r.mtimeLocal >= deltimeLocal &&
      r.mtimeLocal >= deltimeRemote
    ) {
      if (sizeLocalComp === undefined) {
        throw new Error(
          `Error: no local size but has local mtime: ${JSON.stringify(
            r,
            null,
            2
          )}`
        );
      }
      if (r.mtimeLocal === r.mtimeRemote) {
        // local and remote both exist and mtimes are the same
        if (sizeLocalComp === sizeRemoteComp) {
          // do not need to consider skipSizeLargerThan in this case
          r.decision = "skipUploading";
          r.decisionBranch = 1;
        } else {
          // same mtime but different size: local wins, unless sizes
          // run into the threshold
          if (skipSizeLargerThan <= 0) {
            r.decision = "uploadLocalToRemote";
            r.decisionBranch = 2;
          } else {
            // limit the sizes
            if (sizeLocalComp <= skipSizeLargerThan) {
              if (sizeRemoteComp <= skipSizeLargerThan) {
                r.decision = "uploadLocalToRemote";
                r.decisionBranch = 18;
              } else {
                r.decision = "errorRemoteTooLargeConflictLocal";
                r.decisionBranch = 19;
              }
            } else {
              if (sizeRemoteComp <= skipSizeLargerThan) {
                r.decision = "errorLocalTooLargeConflictRemote";
                r.decisionBranch = 20;
              } else {
                r.decision = "skipUploadingTooLarge";
                r.decisionBranch = 21;
              }
            }
          }
        }
      } else {
        // we have local laregest mtime,
        // and the remote not existing or smaller mtime
        if (skipSizeLargerThan <= 0) {
          // no need to consider sizes
          r.decision = "uploadLocalToRemote";
          r.decisionBranch = 4;
        } else {
          // need to consider sizes
          if (sizeLocalComp <= skipSizeLargerThan) {
            if (sizeRemoteComp === undefined) {
              r.decision = "uploadLocalToRemote";
              r.decisionBranch = 22;
            } else if (sizeRemoteComp <= skipSizeLargerThan) {
              r.decision = "uploadLocalToRemote";
              r.decisionBranch = 23;
            } else {
              r.decision = "errorRemoteTooLargeConflictLocal";
              r.decisionBranch = 24;
            }
          } else {
            if (sizeRemoteComp === undefined) {
              r.decision = "skipUploadingTooLarge";
              r.decisionBranch = 25;
            } else if (sizeRemoteComp <= skipSizeLargerThan) {
              r.decision = "errorLocalTooLargeConflictRemote";
              r.decisionBranch = 26;
            } else {
              r.decision = "skipUploadingTooLarge";
              r.decisionBranch = 27;
            }
          }
        }
      }
      // the file is kept, so its parent folder must be kept too
      keptFolder.add(getParentFolder(r.key));
      return r;
    }
  }
  // 2. mtimeRemote is strictly the newest -> download (or skip/conflict by size)
  if (r.existRemote) {
    const mtimeLocal = r.existLocal ? r.mtimeLocal : -1;
    const deltimeRemote = r.deltimeRemote !== undefined ? r.deltimeRemote : -1;
    const deltimeLocal = r.deltimeLocal !== undefined ? r.deltimeLocal : -1;
    if (
      r.mtimeRemote > mtimeLocal &&
      r.mtimeRemote >= deltimeLocal &&
      r.mtimeRemote >= deltimeRemote
    ) {
      // we have remote laregest mtime,
      // and the local not existing or smaller mtime
      if (sizeRemoteComp === undefined) {
        throw new Error(
          `Error: no remote size but has remote mtime: ${JSON.stringify(
            r,
            null,
            2
          )}`
        );
      }
      if (skipSizeLargerThan <= 0) {
        // no need to consider sizes
        r.decision = "downloadRemoteToLocal";
        r.decisionBranch = 5;
      } else {
        // need to consider sizes
        if (sizeRemoteComp <= skipSizeLargerThan) {
          if (sizeLocalComp === undefined) {
            r.decision = "downloadRemoteToLocal";
            r.decisionBranch = 28;
          } else if (sizeLocalComp <= skipSizeLargerThan) {
            r.decision = "downloadRemoteToLocal";
            r.decisionBranch = 29;
          } else {
            r.decision = "errorLocalTooLargeConflictRemote";
            r.decisionBranch = 30;
          }
        } else {
          if (sizeLocalComp === undefined) {
            r.decision = "skipDownloadingTooLarge";
            r.decisionBranch = 31;
          } else if (sizeLocalComp <= skipSizeLargerThan) {
            r.decision = "errorRemoteTooLargeConflictLocal";
            r.decisionBranch = 32;
          } else {
            r.decision = "skipDownloadingTooLarge";
            r.decisionBranch = 33;
          }
        }
      }
      keptFolder.add(getParentFolder(r.key));
      return r;
    }
  }
  // 3. deltimeLocal is the newest -> propagate the local deletion
  if (r.deltimeLocal !== undefined && r.deltimeLocal !== 0) {
    const mtimeLocal = r.existLocal ? r.mtimeLocal : -1;
    const mtimeRemote = r.existRemote ? r.mtimeRemote : -1;
    const deltimeRemote = r.deltimeRemote !== undefined ? r.deltimeRemote : -1;
    if (
      r.deltimeLocal >= mtimeLocal &&
      r.deltimeLocal >= mtimeRemote &&
      r.deltimeLocal >= deltimeRemote
    ) {
      if (skipSizeLargerThan <= 0) {
        r.decision = "uploadLocalDelHistToRemote";
        r.decisionBranch = 6;
        if (r.existLocal || r.existRemote) {
          // actual deletion would happen
        }
      } else {
        // too-large items are protected from deletion by the threshold
        const localTooLargeToDelete =
          r.existLocal && sizeLocalComp > skipSizeLargerThan;
        const remoteTooLargeToDelete =
          r.existRemote && sizeRemoteComp > skipSizeLargerThan;
        if (localTooLargeToDelete) {
          if (remoteTooLargeToDelete) {
            r.decision = "skipUsingLocalDelTooLarge";
            r.decisionBranch = 34;
          } else {
            if (r.existRemote) {
              r.decision = "errorLocalTooLargeConflictRemote";
              r.decisionBranch = 35;
            } else {
              r.decision = "skipUsingLocalDelTooLarge";
              r.decisionBranch = 36;
            }
          }
        } else {
          if (remoteTooLargeToDelete) {
            if (r.existLocal) {
              r.decision = "errorLocalTooLargeConflictRemote";
              r.decisionBranch = 37;
            } else {
              r.decision = "skipUsingLocalDelTooLarge";
              r.decisionBranch = 38;
            }
          } else {
            r.decision = "uploadLocalDelHistToRemote";
            r.decisionBranch = 39;
          }
        }
      }
      return r;
    }
  }
  // 4. deltimeRemote is the newest -> honor the remote deletion
  if (r.deltimeRemote !== undefined && r.deltimeRemote !== 0) {
    const mtimeLocal = r.existLocal ? r.mtimeLocal : -1;
    const mtimeRemote = r.existRemote ? r.mtimeRemote : -1;
    const deltimeLocal = r.deltimeLocal !== undefined ? r.deltimeLocal : -1;
    if (
      r.deltimeRemote >= mtimeLocal &&
      r.deltimeRemote >= mtimeRemote &&
      r.deltimeRemote >= deltimeLocal
    ) {
      if (skipSizeLargerThan <= 0) {
        r.decision = "keepRemoteDelHist";
        r.decisionBranch = 7;
        if (r.existLocal || r.existRemote) {
          // actual deletion would happen
        }
      } else {
        const localTooLargeToDelete =
          r.existLocal && sizeLocalComp > skipSizeLargerThan;
        const remoteTooLargeToDelete =
          r.existRemote && sizeRemoteComp > skipSizeLargerThan;
        if (localTooLargeToDelete) {
          if (remoteTooLargeToDelete) {
            r.decision = "skipUsingRemoteDelTooLarge";
            r.decisionBranch = 40;
          } else {
            if (r.existRemote) {
              r.decision = "errorLocalTooLargeConflictRemote";
              r.decisionBranch = 41;
            } else {
              r.decision = "skipUsingRemoteDelTooLarge";
              r.decisionBranch = 42;
            }
          }
        } else {
          if (remoteTooLargeToDelete) {
            if (r.existLocal) {
              r.decision = "errorLocalTooLargeConflictRemote";
              r.decisionBranch = 43;
            } else {
              r.decision = "skipUsingRemoteDelTooLarge";
              r.decisionBranch = 44;
            }
          } else {
            r.decision = "keepRemoteDelHist";
            r.decisionBranch = 45;
          }
        }
      }
      return r;
    }
  }
  // none of the four sources produced a decision: the record is inconsistent
  throw Error(`no decision for ${JSON.stringify(r)}`);
};
/**
 * Decide the sync action for a single FOLDER record in place (keys not
 * ending with "/" are files and are returned untouched).
 * Folders are visited after all their children (keys are sorted longest
 * first by the caller), so keptFolder already says whether any child
 * must be kept:
 * - if a child is kept, the folder is kept (created where missing);
 * - otherwise, deletion history may delete it, unless the folder was
 *   (re)created or renamed-into after the deletion timestamp.
 * Mutates and returns origRecord; removes its key from keptFolder when done.
 */
const assignOperationToFolderInplace = async (
  origRecord: FileOrFolderMixedState,
  keptFolder: Set<string>,
  vault: Vault,
  password: string = ""
) => {
  let r = origRecord;
  // files and folders are treated differently
  // here we only check folders
  if (!r.key.endsWith("/")) {
    return r;
  }
  if (!keptFolder.has(r.key)) {
    // the folder does NOT have any must-be-kept children!
    if (r.deltimeLocal !== undefined || r.deltimeRemote !== undefined) {
      // it has some deletion "commands"
      const deltimeLocal = r.deltimeLocal !== undefined ? r.deltimeLocal : -1;
      const deltimeRemote =
        r.deltimeRemote !== undefined ? r.deltimeRemote : -1;
      // if it was created after deletion, we should keep it as is
      // (folder stat is only available from API_VER_STAT_FOLDER onwards)
      if (requireApiVersion(API_VER_STAT_FOLDER)) {
        if (r.existLocal) {
          const { ctime, mtime } = await statFix(vault, r.key);
          const cmtime = Math.max(ctime ?? 0, mtime ?? 0);
          if (
            !Number.isNaN(cmtime) &&
            cmtime > 0 &&
            cmtime >= deltimeLocal &&
            cmtime >= deltimeRemote
          ) {
            // created/modified after the deletion: keep the folder
            keptFolder.add(getParentFolder(r.key));
            if (r.existLocal && r.existRemote) {
              r.decision = "skipFolder";
              r.decisionBranch = 14;
            } else if (r.existLocal || r.existRemote) {
              r.decision = "createFolder";
              r.decisionBranch = 15;
            } else {
              throw Error(
                `Error: Folder ${r.key} doesn't exist locally and remotely but is marked must be kept. Abort.`
              );
            }
          }
        }
      }
      // If it was moved to here, after deletion, we should keep it as is.
      // The logic not necessarily needs API_VER_STAT_FOLDER.
      // The folder needs this logic because it's also determined by file children.
      // But the file do not need this logic because the mtimeLocal is checked firstly.
      if (
        r.existLocal &&
        r.changeLocalMtimeUsingMapping &&
        r.mtimeLocal > 0 &&
        r.mtimeLocal > deltimeLocal &&
        r.mtimeLocal > deltimeRemote
      ) {
        keptFolder.add(getParentFolder(r.key));
        if (r.existLocal && r.existRemote) {
          r.decision = "skipFolder";
          r.decisionBranch = 16;
        } else if (r.existLocal || r.existRemote) {
          r.decision = "createFolder";
          r.decisionBranch = 17;
        } else {
          throw Error(
            `Error: Folder ${r.key} doesn't exist locally and remotely but is marked must be kept. Abort.`
          );
        }
      }
      if (r.decision === undefined) {
        // not yet decided by the above reason
        // the more recent deletion side wins
        if (deltimeLocal > 0 && deltimeLocal > deltimeRemote) {
          r.decision = "uploadLocalDelHistToRemoteFolder";
          r.decisionBranch = 8;
        } else {
          r.decision = "keepRemoteDelHistFolder";
          r.decisionBranch = 9;
        }
      }
    } else {
      // it does not have any deletion commands
      // keep it as is, and create it if necessary
      keptFolder.add(getParentFolder(r.key));
      if (r.existLocal && r.existRemote) {
        r.decision = "skipFolder";
        r.decisionBranch = 10;
      } else if (r.existLocal || r.existRemote) {
        r.decision = "createFolder";
        r.decisionBranch = 11;
      } else {
        throw Error(
          `Error: Folder ${r.key} doesn't exist locally and remotely but is marked must be kept. Abort.`
        );
      }
    }
  } else {
    // the folder has some must be kept children!
    // so itself and its parent folder must be kept
    keptFolder.add(getParentFolder(r.key));
    if (r.existLocal && r.existRemote) {
      r.decision = "skipFolder";
      r.decisionBranch = 12;
    } else if (r.existLocal || r.existRemote) {
      r.decision = "createFolder";
      r.decisionBranch = 13;
    } else {
      throw Error(
        `Error: Folder ${r.key} doesn't exist locally and remotely but is marked must be kept. Abort.`
      );
    }
  }
  // save the memory, save the world!
  // we have dealt with it, so we don't need it any more.
  keptFolder.delete(r.key);
  return r;
};
// Decisions that represent a deletion (file or folder, from either side);
// used to collect the entries written into the remote deletion-history
// metadata file.
const DELETION_DECISIONS: Set<DecisionType> = new Set([
  "uploadLocalDelHistToRemote",
  "keepRemoteDelHist",
  "uploadLocalDelHistToRemoteFolder",
  "keepRemoteDelHistFolder",
]);
// Decisions indicating a size-threshold conflict; when any item ends up
// here, the actual sync aborts and reports them to the user instead.
const SIZES_GO_WRONG_DECISIONS: Set<DecisionType> = new Set([
  "errorLocalTooLargeConflictRemote",
  "errorRemoteTooLargeConflictLocal",
]);
/**
 * Build the sync plan: merge all state sources, then assign an operation
 * to every item.
 * Returns the plan itself plus helper lists: the keys sorted longest
 * first, the deletion records to store remotely, and any items whose
 * sizes conflict with the skip threshold (which aborts the actual sync).
 */
export const getSyncPlan = async (
  remoteStates: FileOrFolderMixedState[],
  local: TAbstractFile[],
  localConfigDirContents: ObsConfigDirFileType[] | undefined,
  remoteDeleteHistory: DeletionOnRemote[],
  localFileHistory: FileFolderHistoryRecord[],
  remoteType: SUPPORTED_SERVICES_TYPE,
  triggerSource: SyncTriggerSourceType,
  vault: Vault,
  syncConfigDir: boolean,
  configDir: string,
  syncUnderscoreItems: boolean,
  skipSizeLargerThan: number,
  password: string = ""
) => {
  const mixedStates = await ensembleMixedStates(
    remoteStates,
    local,
    localConfigDirContents,
    remoteDeleteHistory,
    localFileHistory,
    syncConfigDir,
    configDir,
    syncUnderscoreItems,
    password
  );
  // sort by key length, descending, so that every file/subfolder is
  // visited before the folder containing it
  const sortedKeys = Object.keys(mixedStates).sort(
    (k1, k2) => k2.length - k1.length
  );
  const sizesGoWrong: FileOrFolderMixedState[] = [];
  const deletions: DeletionOnRemote[] = [];
  const keptFolder = new Set<string>();
  for (let i = 0; i < sortedKeys.length; ++i) {
    const key = sortedKeys[i];
    const val = mixedStates[key];
    if (key.endsWith("/")) {
      // decide some folders
      // because the keys are sorted by length
      // so all the children must have been shown up before in the iteration
      await assignOperationToFolderInplace(val, keptFolder, vault, password);
    } else {
      // get all operations of files
      // and at the same time get some helper info for folders
      assignOperationToFileInplace(
        val,
        keptFolder,
        skipSizeLargerThan,
        password
      );
    }
    if (SIZES_GO_WRONG_DECISIONS.has(val.decision)) {
      sizesGoWrong.push(val);
    }
    if (DELETION_DECISIONS.has(val.decision)) {
      // record the deletion with the timestamp from whichever side
      // produced the decision
      if (val.decision === "uploadLocalDelHistToRemote") {
        deletions.push({
          key: key,
          actionWhen: val.deltimeLocal,
        });
      } else if (val.decision === "keepRemoteDelHist") {
        deletions.push({
          key: key,
          actionWhen: val.deltimeRemote,
        });
      } else if (val.decision === "uploadLocalDelHistToRemoteFolder") {
        deletions.push({
          key: key,
          actionWhen: val.deltimeLocal,
        });
      } else if (val.decision === "keepRemoteDelHistFolder") {
        deletions.push({
          key: key,
          actionWhen: val.deltimeRemote,
        });
      } else {
        throw Error(`do not know how to delete for decision ${val.decision}`);
      }
    }
  }
  const currTs = Date.now();
  const currTsFmt = unixTimeToStr(currTs);
  const plan = {
    ts: currTs,
    tsFmt: currTsFmt,
    remoteType: remoteType,
    syncTriggerSource: triggerSource,
    mixedStates: mixedStates,
  } as SyncPlanType;
  return {
    plan: plan,
    sortedKeys: sortedKeys,
    deletions: deletions,
    sizesGoWrong: sizesGoWrong,
  };
};
/**
 * Upload the metadata file (deletion records) to the remote.
 * Does nothing when there are no deletions, or when the new metadata is
 * equal to what is already stored remotely.
 * With encryption on, the existing encrypted key of the remote metadata
 * file is reused when known; otherwise a fresh base64url key is generated.
 */
const uploadExtraMeta = async (
  client: RemoteClient,
  metadataFile: FileOrFolderMixedState | undefined,
  origMetadata: MetadataOnRemote | undefined,
  deletions: DeletionOnRemote[],
  password: string = ""
) => {
  if (deletions === undefined || deletions.length === 0) {
    return;
  }
  const key = DEFAULT_FILE_NAME_FOR_METADATAONREMOTE;
  let remoteEncryptedKey = key;
  if (password !== "") {
    if (metadataFile === undefined) {
      remoteEncryptedKey = undefined;
    } else {
      remoteEncryptedKey = metadataFile.remoteEncryptedKey;
    }
    if (remoteEncryptedKey === undefined || remoteEncryptedKey === "") {
      // remoteEncryptedKey = await encryptStringToBase32(key, password);
      remoteEncryptedKey = await encryptStringToBase64url(key, password);
    }
  }
  const newMetadata: MetadataOnRemote = {
    deletions: deletions,
  };
  if (isEqualMetadataOnRemote(origMetadata, newMetadata)) {
    log.debug(
      "metadata are the same, no need to re-generate and re-upload it."
    );
    return;
  }
  const resultText = serializeMetadataOnRemote(newMetadata);
  // upload the serialized text directly (no vault file behind it)
  await client.uploadToRemote(
    key,
    undefined,
    false,
    password,
    remoteEncryptedKey,
    undefined,
    true,
    resultText
  );
};
/**
 * Execute the decided operation for a single item: upload, download,
 * create folder, delete on either/both sides, or skip.
 * After any real deletion/upload/download/creation, the local
 * delete/rename history for the key is cleared.
 * @param localDeleteFunc callback that removes a local file/folder
 */
const dispatchOperationToActual = async (
  key: string,
  vaultRandomID: string,
  r: FileOrFolderMixedState,
  client: RemoteClient,
  db: InternalDBs,
  vault: Vault,
  localDeleteFunc: any,
  password: string = ""
) => {
  let remoteEncryptedKey = key;
  if (password !== "") {
    remoteEncryptedKey = r.remoteEncryptedKey;
    if (remoteEncryptedKey === undefined || remoteEncryptedKey === "") {
      // the old version uses base32
      // remoteEncryptedKey = await encryptStringToBase32(key, password);
      // the new version users base64url
      remoteEncryptedKey = await encryptStringToBase64url(key, password);
    }
  }
  if (r.decision === undefined) {
    throw Error(`unknown decision in ${JSON.stringify(r)}`);
  } else if (r.decision === "skipUploading") {
    // do nothing!
  } else if (r.decision === "uploadLocalDelHistToRemote") {
    // local deletion wins: remove on both sides where still present
    if (r.existLocal) {
      await localDeleteFunc(r.key);
    }
    if (r.existRemote) {
      await client.deleteFromRemote(r.key, password, remoteEncryptedKey);
    }
    await clearDeleteRenameHistoryOfKeyAndVault(db, r.key, vaultRandomID);
  } else if (r.decision === "keepRemoteDelHist") {
    // remote deletion wins: remove on both sides where still present
    if (r.existLocal) {
      await localDeleteFunc(r.key);
    }
    if (r.existRemote) {
      await client.deleteFromRemote(r.key, password, remoteEncryptedKey);
    }
    await clearDeleteRenameHistoryOfKeyAndVault(db, r.key, vaultRandomID);
  } else if (r.decision === "uploadLocalToRemote") {
    if (
      client.serviceType === "onedrive" &&
      r.sizeLocal === 0 &&
      password === ""
    ) {
      // special treatment for empty files for OneDrive
      // TODO: it's ugly, any other way?
      // special treatment for OneDrive: do nothing, skip empty file without encryption
      // if it's empty folder, or it's encrypted file/folder, it continues to be uploaded.
    } else {
      const remoteObjMeta = await client.uploadToRemote(
        r.key,
        vault,
        false,
        password,
        remoteEncryptedKey
      );
      // remember local<->remote mtime/size mapping for the next sync
      await upsertSyncMetaMappingDataByVault(
        client.serviceType,
        db,
        r.key,
        r.mtimeLocal,
        r.sizeLocal,
        r.key,
        remoteObjMeta.lastModified,
        remoteObjMeta.size,
        remoteObjMeta.etag,
        vaultRandomID
      );
    }
    await clearDeleteRenameHistoryOfKeyAndVault(db, r.key, vaultRandomID);
  } else if (r.decision === "downloadRemoteToLocal") {
    await mkdirpInVault(r.key, vault); /* should be unnecessary */
    await client.downloadFromRemote(
      r.key,
      vault,
      r.mtimeRemote,
      password,
      remoteEncryptedKey
    );
    await clearDeleteRenameHistoryOfKeyAndVault(db, r.key, vaultRandomID);
  } else if (r.decision === "createFolder") {
    // create the folder on whichever side is missing it
    if (!r.existLocal) {
      await mkdirpInVault(r.key, vault);
    }
    if (!r.existRemote) {
      const remoteObjMeta = await client.uploadToRemote(
        r.key,
        vault,
        false,
        password,
        remoteEncryptedKey
      );
      await upsertSyncMetaMappingDataByVault(
        client.serviceType,
        db,
        r.key,
        r.mtimeLocal,
        r.sizeLocal,
        r.key,
        remoteObjMeta.lastModified,
        remoteObjMeta.size,
        remoteObjMeta.etag,
        vaultRandomID
      );
    }
    await clearDeleteRenameHistoryOfKeyAndVault(db, r.key, vaultRandomID);
  } else if (r.decision === "uploadLocalDelHistToRemoteFolder") {
    // folder variant of a local-deletion win
    if (r.existLocal) {
      await localDeleteFunc(r.key);
    }
    if (r.existRemote) {
      await client.deleteFromRemote(r.key, password, remoteEncryptedKey);
    }
    await clearDeleteRenameHistoryOfKeyAndVault(db, r.key, vaultRandomID);
  } else if (r.decision === "keepRemoteDelHistFolder") {
    // folder variant of a remote-deletion win
    if (r.existLocal) {
      await localDeleteFunc(r.key);
    }
    if (r.existRemote) {
      await client.deleteFromRemote(r.key, password, remoteEncryptedKey);
    }
    await clearDeleteRenameHistoryOfKeyAndVault(db, r.key, vaultRandomID);
  } else if (r.decision === "skipFolder") {
    // do nothing!
  } else if (r.decision === "skipUploadingTooLarge") {
    // do nothing!
  } else if (r.decision === "skipDownloadingTooLarge") {
    // do nothing!
  } else if (r.decision === "skipUsingLocalDelTooLarge") {
    // do nothing!
  } else if (r.decision === "skipUsingRemoteDelTooLarge") {
    // do nothing!
  } else {
    throw Error(`unknown decision in ${JSON.stringify(r)}`);
  }
};
/**
 * Split the sync plan into three phases:
 *   1. folderCreationOps - one bucket per folder depth, shallowest first
 *      (parents must exist before children);
 *   2. deletionOps       - one bucket per depth, deepest first (children
 *      must be removed before their parents);
 *   3. uploadDownloads   - a single bucket (order does not matter).
 * Skip-decisions contribute nothing; realTotalCount counts only the
 * operations that will actually run.
 * Each state is shallow-copied so later mutation does not leak back into
 * the plan.
 */
const splitThreeSteps = (syncPlan: SyncPlanType, sortedKeys: string[]) => {
  const mixedStates = syncPlan.mixedStates;
  const folderCreationOps: FileOrFolderMixedState[][] = [];
  const deletionOps: FileOrFolderMixedState[][] = [];
  const uploadDownloads: FileOrFolderMixedState[][] = [];
  let realTotalCount = 0;

  // append val to the bucket of the given (1-based) level, creating the
  // bucket lazily
  const pushToLevel = (
    ops: FileOrFolderMixedState[][],
    level: number,
    val: FileOrFolderMixedState
  ) => {
    if (ops[level - 1] === undefined) {
      ops[level - 1] = [val];
    } else {
      ops[level - 1].push(val);
    }
  };

  for (const key of sortedKeys) {
    const val: FileOrFolderMixedState = Object.assign({}, mixedStates[key]); // copy to avoid issue
    if (
      val.decision === "skipFolder" ||
      val.decision === "skipUploading" ||
      val.decision === "skipDownloadingTooLarge" ||
      val.decision === "skipUploadingTooLarge" ||
      val.decision === "skipUsingLocalDelTooLarge" ||
      val.decision === "skipUsingRemoteDelTooLarge"
    ) {
      // nothing to execute for skip decisions
    } else if (val.decision === "createFolder") {
      pushToLevel(folderCreationOps, atWhichLevel(key), val);
      realTotalCount += 1;
    } else if (
      val.decision === "uploadLocalDelHistToRemoteFolder" ||
      val.decision === "keepRemoteDelHistFolder" ||
      val.decision === "uploadLocalDelHistToRemote" ||
      val.decision === "keepRemoteDelHist"
    ) {
      pushToLevel(deletionOps, atWhichLevel(key), val);
      realTotalCount += 1;
    } else if (
      val.decision === "uploadLocalToRemote" ||
      val.decision === "downloadRemoteToLocal"
    ) {
      // only one level needed here
      pushToLevel(uploadDownloads, 1, val);
      realTotalCount += 1;
    } else {
      throw Error(`unknown decision ${val.decision} for ${key}`);
    }
  }

  // the deletionOps should be run from max level to min level
  // right now it is sorted by level from min to max (NOT length of key!)
  // so we need to reverse it!
  deletionOps.reverse(); // inplace reverse

  return {
    folderCreationOps: folderCreationOps,
    deletionOps: deletionOps,
    uploadDownloads: uploadDownloads,
    realTotalCount: realTotalCount,
  };
};
export const doActualSync = async (
client: RemoteClient,
db: InternalDBs,
vaultRandomID: string,
vault: Vault,
syncPlan: SyncPlanType,
sortedKeys: string[],
metadataFile: FileOrFolderMixedState,
origMetadata: MetadataOnRemote,
sizesGoWrong: FileOrFolderMixedState[],
deletions: DeletionOnRemote[],
localDeleteFunc: any,
password: string = "",
concurrency: number = 1,
callbackSizesGoWrong?: any,
callbackSyncProcess?: any
) => {
const mixedStates = syncPlan.mixedStates;
const totalCount = sortedKeys.length || 0;
if (sizesGoWrong.length > 0) {
log.debug(`some sizes are larger than the threshold, abort and show hints`);
callbackSizesGoWrong(sizesGoWrong);
return;
}
log.debug(`start syncing extra data firstly`);
await uploadExtraMeta(
client,
metadataFile,
origMetadata,
deletions,
password
);
log.debug(`finish syncing extra data firstly`);
log.debug(`concurrency === ${concurrency}`);
if (concurrency === 1) {
// run everything in sequence
// good old way
for (let i = 0; i < sortedKeys.length; ++i) {
const key = sortedKeys[i];
const val = mixedStates[key];
log.debug(`start syncing "${key}" with plan ${JSON.stringify(val)}`);
if (callbackSyncProcess !== undefined) {
await callbackSyncProcess(i, totalCount, key, val.decision);
}
await dispatchOperationToActual(
key,
vaultRandomID,
val,
client,
db,
vault,
localDeleteFunc,
password
);
log.debug(`finished ${key}`);
}
return; // shortcut return, avoid too many nests below
}
const { folderCreationOps, deletionOps, uploadDownloads, realTotalCount } =
splitThreeSteps(syncPlan, sortedKeys);
const nested = [folderCreationOps, deletionOps, uploadDownloads];
const logTexts = [
`1. create all folders from shadowest to deepest, also check undefined decision`,
`2. delete files and folders from deepest to shadowest`,
`3. upload or download files in parallel, with the desired concurrency=${concurrency}`,
];
let realCounter = 0;
for (let i = 0; i < nested.length; ++i) {
log.debug(logTexts[i]);
const operations: FileOrFolderMixedState[][] = nested[i];
for (let j = 0; j < operations.length; ++j) {
const singleLevelOps: FileOrFolderMixedState[] | undefined =
operations[j];
if (singleLevelOps === undefined || singleLevelOps === null) {
continue;
}
const queue = new PQueue({ concurrency: concurrency, autoStart: true });
const potentialErrors: Error[] = [];
let tooManyErrors = false;
for (let k = 0; k < singleLevelOps.length; ++k) {
const val: FileOrFolderMixedState = singleLevelOps[k];
const key = val.key;
const fn = async () => {
log.debug(`start syncing "${key}" with plan ${JSON.stringify(val)}`);
if (callbackSyncProcess !== undefined) {
await callbackSyncProcess(
realCounter,
realTotalCount,
key,
val.decision
);
realCounter += 1;
}
await dispatchOperationToActual(
key,
vaultRandomID,
val,
client,
db,
vault,
localDeleteFunc,
password
);
log.debug(`finished ${key}`);
};
queue.add(fn).catch((e) => {
const msg = `${key}: ${e.message}`;
potentialErrors.push(new Error(msg));
if (potentialErrors.length >= 3) {
tooManyErrors = true;
queue.pause();
queue.clear();
}
});
}
await queue.onIdle();
if (potentialErrors.length > 0) {
if (tooManyErrors) {
potentialErrors.push(
new Error("too many errors, stop the remaining tasks")
);
}
throw new AggregateError(potentialErrors);
}
}
}
}; | the_stack |
import * as msRest from "@azure/ms-rest-js";
// Root of the polymorphic response hierarchy; the "_type" wire field selects
// which concrete mapper deserializes a payload (see `discriminators` below).
export const ResponseBase: msRest.CompositeMapper = {
  serializedName: "ResponseBase",
  type: {
    name: "Composite",
    polymorphicDiscriminator: {
      serializedName: "_type",
      clientName: "_type"
    },
    uberParent: "ResponseBase",
    className: "ResponseBase",
    modelProperties: {
      _type: {
        required: true,
        serializedName: "_type",
        type: {
          name: "String"
        }
      }
    }
  }
};
// Extends ResponseBase with the server-assigned read-only "id".
export const Identifiable: msRest.CompositeMapper = {
  serializedName: "Identifiable",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "Identifiable",
    modelProperties: {
      ...ResponseBase.type.modelProperties,
      id: {
        readOnly: true,
        serializedName: "id",
        type: {
          name: "String"
        }
      }
    }
  }
};
// Extends Identifiable with navigation URLs (readLink / webSearchUrl).
export const Response: msRest.CompositeMapper = {
  serializedName: "Response",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "Response",
    modelProperties: {
      ...Identifiable.type.modelProperties,
      readLink: {
        readOnly: true,
        serializedName: "readLink",
        type: {
          name: "String"
        }
      },
      webSearchUrl: {
        readOnly: true,
        serializedName: "webSearchUrl",
        type: {
          name: "String"
        }
      }
    }
  }
};
// Generic entity: name/url/image/description and Bing identifiers.
export const Thing: msRest.CompositeMapper = {
  serializedName: "Thing",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "Thing",
    modelProperties: {
      ...Response.type.modelProperties,
      name: {
        readOnly: true,
        serializedName: "name",
        type: {
          name: "String"
        }
      },
      url: {
        readOnly: true,
        serializedName: "url",
        type: {
          name: "String"
        }
      },
      image: {
        readOnly: true,
        serializedName: "image",
        type: {
          name: "Composite",
          className: "ImageObject"
        }
      },
      description: {
        readOnly: true,
        serializedName: "description",
        type: {
          name: "String"
        }
      },
      alternateName: {
        readOnly: true,
        serializedName: "alternateName",
        type: {
          name: "String"
        }
      },
      bingId: {
        readOnly: true,
        serializedName: "bingId",
        type: {
          name: "String"
        }
      }
    }
  }
};
// Marker subtype of Thing; adds no properties of its own.
export const Intangible: msRest.CompositeMapper = {
  serializedName: "Intangible",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "Intangible",
    modelProperties: {
      ...Thing.type.modelProperties
    }
  }
};
// Marker subtype of Intangible; adds no properties of its own.
export const StructuredValue: msRest.CompositeMapper = {
  serializedName: "StructuredValue",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "StructuredValue",
    modelProperties: {
      ...Intangible.type.modelProperties
    }
  }
};
// A 2D point with required x/y numeric coordinates.
export const Point2D: msRest.CompositeMapper = {
  serializedName: "Point2D",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "Point2D",
    modelProperties: {
      ...StructuredValue.type.modelProperties,
      x: {
        required: true,
        serializedName: "x",
        type: {
          name: "Number"
        }
      },
      y: {
        required: true,
        serializedName: "y",
        type: {
          name: "Number"
        }
      }
    }
  }
};
// A quadrilateral given by its four required corner points.
export const NormalizedQuadrilateral: msRest.CompositeMapper = {
  serializedName: "NormalizedQuadrilateral",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "NormalizedQuadrilateral",
    modelProperties: {
      ...StructuredValue.type.modelProperties,
      topLeft: {
        required: true,
        serializedName: "topLeft",
        type: {
          name: "Composite",
          className: "Point2D"
        }
      },
      topRight: {
        required: true,
        serializedName: "topRight",
        type: {
          name: "Composite",
          className: "Point2D"
        }
      },
      bottomRight: {
        required: true,
        serializedName: "bottomRight",
        type: {
          name: "Composite",
          className: "Point2D"
        }
      },
      bottomLeft: {
        required: true,
        serializedName: "bottomLeft",
        type: {
          name: "Composite",
          className: "Point2D"
        }
      }
    }
  }
};
// Image region pair: the rectangle that was queried and the one to display.
// Not polymorphic -- it never appears behind a "_type" discriminator.
export const ImageTagRegion: msRest.CompositeMapper = {
  serializedName: "ImageTagRegion",
  type: {
    name: "Composite",
    className: "ImageTagRegion",
    modelProperties: {
      queryRectangle: {
        required: true,
        serializedName: "queryRectangle",
        type: {
          name: "Composite",
          className: "NormalizedQuadrilateral"
        }
      },
      displayRectangle: {
        required: true,
        serializedName: "displayRectangle",
        type: {
          name: "Composite",
          className: "NormalizedQuadrilateral"
        }
      }
    }
  }
};
// Authored content: thumbnail, providers, publish date and text.
export const CreativeWork: msRest.CompositeMapper = {
  serializedName: "CreativeWork",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "CreativeWork",
    modelProperties: {
      ...Thing.type.modelProperties,
      thumbnailUrl: {
        readOnly: true,
        serializedName: "thumbnailUrl",
        type: {
          name: "String"
        }
      },
      provider: {
        readOnly: true,
        serializedName: "provider",
        type: {
          name: "Sequence",
          element: {
            type: {
              name: "Composite",
              className: "Thing"
            }
          }
        }
      },
      datePublished: {
        readOnly: true,
        serializedName: "datePublished",
        type: {
          name: "String"
        }
      },
      text: {
        readOnly: true,
        serializedName: "text",
        type: {
          name: "String"
        }
      }
    }
  }
};
// An action the user can take, with its results and display metadata.
export const Action: msRest.CompositeMapper = {
  serializedName: "Action",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "Action",
    modelProperties: {
      ...CreativeWork.type.modelProperties,
      result: {
        readOnly: true,
        serializedName: "result",
        type: {
          name: "Sequence",
          element: {
            type: {
              name: "Composite",
              className: "Thing"
            }
          }
        }
      },
      displayName: {
        readOnly: true,
        serializedName: "displayName",
        type: {
          name: "String"
        }
      },
      isTopAction: {
        readOnly: true,
        serializedName: "isTopAction",
        type: {
          name: "Boolean"
        }
      },
      serviceUrl: {
        readOnly: true,
        serializedName: "serviceUrl",
        type: {
          name: "String"
        }
      }
    }
  }
};
// Base for image-related actions; "actionType" further identifies the kind.
export const ImageAction: msRest.CompositeMapper = {
  serializedName: "ImageAction",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "ImageAction",
    modelProperties: {
      ...Action.type.modelProperties,
      actionType: {
        readOnly: true,
        serializedName: "actionType",
        type: {
          name: "String"
        }
      }
    }
  }
};
// A tag found in an image: display name, bounding box and available actions.
export const ImageTag: msRest.CompositeMapper = {
  serializedName: "ImageTag",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "ImageTag",
    modelProperties: {
      ...Thing.type.modelProperties,
      displayName: {
        readOnly: true,
        serializedName: "displayName",
        type: {
          name: "String"
        }
      },
      boundingBox: {
        readOnly: true,
        serializedName: "boundingBox",
        type: {
          name: "Composite",
          className: "ImageTagRegion"
        }
      },
      actions: {
        readOnly: true,
        serializedName: "actions",
        type: {
          name: "Sequence",
          element: {
            type: {
              name: "Composite",
              className: "ImageAction"
            }
          }
        }
      }
    }
  }
};
// Marker subtype of Thing representing an organization; no extra properties.
export const Organization: msRest.CompositeMapper = {
  serializedName: "Organization",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "Organization",
    modelProperties: {
      ...Thing.type.modelProperties
    }
  }
};
// Root of a second polymorphic hierarchy (ratings); note the distinct
// uberParent "PropertiesItem" and serialized name "Properties/Item".
export const PropertiesItem: msRest.CompositeMapper = {
  serializedName: "Properties/Item",
  type: {
    name: "Composite",
    polymorphicDiscriminator: {
      serializedName: "_type",
      clientName: "_type"
    },
    uberParent: "PropertiesItem",
    className: "PropertiesItem",
    modelProperties: {
      text: {
        readOnly: true,
        serializedName: "text",
        type: {
          name: "String"
        }
      },
      _type: {
        required: true,
        serializedName: "_type",
        type: {
          name: "String"
        }
      }
    }
  }
};
// A rating value with an optional best-possible-rating bound.
export const Rating: msRest.CompositeMapper = {
  serializedName: "Rating",
  type: {
    name: "Composite",
    polymorphicDiscriminator: PropertiesItem.type.polymorphicDiscriminator,
    uberParent: "PropertiesItem",
    className: "Rating",
    modelProperties: {
      ...PropertiesItem.type.modelProperties,
      ratingValue: {
        required: true,
        serializedName: "ratingValue",
        type: {
          name: "Number"
        }
      },
      bestRating: {
        readOnly: true,
        serializedName: "bestRating",
        type: {
          name: "Number"
        }
      }
    }
  }
};
// A Rating aggregated over multiple reviews (adds reviewCount).
export const AggregateRating: msRest.CompositeMapper = {
  serializedName: "AggregateRating",
  type: {
    name: "Composite",
    polymorphicDiscriminator: PropertiesItem.type.polymorphicDiscriminator,
    uberParent: "PropertiesItem",
    className: "AggregateRating",
    modelProperties: {
      ...Rating.type.modelProperties,
      reviewCount: {
        readOnly: true,
        serializedName: "reviewCount",
        type: {
          name: "Number"
        }
      }
    }
  }
};
// A commercial offer: seller, price (currency defaults to USD on the wire),
// availability, aggregated rating and last-updated timestamp.
export const Offer: msRest.CompositeMapper = {
  serializedName: "Offer",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "Offer",
    modelProperties: {
      ...Thing.type.modelProperties,
      seller: {
        readOnly: true,
        serializedName: "seller",
        type: {
          name: "Composite",
          className: "Organization"
        }
      },
      price: {
        readOnly: true,
        serializedName: "price",
        type: {
          name: "Number"
        }
      },
      priceCurrency: {
        readOnly: true,
        serializedName: "priceCurrency",
        defaultValue: 'USD',
        type: {
          name: "String"
        }
      },
      availability: {
        readOnly: true,
        serializedName: "availability",
        type: {
          name: "String"
        }
      },
      aggregateRating: {
        readOnly: true,
        serializedName: "aggregateRating",
        type: {
          name: "Composite",
          className: "AggregateRating"
        }
      },
      lastUpdated: {
        readOnly: true,
        serializedName: "lastUpdated",
        type: {
          name: "String"
        }
      }
    }
  }
};
// An Offer that summarizes a list of individual offers.
export const AggregateOffer: msRest.CompositeMapper = {
  serializedName: "AggregateOffer",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "AggregateOffer",
    modelProperties: {
      ...Offer.type.modelProperties,
      offers: {
        readOnly: true,
        serializedName: "offers",
        type: {
          name: "Sequence",
          element: {
            type: {
              name: "Composite",
              className: "Offer"
            }
          }
        }
      }
    }
  }
};
// Per-image insight counters plus an optional aggregated shopping offer.
// Not polymorphic; serialized name is the slash-scoped "Images/ImageMetadata".
export const ImagesImageMetadata: msRest.CompositeMapper = {
  serializedName: "Images/ImageMetadata",
  type: {
    name: "Composite",
    className: "ImagesImageMetadata",
    modelProperties: {
      shoppingSourcesCount: {
        readOnly: true,
        serializedName: "shoppingSourcesCount",
        type: {
          name: "Number"
        }
      },
      recipeSourcesCount: {
        readOnly: true,
        serializedName: "recipeSourcesCount",
        type: {
          name: "Number"
        }
      },
      aggregateOffer: {
        readOnly: true,
        serializedName: "aggregateOffer",
        type: {
          name: "Composite",
          className: "AggregateOffer"
        }
      }
    }
  }
};
// Hosted media content: content/host URLs, size, format and dimensions.
export const MediaObject: msRest.CompositeMapper = {
  serializedName: "MediaObject",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "MediaObject",
    modelProperties: {
      ...CreativeWork.type.modelProperties,
      contentUrl: {
        readOnly: true,
        serializedName: "contentUrl",
        type: {
          name: "String"
        }
      },
      hostPageUrl: {
        readOnly: true,
        serializedName: "hostPageUrl",
        type: {
          name: "String"
        }
      },
      contentSize: {
        readOnly: true,
        serializedName: "contentSize",
        type: {
          name: "String"
        }
      },
      encodingFormat: {
        readOnly: true,
        serializedName: "encodingFormat",
        type: {
          name: "String"
        }
      },
      hostPageDisplayUrl: {
        readOnly: true,
        serializedName: "hostPageDisplayUrl",
        type: {
          name: "String"
        }
      },
      width: {
        readOnly: true,
        serializedName: "width",
        type: {
          name: "Number"
        }
      },
      height: {
        readOnly: true,
        serializedName: "height",
        type: {
          name: "Number"
        }
      }
    }
  }
};
// An image: thumbnail (recursively an ImageObject), insights token/metadata,
// ids and visual features.
export const ImageObject: msRest.CompositeMapper = {
  serializedName: "ImageObject",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "ImageObject",
    modelProperties: {
      ...MediaObject.type.modelProperties,
      thumbnail: {
        readOnly: true,
        serializedName: "thumbnail",
        type: {
          name: "Composite",
          className: "ImageObject"
        }
      },
      imageInsightsToken: {
        readOnly: true,
        serializedName: "imageInsightsToken",
        type: {
          name: "String"
        }
      },
      insightsMetadata: {
        readOnly: true,
        serializedName: "insightsMetadata",
        type: {
          name: "Composite",
          className: "ImagesImageMetadata"
        }
      },
      imageId: {
        readOnly: true,
        serializedName: "imageId",
        type: {
          name: "String"
        }
      },
      accentColor: {
        readOnly: true,
        serializedName: "accentColor",
        type: {
          name: "String"
        }
      },
      visualWords: {
        readOnly: true,
        serializedName: "visualWords",
        type: {
          name: "String"
        }
      }
    }
  }
};
// Top-level visual-search result: recognized tags plus the query image.
export const ImageKnowledge: msRest.CompositeMapper = {
  serializedName: "ImageKnowledge",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "ImageKnowledge",
    modelProperties: {
      ...Response.type.modelProperties,
      tags: {
        readOnly: true,
        serializedName: "tags",
        type: {
          name: "Sequence",
          element: {
            type: {
              name: "Composite",
              className: "ImageTag"
            }
          }
        }
      },
      image: {
        readOnly: true,
        serializedName: "image",
        type: {
          name: "Composite",
          className: "ImageObject"
        }
      }
    }
  }
};
// A single service error. Client class is "ErrorModel" (wire name "Error")
// to avoid clashing with the global Error type; "code" defaults to 'None'.
export const ErrorModel: msRest.CompositeMapper = {
  serializedName: "Error",
  type: {
    name: "Composite",
    className: "ErrorModel",
    modelProperties: {
      code: {
        required: true,
        serializedName: "code",
        defaultValue: 'None',
        type: {
          name: "String"
        }
      },
      subCode: {
        readOnly: true,
        serializedName: "subCode",
        type: {
          name: "String"
        }
      },
      message: {
        required: true,
        serializedName: "message",
        type: {
          name: "String"
        }
      },
      moreDetails: {
        readOnly: true,
        serializedName: "moreDetails",
        type: {
          name: "String"
        }
      },
      parameter: {
        readOnly: true,
        serializedName: "parameter",
        type: {
          name: "String"
        }
      },
      value: {
        readOnly: true,
        serializedName: "value",
        type: {
          name: "String"
        }
      }
    }
  }
};
// Error payload returned by the service: a required list of ErrorModel.
export const ErrorResponse: msRest.CompositeMapper = {
  serializedName: "ErrorResponse",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "ErrorResponse",
    modelProperties: {
      ...Response.type.modelProperties,
      errors: {
        required: true,
        serializedName: "errors",
        type: {
          name: "Sequence",
          element: {
            type: {
              name: "Composite",
              className: "ErrorModel"
            }
          }
        }
      }
    }
  }
};
// A person entity with job title and Twitter profile fields.
export const Person: msRest.CompositeMapper = {
  serializedName: "Person",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "Person",
    modelProperties: {
      ...Thing.type.modelProperties,
      jobTitle: {
        readOnly: true,
        serializedName: "jobTitle",
        type: {
          name: "String"
        }
      },
      twitterProfile: {
        readOnly: true,
        serializedName: "twitterProfile",
        type: {
          name: "String"
        }
      }
    }
  }
};
// Marker subtype of ImageAction for entity results; no extra properties.
export const ImageEntityAction: msRest.CompositeMapper = {
  serializedName: "ImageEntityAction",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "ImageEntityAction",
    modelProperties: {
      ...ImageAction.type.modelProperties
    }
  }
};
// A plain (non-polymorphic) container holding a list of ImageObject values.
export const ImagesModule: msRest.CompositeMapper = {
  serializedName: "ImagesModule",
  type: {
    name: "Composite",
    className: "ImagesModule",
    modelProperties: {
      value: {
        readOnly: true,
        serializedName: "value",
        type: {
          name: "Sequence",
          element: {
            type: {
              name: "Composite",
              className: "ImageObject"
            }
          }
        }
      }
    }
  }
};
// ImageAction whose payload ("data") is an ImagesModule.
export const ImageModuleAction: msRest.CompositeMapper = {
  serializedName: "ImageModuleAction",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "ImageModuleAction",
    modelProperties: {
      ...ImageAction.type.modelProperties,
      data: {
        readOnly: true,
        serializedName: "data",
        type: {
          name: "Composite",
          className: "ImagesModule"
        }
      }
    }
  }
};
// A cooking recipe with its cook/prep/total time strings.
export const Recipe: msRest.CompositeMapper = {
  serializedName: "Recipe",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "Recipe",
    modelProperties: {
      ...CreativeWork.type.modelProperties,
      cookTime: {
        readOnly: true,
        serializedName: "cookTime",
        type: {
          name: "String"
        }
      },
      prepTime: {
        readOnly: true,
        serializedName: "prepTime",
        type: {
          name: "String"
        }
      },
      totalTime: {
        readOnly: true,
        serializedName: "totalTime",
        type: {
          name: "String"
        }
      }
    }
  }
};
// A plain container holding a list of Recipe values.
export const RecipesModule: msRest.CompositeMapper = {
  serializedName: "RecipesModule",
  type: {
    name: "Composite",
    className: "RecipesModule",
    modelProperties: {
      value: {
        readOnly: true,
        serializedName: "value",
        type: {
          name: "Sequence",
          element: {
            type: {
              name: "Composite",
              className: "Recipe"
            }
          }
        }
      }
    }
  }
};
// ImageAction whose payload ("data") is a RecipesModule.
export const ImageRecipesAction: msRest.CompositeMapper = {
  serializedName: "ImageRecipesAction",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "ImageRecipesAction",
    modelProperties: {
      ...ImageAction.type.modelProperties,
      data: {
        readOnly: true,
        serializedName: "data",
        type: {
          name: "Composite",
          className: "RecipesModule"
        }
      }
    }
  }
};
// A search query string plus its display/search URLs and thumbnail.
export const Query: msRest.CompositeMapper = {
  serializedName: "Query",
  type: {
    name: "Composite",
    className: "Query",
    modelProperties: {
      text: {
        required: true,
        serializedName: "text",
        type: {
          name: "String"
        }
      },
      displayText: {
        readOnly: true,
        serializedName: "displayText",
        type: {
          name: "String"
        }
      },
      webSearchUrl: {
        readOnly: true,
        serializedName: "webSearchUrl",
        type: {
          name: "String"
        }
      },
      searchLink: {
        readOnly: true,
        serializedName: "searchLink",
        type: {
          name: "String"
        }
      },
      thumbnail: {
        readOnly: true,
        serializedName: "thumbnail",
        type: {
          name: "Composite",
          className: "ImageObject"
        }
      }
    }
  }
};
// A plain container holding a list of related-search Query values.
export const RelatedSearchesModule: msRest.CompositeMapper = {
  serializedName: "RelatedSearchesModule",
  type: {
    name: "Composite",
    className: "RelatedSearchesModule",
    modelProperties: {
      value: {
        readOnly: true,
        serializedName: "value",
        type: {
          name: "Sequence",
          element: {
            type: {
              name: "Composite",
              className: "Query"
            }
          }
        }
      }
    }
  }
};
// ImageAction whose payload ("data") is a RelatedSearchesModule.
export const ImageRelatedSearchesAction: msRest.CompositeMapper = {
  serializedName: "ImageRelatedSearchesAction",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "ImageRelatedSearchesAction",
    modelProperties: {
      ...ImageAction.type.modelProperties,
      data: {
        readOnly: true,
        serializedName: "data",
        type: {
          name: "Composite",
          className: "RelatedSearchesModule"
        }
      }
    }
  }
};
// ImageAction whose payload ("data") is an AggregateOffer (shopping sources).
export const ImageShoppingSourcesAction: msRest.CompositeMapper = {
  serializedName: "ImageShoppingSourcesAction",
  type: {
    name: "Composite",
    polymorphicDiscriminator: ResponseBase.type.polymorphicDiscriminator,
    uberParent: "ResponseBase",
    className: "ImageShoppingSourcesAction",
    modelProperties: {
      ...ImageAction.type.modelProperties,
      data: {
        readOnly: true,
        serializedName: "data",
        type: {
          name: "Composite",
          className: "AggregateOffer"
        }
      }
    }
  }
};
// A crop rectangle given by its four required edge coordinates.
export const CropArea: msRest.CompositeMapper = {
  serializedName: "CropArea",
  type: {
    name: "Composite",
    className: "CropArea",
    modelProperties: {
      top: {
        required: true,
        serializedName: "top",
        type: {
          name: "Number"
        }
      },
      bottom: {
        required: true,
        serializedName: "bottom",
        type: {
          name: "Number"
        }
      },
      left: {
        required: true,
        serializedName: "left",
        type: {
          name: "Number"
        }
      },
      right: {
        required: true,
        serializedName: "right",
        type: {
          name: "Number"
        }
      }
    }
  }
};
// Request-side image reference: insights token or URL, plus optional crop.
export const ImageInfo: msRest.CompositeMapper = {
  serializedName: "ImageInfo",
  type: {
    name: "Composite",
    className: "ImageInfo",
    modelProperties: {
      imageInsightsToken: {
        serializedName: "imageInsightsToken",
        type: {
          name: "String"
        }
      },
      url: {
        serializedName: "url",
        type: {
          name: "String"
        }
      },
      cropArea: {
        serializedName: "cropArea",
        type: {
          name: "Composite",
          className: "CropArea"
        }
      }
    }
  }
};
// Request-side filters; currently only an optional site restriction.
export const Filters: msRest.CompositeMapper = {
  serializedName: "Filters",
  type: {
    name: "Composite",
    className: "Filters",
    modelProperties: {
      site: {
        serializedName: "site",
        type: {
          name: "String"
        }
      }
    }
  }
};
// Request-side knowledge options wrapping the Filters object.
export const KnowledgeRequest: msRest.CompositeMapper = {
  serializedName: "KnowledgeRequest",
  type: {
    name: "Composite",
    className: "KnowledgeRequest",
    modelProperties: {
      filters: {
        serializedName: "filters",
        type: {
          name: "Composite",
          className: "Filters"
        }
      }
    }
  }
};
// Top-level request body: the image to analyze plus knowledge options.
export const VisualSearchRequest: msRest.CompositeMapper = {
  serializedName: "VisualSearchRequest",
  type: {
    name: "Composite",
    className: "VisualSearchRequest",
    modelProperties: {
      imageInfo: {
        serializedName: "imageInfo",
        type: {
          name: "Composite",
          className: "ImageInfo"
        }
      },
      knowledgeRequest: {
        serializedName: "knowledgeRequest",
        type: {
          name: "Composite",
          className: "KnowledgeRequest"
        }
      }
    }
  }
};
// Lookup table used by the ms-rest-js serializer to resolve a polymorphic
// "_type" value to its concrete mapper. Keys are "<uberParent>.<subtype>"
// (or just the uberParent name for the roots "ResponseBase" and
// "Properties/Item").
export const discriminators = {
  'ResponseBase.Point2D' : Point2D,
  'ResponseBase.NormalizedQuadrilateral' : NormalizedQuadrilateral,
  'ResponseBase.ImageAction' : ImageAction,
  'ResponseBase.ImageTag' : ImageTag,
  'ResponseBase.Organization' : Organization,
  'PropertiesItem.AggregateRating' : AggregateRating,
  'ResponseBase.Offer' : Offer,
  'ResponseBase.AggregateOffer' : AggregateOffer,
  'ResponseBase.ImageObject' : ImageObject,
  'ResponseBase.ImageKnowledge' : ImageKnowledge,
  'ResponseBase.Response' : Response,
  'ResponseBase.Identifiable' : Identifiable,
  'ResponseBase.ErrorResponse' : ErrorResponse,
  'ResponseBase.Thing' : Thing,
  'ResponseBase.Action' : Action,
  'ResponseBase.MediaObject' : MediaObject,
  'ResponseBase' : ResponseBase,
  'ResponseBase.CreativeWork' : CreativeWork,
  'ResponseBase.Person' : Person,
  'ResponseBase.Intangible' : Intangible,
  'ResponseBase.ImageEntityAction' : ImageEntityAction,
  'ResponseBase.ImageModuleAction' : ImageModuleAction,
  'ResponseBase.Recipe' : Recipe,
  'ResponseBase.ImageRecipesAction' : ImageRecipesAction,
  'ResponseBase.ImageRelatedSearchesAction' : ImageRelatedSearchesAction,
  'ResponseBase.ImageShoppingSourcesAction' : ImageShoppingSourcesAction,
  'ResponseBase.StructuredValue' : StructuredValue,
  'Properties/Item' : PropertiesItem,
  'PropertiesItem.Rating' : Rating
};
//@ts-check
///<reference path="devkit.d.ts" />
declare namespace DevKit {
namespace Formmsdyn_agreement_Agreement {
interface Header extends DevKit.Controls.IHeader {
msdyn_name: DevKit.Controls.String;
/** Account to be serviced */
msdyn_ServiceAccount: DevKit.Controls.Lookup;
/** Owner Id */
OwnerId: DevKit.Controls.Lookup;
}
interface tab__4C8BEF3B_E06C_4411_B312_59F180556E4D_Sections {
_3568BE1D_2A14_40A9_B8CD_B04E3806E3E2: DevKit.Controls.Section;
_4C8BEF3B_E06C_4411_B312_59F180556E4D_SECTION_2: DevKit.Controls.Section;
_4C8BEF3B_E06C_4411_B312_59F180556E4D_SECTION_3: DevKit.Controls.Section;
tab_1_section_2: DevKit.Controls.Section;
tab_4_section_1: DevKit.Controls.Section;
}
interface tab__7A6686D3_72AD_4724_9C46_5BA3C5FAE32D_Sections {
_2BCF1BE9_1CEB_434D_9866_6486738F2ACC: DevKit.Controls.Section;
_7A6686D3_72AD_4724_9C46_5BA3C5FAE32D_SECTION_3: DevKit.Controls.Section;
_BC490053_F7D4_44E2_BFB7_1CA938F59E1B: DevKit.Controls.Section;
}
interface tab_tab_3_Sections {
tab_3_section_3: DevKit.Controls.Section;
tab_3_section_4: DevKit.Controls.Section;
}
interface tab_tab_4_Sections {
tab_3_section_1: DevKit.Controls.Section;
}
interface tab__4C8BEF3B_E06C_4411_B312_59F180556E4D extends DevKit.Controls.ITab {
Section: tab__4C8BEF3B_E06C_4411_B312_59F180556E4D_Sections;
}
interface tab__7A6686D3_72AD_4724_9C46_5BA3C5FAE32D extends DevKit.Controls.ITab {
Section: tab__7A6686D3_72AD_4724_9C46_5BA3C5FAE32D_Sections;
}
interface tab_tab_3 extends DevKit.Controls.ITab {
Section: tab_tab_3_Sections;
}
interface tab_tab_4 extends DevKit.Controls.ITab {
Section: tab_tab_4_Sections;
}
interface Tabs {
_4C8BEF3B_E06C_4411_B312_59F180556E4D: tab__4C8BEF3B_E06C_4411_B312_59F180556E4D;
_7A6686D3_72AD_4724_9C46_5BA3C5FAE32D: tab__7A6686D3_72AD_4724_9C46_5BA3C5FAE32D;
tab_3: tab_tab_3;
tab_4: tab_tab_4;
}
interface Body {
Tab: Tabs;
/** Shows the details of this agreement, as presented to the customer */
msdyn_AgreementDetails: DevKit.Controls.String;
/** This field defines the time of day when Work Orders and Invoices are generated by the Agreement Booking Setups and Agreement Invoice Setups defined on this Agreement. If this field is blank, the logic will look to the overall setting on the Field Service Settings record. */
msdyn_AgreementRecordGeneration: DevKit.Controls.DateTime;
/** This field defines the time of day when Work Orders and Invoices are generated by the Agreement Booking Setups and Agreement Invoice Setups defined on this Agreement. If this field is blank, the logic will look to the overall setting on the Field Service Settings record. */
msdyn_AgreementRecordGeneration_1: DevKit.Controls.DateTime;
/** Account to be billed. If a billing account has been set on service account it will be populated by default. Otherwise, the billing account will be the same as the service account. */
msdyn_BillingAccount: DevKit.Controls.Lookup;
/** Enter the date this agreement was canceled. */
msdyn_DateCanceled: DevKit.Controls.Date;
/** Type a description of the agreement. */
msdyn_Description: DevKit.Controls.String;
/** Shows the duration how long this agreement is active. */
msdyn_Duration: DevKit.Controls.Integer;
/** Enter the date when this agreement is no longer active. */
msdyn_EndDate: DevKit.Controls.Date;
msdyn_name: DevKit.Controls.String;
/** If this agreement originates from another agreement, you should specify the originating agreement here */
msdyn_OriginatingAgreement: DevKit.Controls.Lookup;
/** Price List that controls pricing for products added to this Agreement. By default the system will use the Price List specified on the account */
msdyn_PriceList: DevKit.Controls.Lookup;
/** Tax Code to be used to calculate tax when Agreement is taxable. By default the system will use the tax code specified on the service account */
msdyn_SalesTaxCode: DevKit.Controls.Lookup;
/** Account to be serviced */
msdyn_ServiceAccount: DevKit.Controls.Lookup;
/** The service territory this Agreement relates to. By default this will be set to the Service Territory defined on the service account */
msdyn_ServiceTerritory: DevKit.Controls.Lookup;
/** Enter the date from when this agreement is active. */
msdyn_StartDate: DevKit.Controls.Date;
/** Agreement Substatus */
msdyn_SubStatus: DevKit.Controls.Lookup;
/** Tracks the current system status. */
msdyn_SystemStatus: DevKit.Controls.OptionSet;
/** Specify if Agreement is taxable. By default an Agreement is taxable if billing account is not tax exempt, and Agreement type is taxable. If any of the above is false it will be set to non-taxable. */
msdyn_Taxable: DevKit.Controls.Boolean;
notescontrol: DevKit.Controls.Note;
/** Owner Id */
OwnerId: DevKit.Controls.Lookup;
WebResource_AgreementRecordGeneration_TimeField: DevKit.Controls.WebResource;
}
interface Navigation {
nav_msdyn_msdyn_agreement_invoicedetail_Agreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_msdyn_agreement_OriginatingAgreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_msdyn_agreementbookingdate_Agreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_msdyn_agreementbookingincident_Agreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_msdyn_agreementbookingproduct_Agreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_msdyn_agreementbookingservice_Agreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_msdyn_agreementbookingservicetask_Agreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_msdyn_agreementbookingsetup_Agreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_msdyn_agreementinvoicedate_Agreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_msdyn_agreementinvoiceproduct_Agreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_msdyn_agreementinvoicesetup_Agreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_msdyn_workorder_Agreement: DevKit.Controls.NavigationItem,
navProcessSessions: DevKit.Controls.NavigationItem
}
/**
 * Controls surfaced by the "Agreement Business Process" business process flow.
 * Members suffixed _1/_2 are repeated instances of the same underlying field
 * (presumably one per process stage — confirm in the form designer).
 */
interface ProcessAgreement_Business_Process {
/** Account to be billed. If a billing account has been set on service account it will be populated by default. Otherwise, the billing account will be the same as the service account. */
msdyn_BillingAccount: DevKit.Controls.Lookup;
/** Type a description of the agreement. */
msdyn_Description: DevKit.Controls.String;
/** Type a description of the agreement. */
msdyn_Description_1: DevKit.Controls.String;
/** Shows the duration how long this agreement is active. */
msdyn_Duration: DevKit.Controls.Integer;
/** Enter the date when this agreement is no longer active. */
msdyn_EndDate: DevKit.Controls.Date;
/** Name of the agreement (no description in entity metadata). */
msdyn_name: DevKit.Controls.String;
/** Price List that controls pricing for products added to this Agreement. By default the system will use the Price List specified on the account */
msdyn_PriceList: DevKit.Controls.Lookup;
/** Account to be serviced */
msdyn_ServiceAccount: DevKit.Controls.Lookup;
/** Enter the date from when this agreement is active. */
msdyn_StartDate: DevKit.Controls.Date;
/** Agreement Substatus */
msdyn_SubStatus: DevKit.Controls.Lookup;
/** Agreement Substatus */
msdyn_SubStatus_1: DevKit.Controls.Lookup;
/** Tracks the current system status. */
msdyn_SystemStatus: DevKit.Controls.OptionSet;
/** Tracks the current system status. */
msdyn_SystemStatus_1: DevKit.Controls.OptionSet;
/** Tracks the current system status. */
msdyn_SystemStatus_2: DevKit.Controls.OptionSet;
}
/** Business process flows available on the form. */
interface Process extends DevKit.Controls.IProcess {
Agreement_Business_Process: ProcessAgreement_Business_Process;
}
/** Sub-grids rendered on the form. */
interface Grid {
bookingsgrid: DevKit.Controls.Grid;
invoicegrid: DevKit.Controls.Grid;
ContractLines: DevKit.Controls.Grid;
QuotesLinesGrid: DevKit.Controls.Grid;
}
}
/**
 * Typed wrapper for the "msdyn_agreement_Agreement" main form, exposing
 * Body, Header, Navigation, Process and Grid member typings.
 */
class Formmsdyn_agreement_Agreement extends DevKit.IForm {
/**
* DynamicsCrm.DevKit form msdyn_agreement_Agreement
* @param executionContext the execution context
* @param defaultWebResourceName default resource name. E.g.: "devkit_/resources/Resource"
*/
constructor(executionContext: any, defaultWebResourceName?: string);
/** Utility functions/methods/objects for Dynamics 365 form */
Utility: DevKit.Utility;
/** The Body section of form msdyn_agreement_Agreement */
Body: DevKit.Formmsdyn_agreement_Agreement.Body;
/** The Header section of form msdyn_agreement_Agreement */
Header: DevKit.Formmsdyn_agreement_Agreement.Header;
/** The Navigation of form msdyn_agreement_Agreement */
Navigation: DevKit.Formmsdyn_agreement_Agreement.Navigation;
/** The Process of form msdyn_agreement_Agreement */
Process: DevKit.Formmsdyn_agreement_Agreement.Process;
/** The Grid of form msdyn_agreement_Agreement */
Grid: DevKit.Formmsdyn_agreement_Agreement.Grid;
}
/**
 * Typings for the "Agreement - Mobile" form: tab/section layout (entries with
 * GUID-like names are auto-generated section ids), body controls, navigation
 * items, business-process-flow controls and sub-grids.
 */
namespace FormAgreement_Mobile {
interface tab__7A6686D3_72AD_4724_9C46_5BA3C5FAE32D_Sections {
_2BCF1BE9_1CEB_434D_9866_6486738F2ACC: DevKit.Controls.Section;
_7A6686D3_72AD_4724_9C46_5BA3C5FAE32D_SECTION_3: DevKit.Controls.Section;
_BC490053_F7D4_44E2_BFB7_1CA938F59E1B: DevKit.Controls.Section;
}
interface tab_fstab_general_Sections {
_4C8BEF3B_E06C_4411_B312_59F180556E4D_SECTION_3: DevKit.Controls.Section;
fstab_general_column_2_section_1: DevKit.Controls.Section;
fstab_general_section_details: DevKit.Controls.Section;
fstab_general_section_summary: DevKit.Controls.Section;
tab_3_section_2: DevKit.Controls.Section;
}
interface tab__7A6686D3_72AD_4724_9C46_5BA3C5FAE32D extends DevKit.Controls.ITab {
Section: tab__7A6686D3_72AD_4724_9C46_5BA3C5FAE32D_Sections;
}
interface tab_fstab_general extends DevKit.Controls.ITab {
Section: tab_fstab_general_Sections;
}
/** Tabs available on the Agreement - Mobile form. */
interface Tabs {
_7A6686D3_72AD_4724_9C46_5BA3C5FAE32D: tab__7A6686D3_72AD_4724_9C46_5BA3C5FAE32D;
fstab_general: tab_fstab_general;
}
/** Body controls of the Agreement - Mobile form. */
interface Body {
Tab: Tabs;
/** Shows the details of this agreement, as presented to the customer */
msdyn_AgreementDetails: DevKit.Controls.String;
/** This field defines the time of day when Work Orders and Invoices are generated by the Agreement Booking Setups and Agreement Invoice Setups defined on this Agreement. If this field is blank, the logic will look to the overall setting on the Field Service Settings record. */
msdyn_AgreementRecordGeneration: DevKit.Controls.DateTime;
/** Account to be billed. If a billing account has been set on service account it will be populated by default. Otherwise, the billing account will be the same as the service account. */
msdyn_BillingAccount: DevKit.Controls.Lookup;
/** Enter the date this agreement was canceled. */
msdyn_DateCanceled: DevKit.Controls.Date;
/** Type a description of the agreement. */
msdyn_Description: DevKit.Controls.String;
/** Shows the duration how long this agreement is active. */
msdyn_Duration: DevKit.Controls.Integer;
/** Enter the date when this agreement is no longer active. */
msdyn_EndDate: DevKit.Controls.Date;
/** Name of the agreement (no description in entity metadata). */
msdyn_name: DevKit.Controls.String;
/** If this agreement originates from another agreement, you should specify the originating agreement here */
msdyn_OriginatingAgreement: DevKit.Controls.Lookup;
/** Price List that controls pricing for products added to this Agreement. By default the system will use the Price List specified on the account */
msdyn_PriceList: DevKit.Controls.Lookup;
/** Tax Code to be used to calculate tax when Agreement is taxable. By default the system will use the tax code specified on the service account */
msdyn_SalesTaxCode: DevKit.Controls.Lookup;
/** Account to be serviced */
msdyn_ServiceAccount: DevKit.Controls.Lookup;
/** The service territory this Agreement relates to. By default this will be set to the Service Territory defined on the service account */
msdyn_ServiceTerritory: DevKit.Controls.Lookup;
/** Enter the date from when this agreement is active. */
msdyn_StartDate: DevKit.Controls.Date;
/** Agreement Substatus */
msdyn_SubStatus: DevKit.Controls.Lookup;
/** Tracks the current system status. */
msdyn_SystemStatus: DevKit.Controls.OptionSet;
/** Specify if Agreement is taxable. By default an Agreement is taxable if billing account is not tax exempt, and Agreement type is taxable. If any of the above is false it will be set to non-taxable. */
msdyn_Taxable: DevKit.Controls.Boolean;
/** Notes control for the record. */
notescontrol: DevKit.Controls.Note;
/** Owner Id */
OwnerId: DevKit.Controls.Lookup;
/** Embedded web resource (time picker for the record-generation field). */
WebResource_AgreementRecordGeneration_TimeField: DevKit.Controls.WebResource;
}
/** Related-record navigation items available on the Agreement - Mobile form. */
interface Navigation {
nav_msdyn_msdyn_agreement_invoicedetail_Agreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_msdyn_agreement_OriginatingAgreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_msdyn_agreementbookingdate_Agreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_msdyn_agreementbookingincident_Agreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_msdyn_agreementbookingproduct_Agreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_msdyn_agreementbookingservice_Agreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_msdyn_agreementbookingservicetask_Agreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_msdyn_agreementbookingsetup_Agreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_msdyn_agreementinvoicedate_Agreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_msdyn_agreementinvoiceproduct_Agreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_msdyn_agreementinvoicesetup_Agreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_msdyn_workorder_Agreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_quotedetail_Agreement: DevKit.Controls.NavigationItem,
nav_msdyn_msdyn_agreement_salesorderdetail_Agreement: DevKit.Controls.NavigationItem,
navAsyncOperations: DevKit.Controls.NavigationItem,
navProcessSessions: DevKit.Controls.NavigationItem
}
/**
 * Controls surfaced by the "Agreement Business Process" business process flow.
 * Members suffixed _1/_2 are repeated instances of the same underlying field
 * (presumably one per process stage — confirm in the form designer).
 */
interface ProcessAgreement_Business_Process {
/** Account to be billed. If a billing account has been set on service account it will be populated by default. Otherwise, the billing account will be the same as the service account. */
msdyn_BillingAccount: DevKit.Controls.Lookup;
/** Type a description of the agreement. */
msdyn_Description: DevKit.Controls.String;
/** Type a description of the agreement. */
msdyn_Description_1: DevKit.Controls.String;
/** Shows the duration how long this agreement is active. */
msdyn_Duration: DevKit.Controls.Integer;
/** Enter the date when this agreement is no longer active. */
msdyn_EndDate: DevKit.Controls.Date;
/** Name of the agreement (no description in entity metadata). */
msdyn_name: DevKit.Controls.String;
/** Price List that controls pricing for products added to this Agreement. By default the system will use the Price List specified on the account */
msdyn_PriceList: DevKit.Controls.Lookup;
/** Account to be serviced */
msdyn_ServiceAccount: DevKit.Controls.Lookup;
/** Enter the date from when this agreement is active. */
msdyn_StartDate: DevKit.Controls.Date;
/** Agreement Substatus */
msdyn_SubStatus: DevKit.Controls.Lookup;
/** Agreement Substatus */
msdyn_SubStatus_1: DevKit.Controls.Lookup;
/** Tracks the current system status. */
msdyn_SystemStatus: DevKit.Controls.OptionSet;
/** Tracks the current system status. */
msdyn_SystemStatus_1: DevKit.Controls.OptionSet;
/** Tracks the current system status. */
msdyn_SystemStatus_2: DevKit.Controls.OptionSet;
}
/** Business process flows available on the form. */
interface Process extends DevKit.Controls.IProcess {
Agreement_Business_Process: ProcessAgreement_Business_Process;
}
/** Sub-grids rendered on the form. */
interface Grid {
bookingsgrid: DevKit.Controls.Grid;
invoicegrid: DevKit.Controls.Grid;
}
}
/**
 * Typed wrapper for the "Agreement - Mobile" form, exposing Body,
 * Navigation, Process and Grid member typings.
 */
class FormAgreement_Mobile extends DevKit.IForm {
/**
* DynamicsCrm.DevKit form Agreement_Mobile
* @param executionContext the execution context
* @param defaultWebResourceName default resource name. E.g.: "devkit_/resources/Resource"
*/
constructor(executionContext: any, defaultWebResourceName?: string);
/** Utility functions/methods/objects for Dynamics 365 form */
Utility: DevKit.Utility;
/** The Body section of form Agreement_Mobile */
Body: DevKit.FormAgreement_Mobile.Body;
/** The Navigation of form Agreement_Mobile */
Navigation: DevKit.FormAgreement_Mobile.Navigation;
/** The Process of form Agreement_Mobile */
Process: DevKit.FormAgreement_Mobile.Process;
/** The Grid of form Agreement_Mobile */
Grid: DevKit.FormAgreement_Mobile.Grid;
}
/**
 * WebApi wrapper for the msdyn_agreement entity. Each field is exposed as a
 * typed value object (read/write or readonly) for use with the Dynamics 365
 * WebApi (OData) — see the "@odata.etag" member.
 */
class msdyn_agreementApi {
/**
* DynamicsCrm.DevKit msdyn_agreementApi
* @param entity The entity object
*/
constructor(entity?: any);
/**
* Get the value of alias
* @param alias the alias value
* @param isMultiOptionSet true if the alias is multi OptionSet
*/
getAliasedValue(alias: string, isMultiOptionSet?: boolean): any;
/**
* Get the formatted value of alias
* @param alias the alias value
* @param isMultiOptionSet true if the alias is multi OptionSet
*/
getAliasedFormattedValue(alias: string, isMultiOptionSet?: boolean): string;
/** The entity object */
Entity: any;
/** The entity name */
EntityName: string;
/** The entity collection name */
EntityCollectionName: string;
/** The @odata.etag is then used to build a cache of the response that is dependant on the fields that are retrieved */
"@odata.etag": string;
/** Unique identifier of the user who created the record. */
CreatedBy: DevKit.WebApi.LookupValueReadonly;
/** Shows the date and time when the record was created. The date and time are displayed in the time zone selected in Microsoft Dynamics 365 options. */
CreatedOn_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly;
/** Shows who created the record on behalf of another user. */
CreatedOnBehalfBy: DevKit.WebApi.LookupValueReadonly;
/** Shows the sequence number of the import that created this record. */
ImportSequenceNumber: DevKit.WebApi.IntegerValue;
/** Unique identifier of the user who modified the record. */
ModifiedBy: DevKit.WebApi.LookupValueReadonly;
/** Shows the date and time when the record was last updated. The date and time are displayed in the time zone selected in Microsoft Dynamics 365 options. */
ModifiedOn_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly;
/** Shows who last updated the record on behalf of another user. */
ModifiedOnBehalfBy: DevKit.WebApi.LookupValueReadonly;
/** Shows the details of this agreement, as presented to the customer */
msdyn_AgreementDetails: DevKit.WebApi.StringValue;
/** Shows the entity instances. */
msdyn_agreementId: DevKit.WebApi.GuidValue;
/** This field defines the time of day when Work Orders and Invoices are generated by the Agreement Booking Setups and Agreement Invoice Setups defined on this Agreement. If this field is blank, the logic will look to the overall setting on the Field Service Settings record. */
msdyn_AgreementRecordGeneration_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue;
/** Internal field used to generate the next name upon entity creation. It is optionally copied to the msdyn_name field. */
msdyn_AutoNumbering: DevKit.WebApi.StringValue;
/** Account to be billed. If a billing account has been set on service account it will be populated by default. Otherwise, the billing account will be the same as the service account. */
msdyn_BillingAccount: DevKit.WebApi.LookupValue;
/** (No description in entity metadata.) */
msdyn_ChildIndex: DevKit.WebApi.IntegerValue;
/** Enter the date this agreement was canceled. */
msdyn_DateCanceled_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue;
/** Type a description of the agreement. */
msdyn_Description: DevKit.WebApi.StringValue;
/** Shows the duration how long this agreement is active. */
msdyn_Duration: DevKit.WebApi.IntegerValue;
/** Enter the date when this agreement is no longer active. */
msdyn_EndDate_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue;
/** Name of the agreement (no description in entity metadata). */
msdyn_name: DevKit.WebApi.StringValue;
/** If this agreement originates from another agreement, you should specify the originating agreement here */
msdyn_OriginatingAgreement: DevKit.WebApi.LookupValue;
/** Price List that controls pricing for products added to this Agreement. By default the system will use the Price List specified on the account */
msdyn_PriceList: DevKit.WebApi.LookupValue;
/** For internal use only */
msdyn_ProcessStartedOn_TimezoneDateAndTime: DevKit.WebApi.TimezoneDateAndTimeValue;
/** Tax Code to be used to calculate tax when Agreement is taxable. By default the system will use the tax code specified on the service account */
msdyn_SalesTaxCode: DevKit.WebApi.LookupValue;
/** Account to be serviced */
msdyn_ServiceAccount: DevKit.WebApi.LookupValue;
/** The service territory this Agreement relates to. By default this will be set to the Service Territory defined on the service account */
msdyn_ServiceTerritory: DevKit.WebApi.LookupValue;
/** Enter the date from when this agreement is active. */
msdyn_StartDate_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue;
/** Agreement Substatus */
msdyn_SubStatus: DevKit.WebApi.LookupValue;
/** Tracks the current system status. */
msdyn_SystemStatus: DevKit.WebApi.OptionSetValue;
/** Specify if Agreement is taxable. By default an Agreement is taxable if billing account is not tax exempt, and Agreement type is taxable. If any of the above is false it will be set to non-taxable. */
msdyn_Taxable: DevKit.WebApi.BooleanValue;
/** Shows the date and time that the record was migrated. */
OverriddenCreatedOn_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue;
/** Enter the user who is assigned to manage the record. This field is updated every time the record is assigned to a different user */
OwnerId_systemuser: DevKit.WebApi.LookupValue;
/** Enter the team who is assigned to manage the record. This field is updated every time the record is assigned to a different team */
OwnerId_team: DevKit.WebApi.LookupValue;
/** Unique identifier for the business unit that owns the record */
OwningBusinessUnit: DevKit.WebApi.LookupValueReadonly;
/** Unique identifier for the team that owns the record. */
OwningTeam: DevKit.WebApi.LookupValueReadonly;
/** Unique identifier for the user that owns the record. */
OwningUser: DevKit.WebApi.LookupValueReadonly;
/** Contains the ID of the process associated with the entity. */
processid: DevKit.WebApi.GuidValue;
/** Contains the ID of the stage where the entity is located. */
stageid: DevKit.WebApi.GuidValue;
/** Status of the Agreement */
statecode: DevKit.WebApi.OptionSetValue;
/** Reason for the status of the Agreement */
statuscode: DevKit.WebApi.OptionSetValue;
/** For internal use only. */
TimeZoneRuleVersionNumber: DevKit.WebApi.IntegerValue;
/** Shows a comma-separated list of string values that represent the unique identifiers of stages in a business process flow instance in the order that they occur. */
traversedpath: DevKit.WebApi.StringValue;
/** Shows the time zone code that was in use when the record was created. */
UTCConversionTimeZoneCode: DevKit.WebApi.IntegerValue;
/** Version Number */
VersionNumber: DevKit.WebApi.BigIntValueReadonly;
}
}
/**
 * Option-set (picklist) value typings for the msdyn_agreement entity.
 * The numeric comment above each member is the value stored in Dataverse.
 */
declare namespace OptionSet {
namespace msdyn_agreement {
enum msdyn_SystemStatus {
/** 690970001 */
Active,
/** 690970003 */
Canceled,
/** 690970000 */
Estimate,
/** 690970002 */
Expired
}
enum statecode {
/** 0 */
Active,
/** 1 */
Inactive
}
enum statuscode {
/** 1 */
Active,
/** 2 */
Inactive
}
/** Calculation state of rollup attributes on this entity. */
enum RollupState {
/** 0 - Attribute value is yet to be calculated */
NotCalculated,
/** 1 - Attribute value has been calculated per the last update time in <AttributeSchemaName>_Date attribute */
Calculated,
/** 2 - Attribute value calculation lead to overflow error */
OverflowError,
/** 3 - Attribute value calculation failed due to an internal error, next run of calculation job will likely fix it */
OtherError,
/** 4 - Attribute value calculation failed because the maximum number of retry attempts to calculate the value were exceeded likely due to high number of concurrency and locking conflicts */
RetryLimitExceeded,
/** 5 - Attribute value calculation failed because maximum hierarchy depth limit for calculation was reached */
HierarchicalRecursionLimitReached,
/** 6 - Attribute value calculation failed because a recursive loop was detected in the hierarchy of the record */
LoopDetected
}
}
}
//{'JsForm':['Agreement','Agreement - Mobile'],'JsWebApi':true,'IsDebugForm':true,'IsDebugWebApi':true,'Version':'2.12.31','JsFormVersion':'v2'} | the_stack |
import { Component, OnDestroy, OnInit, Input } from "@angular/core";
import { ClipPathResizeMaskService } from "../../panel-scope-enchantment/clip-path-resize-mask.service";
import {
TPolygonType,
ClipPointModel,
ClipInsetModel,
ClipPolygonModel,
ClipCircleModel,
ClipEllipseModel,
} from "../../panel-scope-enchantment/model";
import { PanelWidgetModel } from "../../panel-widget/model";
import { Subscription } from "rxjs";
import { DraggablePort } from "@ng-public/directive/draggable/draggable.interface";
import { PanelScopeEnchantmentService } from "../../panel-scope-enchantment/panel-scope-enchantment.service";
import { debounceTime } from "rxjs/operators";
// 剪贴蒙版
@Component({
selector: "app-panel-widget-clip-path",
templateUrl: "./panel-widget-clip-path.component.html",
styleUrls: ["./panel-widget-clip-path.component.scss"],
})
export class PanelWidgetClipPathComponent implements OnInit {
private profileOuterSphereRX$: Subscription;
private clipPointDragRX$: Subscription;
// 订阅轮廓拉伸的变化
private profileTensileMoveRX$: Subscription;
@Input()
public widget: PanelWidgetModel;
// 父级宽高
public get widgetProfile(): { width: number; height: number } {
return { width: this.widget.profileModel.width, height: this.widget.profileModel.height };
}
// 正方形
public get clipInsetPath(): ClipInsetModel {
return this.clipPathResizeMaskService.clipInsetModel$.value;
}
// 多边形
public get clipPolygonPath(): ClipPolygonModel {
return this.clipPathResizeMaskService.clipPolygonModel$.value;
}
// 圆形
public get clipCirclePath(): ClipCircleModel {
return this.clipPathResizeMaskService.clipCircleModel$.value;
}
// 椭圆
public get clipEllipsePath(): ClipEllipseModel {
return this.clipPathResizeMaskService.clipEllipseModel$.value;
}
constructor(
private readonly clipPathResizeMaskService: ClipPathResizeMaskService,
private readonly panelScopeEnchantmentService: PanelScopeEnchantmentService
) {
this.clipPointDragRX$ = this.clipPathResizeMaskService.launchMouseDragClipPoint.subscribe(value => {
if (value && value.drag) {
if (this.clipPathResizeMaskService.clipPathMaskModel.currentPathType$.value == "inset") {
this.handleInsetClipDragPoint(value.drag, value.point);
} else if (this.clipPathResizeMaskService.clipPathMaskModel.currentPathType$.value == "polygon") {
this.handlePoltgonClipDragPoint(value.drag, value.point);
} else if (this.clipPathResizeMaskService.clipPathMaskModel.currentPathType$.value == "circle") {
this.handleCircleClipDragPoint(value.drag, value.point);
} else if (this.clipPathResizeMaskService.clipPathMaskModel.currentPathType$.value == "ellipse") {
this.handleEllipseClipDragPoint(value.drag, value.point);
}
}
});
this.profileOuterSphereRX$ = this.panelScopeEnchantmentService.scopeEnchantmentModel.profileOuterSphere$.subscribe(
pro => {
if (pro) {
if (this.profileTensileMoveRX$) this.profileTensileMoveRX$.unsubscribe();
this.profileTensileMoveRX$ = pro.valueChange$.pipe(debounceTime(1)).subscribe(() => {
if (
this.clipPathResizeMaskService.clipPathMaskModel.currentPathType$.value == "circle" &&
this.clipCirclePath
) {
this.clipCirclePath.handleCalcCircleSidePoint(this.widget.profileModel);
}
});
}
}
);
}
ngOnInit() {
const clipPath = this.widget.getObjKeyValue("panelClipPathModel");
if (clipPath) {
if (clipPath instanceof ClipInsetModel) {
this.clipPathResizeMaskService.createInsetClip(<ClipInsetModel>clipPath);
} else if (clipPath instanceof ClipEllipseModel) {
this.clipPathResizeMaskService.createEllipseClip(<ClipEllipseModel>clipPath);
} else if (clipPath instanceof ClipCircleModel) {
this.clipPathResizeMaskService.createCircleClip(<ClipCircleModel>clipPath);
} else if (clipPath instanceof ClipPolygonModel) {
this.clipPathResizeMaskService.createPolygonClip("", <ClipPolygonModel>clipPath);
}
}
}
ngOnDestroy() {
if (this.clipPointDragRX$) this.clipPointDragRX$.unsubscribe();
if (this.profileOuterSphereRX$) this.profileOuterSphereRX$.unsubscribe();
if (this.profileTensileMoveRX$) this.profileTensileMoveRX$.unsubscribe();
this.clipPathResizeMaskService.emptyClipPath();
}
/**
* 计算基于宽或高的百分比
*/
public calcPer(drag: DraggablePort, targ: number, type: "width" | "height" = "width"): number {
const calc =
(((targ / 100) * this.widgetProfile[type] + drag[type == "width" ? "left" : "top"]) /
this.widgetProfile[type]) *
100;
return calc <= 0 ? 0 : calc >= 100 ? 100 : calc;
}
/**
* 一键重置
*/
public resetClipData(): void {
this.clipPathResizeMaskService.emptyClipPath();
if (this.widget) {
this.widget.delStyleToUltimatelyStyle("clip-path");
this.widget.delObjKeyValue("panelClipPathModel");
}
}
/**
* 创建正方形蒙版
*/
public createInsetClip(): void {
this.clipPathResizeMaskService.createInsetClip();
if (this.widget) {
const inset = this.clipPathResizeMaskService.clipInsetModel$.value;
this.widget.addObjKeyValue({ panelClipPathModel: inset });
this.widget.addStyleToUltimatelyStyle(inset.styleContent);
}
}
/**
* 创建圆形
* 同时计算圆周上的点的位置
*/
public createCircleClip(): void {
this.clipPathResizeMaskService.createCircleClip();
if (this.widget) {
const circle = this.clipPathResizeMaskService.clipCircleModel$.value;
circle.handleCalcCircleSidePoint(this.widget.profileModel);
this.widget.addObjKeyValue({ panelClipPathModel: circle });
this.widget.addStyleToUltimatelyStyle(circle.styleContent);
}
}
/**
* 创建椭圆
*/
public createEllpiseClip(): void {
this.clipPathResizeMaskService.createEllipseClip();
if (this.widget) {
const ellipse = this.clipPathResizeMaskService.clipEllipseModel$.value;
this.widget.addObjKeyValue({ panelClipPathModel: ellipse });
this.widget.addStyleToUltimatelyStyle(ellipse.styleContent);
}
}
/**
* 创建多边形
*/
public createPolygon(type: TPolygonType): void {
this.clipPathResizeMaskService.createPolygonClip(type);
if (this.widget) {
const polygon = this.clipPathResizeMaskService.clipPolygonModel$.value;
this.widget.addObjKeyValue({ panelClipPathModel: polygon });
this.widget.addStyleToUltimatelyStyle(polygon.styleContent);
}
}
/**
* 拖拽椭圆的裁切点
*/
public handleEllipseClipDragPoint(drag: DraggablePort, point: ClipPointModel): void {
if (point.type === "ellipse-r") {
point.left = this.calcPer(drag, point.left);
} else if (point.type === "ellipse-b") {
point.top = this.calcPer(drag, point.top, "height");
} else if (point.type === "ellipse-c") {
const radiusX = Math.abs(this.clipEllipsePath.ellR.left - this.clipEllipsePath.ellC.left);
const radiusY = Math.abs(this.clipEllipsePath.ellB.top - this.clipEllipsePath.ellC.top);
point.left = this.calcPer(drag, point.left);
point.top = this.calcPer(drag, point.top, "height");
this.clipEllipsePath.ellR.top = this.calcPer(drag, this.clipEllipsePath.ellR.top, "height");
this.clipEllipsePath.ellB.left = this.calcPer(drag, this.clipEllipsePath.ellB.left);
if (point.left >= 50) {
this.clipEllipsePath.ellR.left = point.left - radiusX;
} else if (point.left < 50) {
this.clipEllipsePath.ellR.left = point.left + radiusX;
}
if (point.top >= 50) {
this.clipEllipsePath.ellB.top = point.top - radiusY;
} else if (point.top < 50) {
this.clipEllipsePath.ellB.top = point.top + radiusY;
}
}
this.widget.addStyleToUltimatelyStyle(this.clipEllipsePath.styleContent);
}
/**
* 拖拽圆形的裁切点
*/
public handleCircleClipDragPoint(drag: DraggablePort, point: ClipPointModel): void {
if (point.type == "circle-c") {
point.left = this.calcPer(drag, point.left);
point.top = this.calcPer(drag, point.top, "height");
this.clipCirclePath.handleCalcCircleSidePoint(this.widget.profileModel);
} else if (point.type == "circle-side") {
const k = this.clipCirclePath.calcSlopeK(this.widget.profileModel);
const b = this.clipCirclePath.calcSlopeB(this.widget.profileModel);
point.left += drag.left;
point.top = this.widget.profileModel.height - (k * point.left + b);
this.clipCirclePath.handleSideToRadius(this.widget.profileModel);
}
this.widget.addStyleToUltimatelyStyle(this.clipCirclePath.styleContent);
}
/**
* 拖拽多边形的裁切点
*/
public handlePoltgonClipDragPoint(drag: DraggablePort, point: ClipPointModel): void {
point.left = this.calcPer(drag, point.left);
point.top = this.calcPer(drag, point.top, "height");
this.widget.addStyleToUltimatelyStyle(this.clipPolygonPath.styleContent);
}
/**
* 拖拽正方形的裁切点
*/
public handleInsetClipDragPoint(drag: DraggablePort, point: ClipPointModel): void {
const mathObj = {
"inset-l": () => {
this.clipInsetPath.insetL.left = this.calcPer(drag, this.clipInsetPath.insetL.left);
this.clipInsetPath.insetT.left =
this.clipInsetPath.insetL.left +
(this.clipInsetPath.insetR.left - this.clipInsetPath.insetL.left) / 2;
this.clipInsetPath.insetB.left = this.clipInsetPath.insetT.left;
this.clipInsetPath.insetC.left = this.clipInsetPath.insetT.left;
},
"inset-t": () => {
this.clipInsetPath.insetT.top = this.calcPer(drag, this.clipInsetPath.insetT.top, "height");
this.clipInsetPath.insetL.top =
this.clipInsetPath.insetT.top + (this.clipInsetPath.insetB.top - this.clipInsetPath.insetT.top) / 2;
this.clipInsetPath.insetR.top = this.clipInsetPath.insetL.top;
this.clipInsetPath.insetC.top = this.clipInsetPath.insetL.top;
},
"inset-r": () => {
this.clipInsetPath.insetR.left = this.calcPer(drag, this.clipInsetPath.insetR.left);
this.clipInsetPath.insetT.left =
this.clipInsetPath.insetL.left +
(this.clipInsetPath.insetR.left - this.clipInsetPath.insetL.left) / 2;
this.clipInsetPath.insetB.left = this.clipInsetPath.insetT.left;
this.clipInsetPath.insetC.left = this.clipInsetPath.insetT.left;
},
"inset-b": () => {
this.clipInsetPath.insetB.top = this.calcPer(drag, this.clipInsetPath.insetB.top, "height");
this.clipInsetPath.insetL.top =
this.clipInsetPath.insetT.top + (this.clipInsetPath.insetB.top - this.clipInsetPath.insetT.top) / 2;
this.clipInsetPath.insetR.top = this.clipInsetPath.insetL.top;
this.clipInsetPath.insetC.top = this.clipInsetPath.insetL.top;
},
"inset-c": () => {
// 验证横向是否已达到边缘,是的话则不允许左右移动
const isSideV = (): boolean => {
return (
this.calcPer(drag, this.clipInsetPath.insetL.left) == 0 ||
this.calcPer(drag, this.clipInsetPath.insetR.left) == 100
);
};
// 验证纵向是否已达到边缘,是的话则不允许上下移动
const isSideH = (): boolean => {
return (
this.calcPer(drag, this.clipInsetPath.insetT.top, "height") == 0 ||
this.calcPer(drag, this.clipInsetPath.insetB.top, "height") == 100
);
};
if (!isSideV()) {
this.clipInsetPath.insetC.left = this.calcPer(drag, this.clipInsetPath.insetC.left);
this.clipInsetPath.insetL.left = this.calcPer(drag, this.clipInsetPath.insetL.left);
this.clipInsetPath.insetT.left = this.calcPer(drag, this.clipInsetPath.insetT.left);
this.clipInsetPath.insetR.left = this.calcPer(drag, this.clipInsetPath.insetR.left);
this.clipInsetPath.insetB.left = this.calcPer(drag, this.clipInsetPath.insetB.left);
}
if (!isSideH()) {
this.clipInsetPath.insetC.top = this.calcPer(drag, this.clipInsetPath.insetC.top, "height");
this.clipInsetPath.insetL.top = this.calcPer(drag, this.clipInsetPath.insetL.top, "height");
this.clipInsetPath.insetT.top = this.calcPer(drag, this.clipInsetPath.insetT.top, "height");
this.clipInsetPath.insetR.top = this.calcPer(drag, this.clipInsetPath.insetR.top, "height");
this.clipInsetPath.insetB.top = this.calcPer(drag, this.clipInsetPath.insetB.top, "height");
}
},
};
if (mathObj[point.type]) {
mathObj[point.type]();
}
this.widget.addStyleToUltimatelyStyle(this.clipInsetPath.styleContent);
}
} | the_stack |
import { DataTypeConfig, FieldType } from '@terascope/types';
import { BigMap } from '@terascope/utils';
import {
Column,
KeyAggregation, ValueAggregation, valueAggMap,
FieldAgg, KeyAggFn, keyAggMap, makeUniqueKeyAgg
} from '../column';
import { getCommonTupleType, isNumberLike, Vector } from '../vector';
import { Builder } from '../builder';
import { getBuilderForField, getMaxColumnSize } from './utils';
import {
FieldArg, flattenStringArg, freezeArray, getFieldsFromArg
} from '../core';
import { columnsToDataTypeConfig, makeKeyForRow } from '../data-frame/utils';
/**
* A deferred execution frame dedicated to running a aggregations.
*
* This is different from a DataFrame for a few reasons:
* - GroupBy and aggregations have to run at the same time in-order to get the correctly results.
* - The operations are added to an instruction set and in one optimized execution.
* - All methods in the AggregationFrame will mutate the execution
* instructions instead of return a new instance with the applied changes.
*/
export class AggregationFrame<
T extends Record<string, any>
> {
/**
* The name of the Frame
*/
name?: string;
/**
* The list of columns
*/
columns: readonly Column<any, keyof T>[];
/**
* An array of the column names
*/
fields: readonly (keyof T)[];
/**
* Metadata about the Frame
*/
readonly metadata: Record<string, any>;
/**
* The field to sort by
* @internal
*/
protected _sortFields?: (readonly (keyof T)[])|(readonly string[]);
/**
* The number of records to limit the result by
*/
protected _limit?: number;
/**
* The Aggregations to run
*/
protected readonly _aggregations = new Map<keyof T, AggObject>();
/**
* Group By fields
*/
protected _groupByFields: readonly (keyof T)[];
/**
* The field to sort by
*/
protected _selectFields?: readonly (keyof T)[];
/**
* Use this to cache the the column index needed, this
* should speed things up
*/
protected _fieldToColumnIndexCache?: Map<(keyof T), number>;
constructor(
    columns: Column<any, keyof T>[]|readonly Column<any, keyof T>[],
    options: AggregationFrameOptions
) {
    // Freeze the column list and derive the field-name list from it.
    this.columns = freezeArray(columns);
    this.fields = Object.freeze(this.columns.map((column) => column.name));
    // Shallow-copy caller-supplied metadata so later mutations don't leak back.
    const meta = options?.metadata;
    this.metadata = meta ? { ...meta } : {};
    this.name = options?.name;
    this._groupByFields = [];
}
/**
 * The number of records in the AggregationFrame — the size of the
 * largest column.
 */
get size(): number {
    const largest = getMaxColumnSize(this.columns);
    return largest;
}
/**
 * Build the DataType config that describes the current columns.
 */
get config(): DataTypeConfig {
    const dataTypeConfig = columnsToDataTypeConfig(this.columns);
    return dataTypeConfig;
}
/**
 * Not implemented for aggregation frames.
 * @throws always
 */
get id(): string {
    const frameType = this.constructor.name;
    throw new Error(`${frameType}.id is currently unsupported`);
}
/**
 * Add fields to group the aggregation results by.
 * Repeated calls append to (rather than replace) the existing list.
 */
groupBy(...fieldArg: FieldArg<keyof T>[]): this {
    const resolved = Array.from(getFieldsFromArg(this.fields, fieldArg));
    const combined = this._groupByFields.concat(resolved);
    this._groupByFields = Object.freeze(combined);
    return this;
}
/**
 * Queue an `avg` aggregation for a column: when the frame executes,
 * the column's grouped values are averaged.
 *
 * @note only works numeric data types
 *
 * @param field the name of the column to run the aggregation on
 * @param as a optional name for the new column with the aggregated values
 */
[ValueAggregation.avg](field: keyof T): this;
[ValueAggregation.avg]<A extends string>(
    field: keyof T, as: A
): AggregationFrame<WithAlias<T, A, number>>;
[ValueAggregation.avg]<A extends string>(
    field: keyof T,
    as?: A
): this|AggregationFrame<WithAlias<T, A, number>> {
    const { name: column } = this._ensureColumn(field, as);
    this._ensureNumericLike(column, ValueAggregation.avg);
    // Merge into any aggregation entry already registered for this column.
    const entry = this._aggregations.get(column) ?? { };
    entry.value = ValueAggregation.avg;
    this._aggregations.set(column, entry);
    return this as AggregationFrame<T|WithAlias<T, A, number>>;
}
/**
* Add all of the values in a column together
*
* @note only works numeric data types
*
* @param field the name of the column to run the aggregation on
* @param as a optional name for the new column with the aggregated values
*/
[ValueAggregation.sum](field: keyof T): this;
[ValueAggregation.sum]<A extends string>(
field: keyof T, as: A
): AggregationFrame<WithAlias<T, A, number>>;
[ValueAggregation.sum]<A extends string>(
field: keyof T,
as?: A
): this|AggregationFrame<WithAlias<T, A, number>> {
const { name } = this._ensureColumn(field, as);
this._ensureNumericLike(name, ValueAggregation.sum);
// Merge into any existing agg config for this column
const existing = this._aggregations.get(name) || { };
existing.value = ValueAggregation.sum;
this._aggregations.set(name, existing);
return this as AggregationFrame<T|WithAlias<T, A, number>>;
}
/**
* Find the minimum value in a column
*
* @note only works numeric data types
*
* @param field the name of the column to run the aggregation on
* @param as a optional name for the new column with the aggregated values
*/
[ValueAggregation.min](field: keyof T): this;
[ValueAggregation.min]<A extends string>(
field: keyof T, as: A
): AggregationFrame<WithAlias<T, A, number>>;
[ValueAggregation.min]<A extends string>(
field: keyof T,
as?: A
): this|AggregationFrame<WithAlias<T, A, number>> {
const { name } = this._ensureColumn(field, as);
this._ensureNumericLike(name, ValueAggregation.min);
// Merge into any existing agg config for this column
const existing = this._aggregations.get(name) || { };
existing.value = ValueAggregation.min;
this._aggregations.set(name, existing);
return this as AggregationFrame<T|WithAlias<T, A, number>>;
}
/**
* Find the maximum value in a column
*
* @note only works numeric data types
*
* @param field the name of the column to run the aggregation on
* @param as a optional name for the new column with the aggregated values
*/
[ValueAggregation.max](field: keyof T): this;
[ValueAggregation.max]<A extends string>(
field: keyof T, as: A
): AggregationFrame<WithAlias<T, A, number>>;
[ValueAggregation.max]<A extends string>(
field: keyof T,
as?: A
): this|AggregationFrame<WithAlias<T, A, number>> {
const { name } = this._ensureColumn(field, as);
this._ensureNumericLike(name, ValueAggregation.max);
// Merge into any existing agg config for this column
const existing = this._aggregations.get(name) || { };
existing.value = ValueAggregation.max;
this._aggregations.set(name, existing);
return this as AggregationFrame<T|WithAlias<T, A, number>>;
}
/**
* Count all of the values in a column
*
* @param field the name of the column to run the aggregation on
* @param as a optional name for the new column with the aggregated values
*/
[ValueAggregation.count](field: keyof T): this;
[ValueAggregation.count]<A extends string>(
field: keyof T, as: A
): AggregationFrame<WithAlias<T, A, number>>;
[ValueAggregation.count]<A extends string>(
field: keyof T,
as?: A
): this|AggregationFrame<WithAlias<T, A, number>> {
// count works on any field type, so no numeric-like check here
const { name } = this._ensureColumn(field, as);
const existing = this._aggregations.get(name) || { };
existing.value = ValueAggregation.count;
this._aggregations.set(name, existing);
return this as AggregationFrame<T|WithAlias<T, A, number>>;
}
/**
* Group the data in hourly buckets
*
* @note only works Date data types
*
* @param field the name of the column to run the aggregation on
*/
[KeyAggregation.hourly](field: keyof T): this {
const { name, type } = this._ensureColumn(field);
if (type === FieldType.Date) {
const agg = this._aggregations.get(name) ?? { };
agg.key = KeyAggregation.hourly;
this._aggregations.set(name, agg);
return this;
}
throw new Error(`${KeyAggregation.hourly} requires a ${FieldType.Date} field type`);
}
/**
* Group the data in daily buckets
*
* @note only works Date data types
*
* @param field the name of the column to run the aggregation on
*/
[KeyAggregation.daily](field: keyof T): this {
const { name, type } = this._ensureColumn(field);
if (type === FieldType.Date) {
const agg = this._aggregations.get(name) ?? { };
agg.key = KeyAggregation.daily;
this._aggregations.set(name, agg);
return this;
}
throw new Error(`${KeyAggregation.daily} requires a ${FieldType.Date} field type`);
}
/**
* Group the data in monthly buckets
*
* @note only works Date data types
*
* @param field the name of the column to run the aggregation on
*/
[KeyAggregation.monthly](field: keyof T): this {
const { name, type } = this._ensureColumn(field);
if (type === FieldType.Date) {
const agg = this._aggregations.get(name) ?? { };
agg.key = KeyAggregation.monthly;
this._aggregations.set(name, agg);
return this;
}
throw new Error(`${KeyAggregation.monthly} requires a ${FieldType.Date} field type`);
}
/**
* Group the data in yearly buckets
*
* @note only works Date data types
*
* @param field the name of the column to run the aggregation on
*/
[KeyAggregation.yearly](field: keyof T): this {
const { name, type } = this._ensureColumn(field);
if (type === FieldType.Date) {
const agg = this._aggregations.get(name) ?? { };
agg.key = KeyAggregation.yearly;
this._aggregations.set(name, agg);
return this;
}
throw new Error(`${KeyAggregation.yearly} requires a ${FieldType.Date} field type`);
}
/**
* Order the rows by fields, the format is `field:asc` or `field:desc`.
* Defaults to `asc` if none specified.
*
* @throws if more than one field is given (current limitation)
*/
orderBy(...fieldArgs: FieldArg<string>[]): this;
orderBy(...fieldArgs: FieldArg<keyof T>[]): this;
orderBy(...fieldArgs: (FieldArg<keyof T>[]|FieldArg<string>[])): this {
const fields = flattenStringArg(fieldArgs);
if (fields.size > 1) {
// fixed grammar in the error message ("can only works")
throw new Error('AggregationFrame.orderBy can only work with one field currently');
}
this._sortFields = Object.freeze([...fields]);
return this;
}
/**
* Sort the records by a field; a thin alias of orderBy.
*
* @see orderBy
*/
sort(...fieldArgs: FieldArg<string>[]): this;
sort(...fieldArgs: FieldArg<keyof T>[]): this;
sort(...fieldArgs: (FieldArg<keyof T>[]|FieldArg<string>[])): this {
// Delegate entirely to orderBy, including its one-field restriction
return this.orderBy(...fieldArgs);
}
/**
* Limit the number of results being returned.
*
* @param num the maximum number of records to keep
*/
limit(num: number): this {
this._limit = num;
return this;
}
/**
* After the aggregations run, return only these selected fields.
*/
select<K extends keyof T>(...fieldArg: FieldArg<K>[]): AggregationFrame<Pick<T, K>> {
const resolved = getFieldsFromArg(this.fields, fieldArg);
this._selectFields = Object.freeze(Array.from(resolved));
return this as any;
}
/**
* Get a column by name, caching the name -> index lookup.
*/
getColumn<P extends keyof T>(field: P): Column<T[P], P>|undefined {
const cache = this._fieldToColumnIndexCache;
// Fast path: the index for this field was looked up before
if (cache?.has(field)) {
return this.getColumnAt<P>(cache.get(field)!);
}
const index = this.columns.findIndex((col) => col.name === field);
if (cache) {
cache.set(field, index);
} else {
this._fieldToColumnIndexCache = new Map([[field, index]]);
}
return this.getColumnAt<P>(index);
}
/**
* Get a column by name or throw if not found
*/
getColumnOrThrow<P extends keyof T>(field: P): Column<T[P], P> {
const column = this.getColumn(field);
if (!column) {
throw new Error(`Unknown column ${field} in${
this.name ? ` ${this.name}` : ''
} ${this.constructor.name}`);
}
return column;
}
/**
* Get a column by positional index (undefined when out of range).
*/
getColumnAt<P extends keyof T>(index: number): Column<T[P], P>|undefined {
const col = this.columns[index];
return col as Column<any, P>|undefined;
}
/**
* Rename an existing column, updating every internal structure
* (columns, fields, sort/select/groupBy lists and aggregations)
* that refers to it by name.
*/
rename<K extends keyof T, R extends string>(
name: K,
renameTo: R,
): AggregationFrame<Omit<T, K> & Record<R, T[K]>> {
this.columns = Object.freeze(
this.columns.map((col): Column<any, any> => {
if (col.name !== name) return col;
return col.rename(renameTo);
})
);
// Cached name -> index entries may reference the old name
this._fieldToColumnIndexCache?.clear();
this.fields = Object.freeze(this.fields.map((field) => {
if (field === name) return renameTo;
return field;
}));
// Sort entries can be a bare `field` or a `field:asc|desc` pair
this._sortFields = this._sortFields ? Object.freeze(
(this._sortFields as string[]).map((field) => {
if (field === name) return renameTo;
if (field.startsWith(`${name}:`)) {
return field.replace(`${name}:`, `${renameTo}:`);
}
return field;
}) as any[]
) : this._sortFields;
this._selectFields = this._selectFields ? Object.freeze(
this._selectFields.map((field) => {
if (field === name) return renameTo;
return field;
})
) : this._selectFields;
this._groupByFields = Object.freeze(
this._groupByFields.map((field) => {
if (field === name) return renameTo;
return field;
})
);
// Carry any configured aggregation over to the new name
const agg = this._aggregations.get(name);
if (agg) {
this._aggregations.delete(name);
this._aggregations.set(renameTo, agg);
}
return this as any;
}
/**
* Assign new columns, if given a column already exists,
* the column will replace the existing one.
*/
assign<R extends Record<string, unknown> = Record<string, any>>(
columns: readonly Column<any>[]
): AggregationFrame<T & R> {
// Columns whose names are not already present get appended
const additions = columns.filter((col) => this.getColumn(col.name) == null);
// Existing columns with a matching name are swapped in place
const replaced = this.columns.map((col) => {
const replacement = columns.find((c) => c.name === col.name);
return replacement ?? col;
});
this.columns = Object.freeze(replaced.concat(additions)) as readonly Column<any>[];
this._fieldToColumnIndexCache?.clear();
this.fields = Object.freeze(
this.fields.concat(additions.map((col) => col.name))
);
return this as any;
}
// Resolve a field to its column; when `as` is given, insert an aliased
// copy of the column right after the source and return the alias name.
private _ensureColumn(field: keyof T, as?: string): {
name: keyof T,
type: FieldType
} {
const col = this.getColumnOrThrow(field);
const type = col.config.type as FieldType;
if (!as) {
return { name: col.name, type };
}
const expanded: Column<any, keyof T>[] = [];
for (const c of this.columns) {
expanded.push(c);
if (c === col) {
expanded.push(c.rename(as));
this.fields = Object.freeze(this.fields.concat(as));
}
}
this.columns = Object.freeze(expanded);
this._fieldToColumnIndexCache?.clear();
return { name: as, type };
}
private _ensureNumericLike(field: keyof T, ctx: string): void {
const column = this.getColumnOrThrow(field);
let fieldType: FieldType;
if (column.config.type === FieldType.Tuple) {
fieldType = getCommonTupleType(
field as string, column.vector.childConfig
);
} else {
fieldType = column.config.type as FieldType;
}
if (!isNumberLike(fieldType)) {
throw new Error(`${ctx} requires a numeric field type`);
}
}
/**
* Execute the aggregations and flatten the grouped data.
* Assigns the new columns to this.
*
* @todo move the limit and sort logic to here
*/
async execute(): Promise<this> {
if (!this._aggregations.size && !this._groupByFields.length) {
throw new Error('No aggregations specified to run');
}
// Bucket every row by its group/key aggregation, running the value
// aggregations per bucket as rows are visited.
const buckets = this._generateBuckets(
this._aggregationBuilders()
);
// One builder per (selected) column, sized to the bucket count.
const builders = this._generateBuilders(buckets.size);
for (const [key, bucket] of buckets) {
this._buildBucket(builders, bucket);
// free up that memory
// (deleting the current entry during for..of is safe for Map)
buckets.delete(key);
}
this.columns = Object.freeze([...builders].map(([name, builder]) => {
const column = this.getColumnOrThrow(name);
return column.fork(builder.toVector());
}));
this._fieldToColumnIndexCache?.clear();
return this;
}
/**
* Reset the aggregation configuration back to its initial state.
*/
reset(): this {
this._aggregations.clear();
this._groupByFields = Object.freeze([]);
this._limit = undefined;
this._selectFields = undefined;
this._sortFields = undefined;
return this;
}
// Walk every row, compute its bucket key and feed the row into that
// bucket's per-field aggregators.
private _generateBuckets(
{ keyAggs, fieldAggMakers }: AggBuilders<T>
): BigMap<string, Bucket<T>> {
const buckets = new BigMap<string, Bucket<T>>();
const getFieldAggs = makeGetFieldAggs(buckets);
for (let row = 0; row < this.size; row++) {
const res = makeKeyForRow(keyAggs, row);
if (!res) continue;
const process = makeProcessFieldAgg(getFieldAggs(res.key, row), row);
fieldAggMakers.forEach(process);
}
return buckets;
}
// Create a vector builder for every column that survives the select
// filter (no select fields means keep every column).
private _generateBuilders(size: number): Map<keyof T, Builder<any>> {
const builders = new Map<keyof T, Builder<any>>();
const selected = this._selectFields;
for (const col of this.columns) {
if (selected?.length && !selected.includes(col.name)) continue;
const agg = this._aggregations.get(col.name);
builders.set(col.name, getBuilderForField(
col, size, agg?.key, agg?.value
));
}
return builders;
}
// Append one output row (this bucket) to every column builder:
// aggregated fields take the agg result, other fields take the value
// from the bucket's first row.
private _buildBucket(
builders: Map<keyof T, Builder<any>>,
bucket: Bucket<T>,
): void {
// min/max style aggs may want a different representative row
if (bucket[0].adjustsSelectedRow) {
return this._buildBucketWithAdjustedRowIndex(builders, bucket);
}
const [fieldAggs, startIndex] = bucket;
for (const [field, builder] of builders) {
const agg = fieldAggs.get(field);
if (agg != null) {
builder.append(agg.flush().value);
} else {
const value = this.getColumnOrThrow(field).vector.get(startIndex);
// advance the write position even when value is null
const writeIndex = builder.currentIndex++;
if (value != null) {
// doing this is faster than append
// because we KNOW it is already in a valid format
builder.data.set(
writeIndex,
value
);
}
}
}
}
/**
* this is slower version of the aggregation
* that will select a specific row that shows more
* correct information, like for min and max
*/
private _buildBucketWithAdjustedRowIndex(
builders: Map<keyof T, Builder<any>>,
[fieldAggs, startIndex]: Bucket<T>,
): void {
// Flush the aggregated fields first so their reported row index can
// pick the representative row for the non-aggregated fields.
// NOTE(review): takes the LARGEST reported index when several aggs
// report one — confirm that is the intended tie-break.
let useIndex = startIndex;
const remainingFields: (keyof T)[] = [];
for (const [field, builder] of builders) {
const agg = fieldAggs.get(field);
if (agg != null) {
const res = agg.flush();
if (res.index != null && res.index > useIndex) {
useIndex = res.index;
}
builder.append(res.value);
} else {
remainingFields.push(field);
}
}
// Now copy the non-aggregated fields from the chosen row.
for (const field of remainingFields) {
const builder = builders.get(field)!;
const value = this.getColumnOrThrow(field).vector.get(useIndex);
const writeIndex = builder.currentIndex++;
if (value != null) {
// doing this is faster than append
// because we KNOW it is already in a valid format
builder.data.set(writeIndex, value);
}
}
}
/**
* Build the key and value aggregation functions for each column:
* groupBy fields get a unique-key aggregation, fields with a value
* aggregation get a field-agg maker, and fields with a key (time
* bucket) aggregation get the matching key agg. A key aggregation
* cannot be combined with groupBy on the same field.
*/
private _aggregationBuilders(): AggBuilders<T> {
const fieldAggMakers = new Map<keyof T, FieldAggMaker>();
const keyAggs = new Map<keyof T, KeyAggFn>();
for (const col of this.columns) {
const agg = this._aggregations.get(col.name);
const isInGroupBy = this._groupByFields.includes(col.name);
if (isInGroupBy) {
keyAggs.set(col.name, makeUniqueKeyAgg(col.vector));
}
if (agg) {
if (agg.value) {
fieldAggMakers.set(col.name, curryFieldAgg(agg.value, col.vector));
}
if (agg.key) {
if (isInGroupBy) {
// fixed typo in the error message ("Invalid to combination")
throw new Error(
`Invalid combination of groupBy and ${agg.key} for field ${col.name}`
);
}
keyAggs.set(col.name, keyAggMap[agg.key](col.vector));
}
}
}
return {
fieldAggMakers, keyAggs
};
}
}
// Per-field aggregation selection: an optional bucketing key agg
// and/or an optional value agg.
type AggObject = {
key?: KeyAggregation; value?: ValueAggregation
};
// Adds alias column A to T (note: maps every key to V, which matches
// how the aggregation methods use it for numeric results).
type WithAlias<T extends Record<string, unknown>, A extends string, V> = {
[P in (keyof T)|A]: V;
}
// A factory for a fresh FieldAgg paired with the vector it reads from.
type FieldAggMaker = [fieldAgg: () => FieldAgg, vector: Vector<any>];
// Per-bucket field aggregators, flagged when any agg wants to pick a
// specific representative row (e.g. min/max).
type FieldAggsMap<T extends Record<string, unknown>> = Map<keyof T, FieldAgg> & {
adjustsSelectedRow: boolean
};
// A bucket: its field aggregators plus the first row index that fell in.
type Bucket<T extends Record<string, unknown>> = [
fieldAggs: FieldAggsMap<T>, startIndex: number
];
// The combined key/value aggregation builders for a frame.
interface AggBuilders<T extends Record<string, any>> {
fieldAggMakers: Map<keyof T, FieldAggMaker>;
keyAggs: Map<keyof T, KeyAggFn>;
}
/**
* AggregationFrame options
*/
export interface AggregationFrameOptions {
// Optional display name, used in error messages
name?: string;
// Arbitrary metadata copied onto the frame
metadata?: Record<string, any>;
}
// Pair a lazily-constructed value aggregator with its source vector.
function curryFieldAgg(
valueAgg: ValueAggregation,
vector: Vector<any>
): FieldAggMaker {
const make = () => valueAggMap[valueAgg](vector);
return [make, vector];
}
// Returns a lookup that fetches the bucket for a key, lazily creating
// it (recording the first row index) on first sight.
function makeGetFieldAggs<T extends Record<string, any>>(buckets: BigMap<string, Bucket<T>>) {
return function getFieldAggs(key: string, index: number): FieldAggsMap<T> {
const existing = buckets.get(key);
if (existing) return existing[0];
const fieldAggs = new Map() as FieldAggsMap<T>;
fieldAggs.adjustsSelectedRow = false;
buckets.set(key, [fieldAggs, index]);
return fieldAggs;
};
}
function makeProcessFieldAgg<T extends Record<string, any>>(
fieldAggs: FieldAggsMap<T>, index: number
) {
return function processFieldAgg(maker: FieldAggMaker, field: keyof T) {
let agg = fieldAggs.get(field);
if (!agg) {
agg = maker[0]();
fieldAggs.set(field, agg);
}
if (agg.adjustsSelectedRow) {
fieldAggs.adjustsSelectedRow = true;
}
agg.push(maker[1].get(index), index);
};
} | the_stack |
// clang-format off
import {webUIListenerCallback} from 'chrome://resources/js/cr.m.js';
import {flush} from 'chrome://resources/polymer/v3_0/polymer/polymer_bundled.min.js';
import {HatsBrowserProxyImpl, LifetimeBrowserProxyImpl, MetricsBrowserProxyImpl, OpenWindowProxyImpl, PasswordCheckReferrer, PasswordManagerImpl, Router, routes, SafetyCheckBrowserProxy, SafetyCheckBrowserProxyImpl, SafetyCheckCallbackConstants, SafetyCheckChromeCleanerStatus, SafetyCheckExtensionsStatus, SafetyCheckIconStatus, SafetyCheckInteractions, SafetyCheckParentStatus, SafetyCheckPasswordsStatus, SafetyCheckSafeBrowsingStatus, SafetyCheckUpdatesStatus, SettingsSafetyCheckChildElement, SettingsSafetyCheckExtensionsChildElement, SettingsSafetyCheckPageElement, SettingsSafetyCheckPasswordsChildElement, SettingsSafetyCheckSafeBrowsingChildElement, SettingsSafetyCheckUpdatesChildElement, TrustSafetyInteraction} from 'chrome://settings/settings.js';
import {assertEquals, assertFalse, assertTrue} from 'chrome://webui-test/chai_assert.js';
import {TestBrowserProxy} from 'chrome://webui-test/test_browser_proxy.js';
import {TestHatsBrowserProxy} from './test_hats_browser_proxy.js';
import {TestLifetimeBrowserProxy} from './test_lifetime_browser_proxy.js';
import {TestMetricsBrowserProxy} from './test_metrics_browser_proxy.js';
import {TestOpenWindowProxy} from './test_open_window_proxy.js';
import {TestPasswordManagerProxy} from './test_password_manager_proxy.js';
// clang-format on
const testDisplayString = 'Test display string';
/**
* Fire a safety check parent event.
*/
function fireSafetyCheckParentEvent(state: SafetyCheckParentStatus) {
webUIListenerCallback(SafetyCheckCallbackConstants.PARENT_CHANGED, {
newState: state,
displayString: testDisplayString,
});
}
/**
* Fire a safety check updates event.
*/
function fireSafetyCheckUpdatesEvent(state: SafetyCheckUpdatesStatus) {
webUIListenerCallback(SafetyCheckCallbackConstants.UPDATES_CHANGED, {
newState: state,
displayString: testDisplayString,
});
}
/**
* Fire a safety check passwords event.
*/
function fireSafetyCheckPasswordsEvent(state: SafetyCheckPasswordsStatus) {
webUIListenerCallback(SafetyCheckCallbackConstants.PASSWORDS_CHANGED, {
newState: state,
displayString: testDisplayString,
passwordsButtonString: null,
});
}
/**
* Fire a safety check safe browsing event.
*/
function fireSafetyCheckSafeBrowsingEvent(
state: SafetyCheckSafeBrowsingStatus) {
webUIListenerCallback(SafetyCheckCallbackConstants.SAFE_BROWSING_CHANGED, {
newState: state,
displayString: testDisplayString,
});
}
/**
* Fire a safety check extensions event.
*/
function fireSafetyCheckExtensionsEvent(state: SafetyCheckExtensionsStatus) {
webUIListenerCallback(SafetyCheckCallbackConstants.EXTENSIONS_CHANGED, {
newState: state,
displayString: testDisplayString,
});
}
/**
* Fire a safety check Chrome cleaner event.
*/
function fireSafetyCheckChromeCleanerEvent(
state: SafetyCheckChromeCleanerStatus) {
webUIListenerCallback(SafetyCheckCallbackConstants.CHROME_CLEANER_CHANGED, {
newState: state,
displayString: testDisplayString,
});
}
// Expected state of a settings-safety-check-child element; optional
// members are only asserted when provided.
type SafetyCheckChildExpectation = {
page: HTMLElement,
iconStatus: SafetyCheckIconStatus,
label: string,
buttonLabel?: string,
buttonAriaLabel?: string,
buttonClass?: string,
managedIcon?: boolean,
rowClickable?: boolean,
};
/**
* Verify that the safety check child inside the page has been configured as
* specified.
*/
function assertSafetyCheckChild({
page,
iconStatus,
label,
buttonLabel,
buttonAriaLabel,
buttonClass,
managedIcon,
rowClickable
}: SafetyCheckChildExpectation) {
const safetyCheckChild =
page.shadowRoot!.querySelector<SettingsSafetyCheckChildElement>(
'#safetyCheckChild')!;
assertTrue(safetyCheckChild.iconStatus === iconStatus);
assertTrue(safetyCheckChild.label === label);
assertTrue(safetyCheckChild.subLabel === testDisplayString);
assertTrue(!buttonLabel || safetyCheckChild.buttonLabel === buttonLabel);
assertTrue(
!buttonAriaLabel || safetyCheckChild.buttonAriaLabel === buttonAriaLabel);
assertTrue(!buttonClass || safetyCheckChild.buttonClass === buttonClass);
assertTrue(!!managedIcon === !!safetyCheckChild.managedIcon);
assertTrue(!!rowClickable === !!safetyCheckChild.rowClickable);
}
// Test double for SafetyCheckBrowserProxy that records calls and serves
// a configurable "parent ran" display string.
class TestSafetyCheckBrowserProxy extends TestBrowserProxy implements
SafetyCheckBrowserProxy {
private parentRanDisplayString_ = '';
constructor() {
super(['getParentRanDisplayString', 'runSafetyCheck']);
}
// Sets the string later resolved by getParentRanDisplayString().
setParentRanDisplayString(s: string) {
this.parentRanDisplayString_ = s;
}
runSafetyCheck() {
this.methodCalled('runSafetyCheck');
}
getParentRanDisplayString() {
this.methodCalled('getParentRanDisplayString');
return Promise.resolve(this.parentRanDisplayString_);
}
}
// UI tests for the top-level settings-safety-check-page element.
suite('SafetyCheckPageUiTests', function() {
let metricsBrowserProxy: TestMetricsBrowserProxy;
let safetyCheckBrowserProxy: TestSafetyCheckBrowserProxy;
let page: SettingsSafetyCheckPageElement;
setup(function() {
metricsBrowserProxy = new TestMetricsBrowserProxy();
MetricsBrowserProxyImpl.setInstance(metricsBrowserProxy);
safetyCheckBrowserProxy = new TestSafetyCheckBrowserProxy();
safetyCheckBrowserProxy.setParentRanDisplayString('Dummy string');
SafetyCheckBrowserProxyImpl.setInstance(safetyCheckBrowserProxy);
document.body.innerHTML = '';
page = document.createElement('settings-safety-check-page');
document.body.appendChild(page);
flush();
});
teardown(function() {
page.remove();
});
/** Tests parent element and collapse, from start to completion. */
test('testParentAndCollapse', async function() {
// Before the check, only the text button is present.
assertTrue(!!page.shadowRoot!.querySelector('#safetyCheckParentButton'));
assertFalse(!!page.shadowRoot!.querySelector('cr-icon-button'));
// Collapse is not opened.
const collapse = page.shadowRoot!.querySelector('iron-collapse')!;
assertFalse(collapse.opened);
// User starts check.
page.shadowRoot!.querySelector<HTMLElement>(
'#safetyCheckParentButton')!.click();
// Ensure UMA is logged.
assertEquals(
SafetyCheckInteractions.RUN_SAFETY_CHECK,
await metricsBrowserProxy.whenCalled(
'recordSafetyCheckInteractionHistogram'));
assertEquals(
'Settings.SafetyCheck.Start',
await metricsBrowserProxy.whenCalled('recordAction'));
// Ensure the browser proxy call is done.
await safetyCheckBrowserProxy.whenCalled('runSafetyCheck');
// Mock all incoming messages that indicate safety check is running.
fireSafetyCheckUpdatesEvent(SafetyCheckUpdatesStatus.CHECKING);
fireSafetyCheckPasswordsEvent(SafetyCheckPasswordsStatus.CHECKING);
fireSafetyCheckSafeBrowsingEvent(SafetyCheckSafeBrowsingStatus.CHECKING);
fireSafetyCheckExtensionsEvent(SafetyCheckExtensionsStatus.CHECKING);
fireSafetyCheckChromeCleanerEvent(SafetyCheckChromeCleanerStatus.CHECKING);
fireSafetyCheckParentEvent(SafetyCheckParentStatus.CHECKING);
flush();
// Only the icon button is present.
assertFalse(!!page.shadowRoot!.querySelector('#safetyCheckParentButton'));
assertTrue(!!page.shadowRoot!.querySelector('cr-icon-button'));
// Collapse is opened.
assertTrue(collapse.opened);
// Mock all incoming messages that indicate safety check completion.
fireSafetyCheckUpdatesEvent(SafetyCheckUpdatesStatus.UPDATED);
fireSafetyCheckPasswordsEvent(SafetyCheckPasswordsStatus.SAFE);
fireSafetyCheckSafeBrowsingEvent(
SafetyCheckSafeBrowsingStatus.ENABLED_STANDARD);
fireSafetyCheckExtensionsEvent(
SafetyCheckExtensionsStatus.NO_BLOCKLISTED_EXTENSIONS);
fireSafetyCheckChromeCleanerEvent(SafetyCheckChromeCleanerStatus.INFECTED);
fireSafetyCheckParentEvent(SafetyCheckParentStatus.AFTER);
flush();
// Only the icon button is present.
assertFalse(!!page.shadowRoot!.querySelector('#safetyCheckParentButton'));
assertTrue(!!page.shadowRoot!.querySelector('cr-icon-button'));
// Collapse is opened.
assertTrue(page.shadowRoot!.querySelector('iron-collapse')!.opened);
// Ensure the automatic browser proxy calls are started.
return safetyCheckBrowserProxy.whenCalled('getParentRanDisplayString');
});
test('HappinessTrackingSurveysTest', async function() {
const testHatsBrowserProxy = new TestHatsBrowserProxy();
HatsBrowserProxyImpl.setInstance(testHatsBrowserProxy);
page.shadowRoot!.querySelector<HTMLElement>(
'#safetyCheckParentButton')!.click();
const interaction =
await testHatsBrowserProxy.whenCalled('trustSafetyInteractionOccurred');
assertEquals(TrustSafetyInteraction.RAN_SAFETY_CHECK, interaction);
});
});
// Unit tests for the generic settings-safety-check-child row element.
suite('SafetyCheckChildTests', function() {
let page: SettingsSafetyCheckChildElement;
setup(function() {
document.body.innerHTML = '';
page = document.createElement('settings-safety-check-child');
document.body.appendChild(page);
});
teardown(function() {
page.remove();
});
test('testIconStatusRunning', function() {
page.iconStatus = SafetyCheckIconStatus.RUNNING;
flush();
const statusIconElem =
page.shadowRoot!.querySelector<HTMLElement>('#statusIcon');
assertTrue(!!statusIconElem);
assertTrue(statusIconElem!.classList.contains('icon-blue'));
assertFalse(statusIconElem!.classList.contains('icon-red'));
assertEquals('Running', statusIconElem!.getAttribute('aria-label'));
});
test('testIconStatusSafe', function() {
page.iconStatus = SafetyCheckIconStatus.SAFE;
flush();
const statusIconElem =
page.shadowRoot!.querySelector<HTMLElement>('#statusIcon');
assertTrue(!!statusIconElem);
assertTrue(statusIconElem!.classList.contains('icon-blue'));
assertFalse(statusIconElem!.classList.contains('icon-red'));
assertEquals('Passed', statusIconElem!.getAttribute('aria-label'));
});
test('testIconStatusInfo', function() {
page.iconStatus = SafetyCheckIconStatus.INFO;
flush();
const statusIconElem =
page.shadowRoot!.querySelector<HTMLElement>('#statusIcon');
assertTrue(!!statusIconElem);
assertFalse(statusIconElem!.classList.contains('icon-blue'));
assertFalse(statusIconElem!.classList.contains('icon-red'));
assertEquals('Info', statusIconElem!.getAttribute('aria-label'));
});
test('testIconStatusWarning', function() {
page.iconStatus = SafetyCheckIconStatus.WARNING;
flush();
const statusIconElem =
page.shadowRoot!.querySelector<HTMLElement>('#statusIcon');
assertTrue(!!statusIconElem);
assertFalse(statusIconElem!.classList.contains('icon-blue'));
assertTrue(statusIconElem!.classList.contains('icon-red'));
assertEquals('Warning', statusIconElem!.getAttribute('aria-label'));
});
test('testLabelText', function() {
page.label = 'Main label test text';
flush();
const label = page.shadowRoot!.querySelector('#label');
assertTrue(!!label);
assertEquals('Main label test text', label!.textContent!.trim());
});
test('testSubLabelText', function() {
page.subLabel = 'Sub label test text';
flush();
const subLabel = page.shadowRoot!.querySelector('#subLabel');
assertTrue(!!subLabel);
assertEquals('Sub label test text', subLabel!.textContent!.trim());
});
test('testSubLabelNoText', function() {
// sublabel not set -> empty sublabel in element
const subLabel = page.shadowRoot!.querySelector('#subLabel');
assertTrue(!!subLabel);
assertEquals('', subLabel!.textContent!.trim());
});
test('testButtonWithoutClass', function() {
page.buttonLabel = 'Button label';
page.buttonAriaLabel = 'Aria label';
flush();
const button = page.shadowRoot!.querySelector('#button');
assertTrue(!!button);
assertEquals('Button label', button!.textContent!.trim());
assertEquals('Aria label', button!.getAttribute('aria-label'));
assertFalse(button!.classList.contains('action-button'));
});
test('testButtonWithClass', function() {
page.buttonLabel = 'Button label';
page.buttonAriaLabel = 'Aria label';
page.buttonClass = 'action-button';
flush();
const button = page.shadowRoot!.querySelector('#button');
assertTrue(!!button);
assertEquals('Button label', button!.textContent!.trim());
assertEquals('Aria label', button!.getAttribute('aria-label'));
assertTrue(button!.classList.contains('action-button'));
});
test('testNoButton', function() {
// Button label not set -> no button.
assertFalse(!!page.shadowRoot!.querySelector('#button'));
});
test('testManagedIcon', function() {
page.managedIcon = 'cr20:domain';
flush();
assertTrue(!!page.shadowRoot!.querySelector('#managedIcon'));
});
test('testNoManagedIcon', function() {
// Managed icon not set -> no managed icon.
assertFalse(!!page.shadowRoot!.querySelector('#managedIcon'));
});
test('testRowClickableIndicator', function() {
page.rowClickable = true;
flush();
assertTrue(!!page.shadowRoot!.querySelector('#rowClickableIndicator'));
// In-page navigation shows an arrow icon.
assertEquals(
'cr:arrow-right',
page.shadowRoot!.querySelector('#rowClickableIndicator')!.getAttribute(
'iron-icon'));
});
test('testExternalRowClickableIndicator', function() {
page.rowClickable = true;
page.external = true;
flush();
assertTrue(!!page.shadowRoot!.querySelector('#rowClickableIndicator'));
// External navigation shows an open-in-new icon instead.
assertEquals(
'cr:open-in-new',
page.shadowRoot!.querySelector('#rowClickableIndicator')!.getAttribute(
'iron-icon'));
});
test('testNoRowClickableIndicator', function() {
// rowClickable not set -> no RowClickableIndicator.
assertFalse(!!page.shadowRoot!.querySelector('#rowClickableIndicator'));
});
});
// UI tests for the updates row: one test per SafetyCheckUpdatesStatus.
suite('SafetyCheckUpdatesChildUiTests', function() {
let lifetimeBrowserProxy: TestLifetimeBrowserProxy;
let metricsBrowserProxy: TestMetricsBrowserProxy;
let page: SettingsSafetyCheckUpdatesChildElement;
setup(function() {
lifetimeBrowserProxy = new TestLifetimeBrowserProxy();
LifetimeBrowserProxyImpl.setInstance(lifetimeBrowserProxy);
metricsBrowserProxy = new TestMetricsBrowserProxy();
MetricsBrowserProxyImpl.setInstance(metricsBrowserProxy);
document.body.innerHTML = '';
page = document.createElement('settings-safety-check-updates-child');
document.body.appendChild(page);
flush();
});
teardown(function() {
page.remove();
});
test('checkingUiTest', function() {
fireSafetyCheckUpdatesEvent(SafetyCheckUpdatesStatus.CHECKING);
flush();
assertSafetyCheckChild({
page: page,
iconStatus: SafetyCheckIconStatus.RUNNING,
label: 'Updates',
});
});
test('updatedUiTest', function() {
fireSafetyCheckUpdatesEvent(SafetyCheckUpdatesStatus.UPDATED);
flush();
assertSafetyCheckChild({
page: page,
iconStatus: SafetyCheckIconStatus.SAFE,
label: 'Updates',
});
});
test('updatingUiTest', function() {
fireSafetyCheckUpdatesEvent(SafetyCheckUpdatesStatus.UPDATING);
flush();
assertSafetyCheckChild({
page: page,
iconStatus: SafetyCheckIconStatus.RUNNING,
label: 'Updates',
});
});
test('relaunchUiTest', async function() {
fireSafetyCheckUpdatesEvent(SafetyCheckUpdatesStatus.RELAUNCH);
flush();
assertSafetyCheckChild({
page: page,
iconStatus: SafetyCheckIconStatus.INFO,
label: 'Updates',
buttonLabel: page.i18n('aboutRelaunch'),
buttonAriaLabel: page.i18n('safetyCheckUpdatesButtonAriaLabel'),
buttonClass: 'action-button',
});
// User clicks the relaunch button.
page.shadowRoot!
.querySelector<SettingsSafetyCheckChildElement>('#safetyCheckChild')!
.shadowRoot!.querySelector<HTMLElement>('#button')!.click();
// Ensure UMA is logged.
assertEquals(
SafetyCheckInteractions.UPDATES_RELAUNCH,
await metricsBrowserProxy.whenCalled(
'recordSafetyCheckInteractionHistogram'));
assertEquals(
'Settings.SafetyCheck.RelaunchAfterUpdates',
await metricsBrowserProxy.whenCalled('recordAction'));
// Ensure the browser proxy call is done.
return lifetimeBrowserProxy.whenCalled('relaunch');
});
test('disabledByAdminUiTest', function() {
fireSafetyCheckUpdatesEvent(SafetyCheckUpdatesStatus.DISABLED_BY_ADMIN);
flush();
assertSafetyCheckChild({
page: page,
iconStatus: SafetyCheckIconStatus.INFO,
label: 'Updates',
managedIcon: true,
});
});
test('failedOfflineUiTest', function() {
fireSafetyCheckUpdatesEvent(SafetyCheckUpdatesStatus.FAILED_OFFLINE);
flush();
assertSafetyCheckChild({
page: page,
iconStatus: SafetyCheckIconStatus.INFO,
label: 'Updates',
});
});
test('failedUiTest', function() {
fireSafetyCheckUpdatesEvent(SafetyCheckUpdatesStatus.FAILED);
flush();
assertSafetyCheckChild({
page: page,
iconStatus: SafetyCheckIconStatus.WARNING,
label: 'Updates',
});
});
test('unknownUiTest', function() {
fireSafetyCheckUpdatesEvent(SafetyCheckUpdatesStatus.UNKNOWN);
flush();
assertSafetyCheckChild({
page: page,
iconStatus: SafetyCheckIconStatus.INFO,
label: 'Updates',
});
});
});
// UI tests for the "Passwords" row of the settings safety check. Each test
// fires a mock password-check state event and verifies the rendered row
// state, the UMA metrics that get recorded, and any resulting navigation.
suite('SafetyCheckPasswordsChildUiTests', function() {
  let metricsBrowserProxy: TestMetricsBrowserProxy;
  let page: SettingsSafetyCheckPasswordsChildElement;

  setup(function() {
    // Install a test metrics proxy and attach a fresh element per test.
    metricsBrowserProxy = new TestMetricsBrowserProxy();
    MetricsBrowserProxyImpl.setInstance(metricsBrowserProxy);
    document.body.innerHTML = '';
    page = document.createElement('settings-safety-check-passwords-child');
    document.body.appendChild(page);
    flush();
  });

  teardown(function() {
    page.remove();
    // Reset navigation so route assertions don't leak between tests.
    Router.getInstance().navigateTo(routes.BASIC);
  });

  // CHECKING renders the spinner with no interactive affordance.
  test('passwordCheckingUiTest', function() {
    fireSafetyCheckPasswordsEvent(SafetyCheckPasswordsStatus.CHECKING);
    flush();
    assertSafetyCheckChild({
      page: page,
      iconStatus: SafetyCheckIconStatus.RUNNING,
      label: 'Passwords',
    });
  });

  // SAFE renders a clickable row; clicking records caret-navigation UMA and
  // routes to the password check page.
  test('passwordSafeUiTest', async function() {
    fireSafetyCheckPasswordsEvent(SafetyCheckPasswordsStatus.SAFE);
    flush();
    assertSafetyCheckChild({
      page: page,
      iconStatus: SafetyCheckIconStatus.SAFE,
      label: 'Passwords',
      rowClickable: true,
    });
    // User clicks the row.
    page.shadowRoot!
        .querySelector<SettingsSafetyCheckChildElement>(
            '#safetyCheckChild')!.click();
    // Ensure UMA is logged.
    assertEquals(
        SafetyCheckInteractions.PASSWORDS_CARET_NAVIGATION,
        await metricsBrowserProxy.whenCalled(
            'recordSafetyCheckInteractionHistogram'));
    assertEquals(
        'Settings.SafetyCheck.ManagePasswordsThroughCaretNavigation',
        await metricsBrowserProxy.whenCalled('recordAction'));
    // Ensure the correct Settings page is shown.
    assertEquals(
        routes.CHECK_PASSWORDS, Router.getInstance().getCurrentRoute());
  });

  // COMPROMISED renders a warning with a "Review" action button; clicking
  // records UMA, routes to the password check page, and reports the
  // safety-check referrer to the password manager.
  test('passwordCompromisedUiTest', async function() {
    fireSafetyCheckPasswordsEvent(SafetyCheckPasswordsStatus.COMPROMISED);
    flush();
    assertSafetyCheckChild({
      page: page,
      iconStatus: SafetyCheckIconStatus.WARNING,
      label: 'Passwords',
      buttonLabel: 'Review',
      buttonAriaLabel: 'Review passwords',
      buttonClass: 'action-button',
    });
    const passwordManager = new TestPasswordManagerProxy();
    PasswordManagerImpl.setInstance(passwordManager);
    // User clicks the manage passwords button.
    page.shadowRoot!
        .querySelector<SettingsSafetyCheckChildElement>('#safetyCheckChild')!
        .shadowRoot!.querySelector<HTMLElement>('#button')!.click();
    // Ensure UMA is logged.
    assertEquals(
        SafetyCheckInteractions.PASSWORDS_MANAGE_COMPROMISED_PASSWORDS,
        await metricsBrowserProxy.whenCalled(
            'recordSafetyCheckInteractionHistogram'));
    assertEquals(
        'Settings.SafetyCheck.ManagePasswords',
        await metricsBrowserProxy.whenCalled('recordAction'));
    // Ensure the correct Settings page is shown.
    assertEquals(
        routes.CHECK_PASSWORDS, Router.getInstance().getCurrentRoute());
    // Ensure correct referrer sent to password check.
    const referrer =
        await passwordManager.whenCalled('recordPasswordCheckReferrer');
    assertEquals(PasswordCheckReferrer.SAFETY_CHECK, referrer);
  });

  // WEAK_PASSWORDS_EXIST renders an info row that is clickable (no button);
  // clicking records UMA and routes to the password check page.
  test('passwordWeakUiTest', async function() {
    fireSafetyCheckPasswordsEvent(
        SafetyCheckPasswordsStatus.WEAK_PASSWORDS_EXIST);
    flush();
    assertSafetyCheckChild({
      page: page,
      iconStatus: SafetyCheckIconStatus.INFO,
      label: 'Passwords',
      rowClickable: true,
    });
    // User clicks the manage passwords button.
    page.shadowRoot!
        .querySelector<SettingsSafetyCheckChildElement>(
            '#safetyCheckChild')!.click();
    // Ensure UMA is logged.
    assertEquals(
        SafetyCheckInteractions.PASSWORDS_MANAGE_WEAK_PASSWORDS,
        await metricsBrowserProxy.whenCalled(
            'recordSafetyCheckInteractionHistogram'));
    assertEquals(
        'Settings.SafetyCheck.ManageWeakPasswords',
        await metricsBrowserProxy.whenCalled('recordAction'));
    // Ensure the correct Settings page is shown.
    assertEquals(
        routes.CHECK_PASSWORDS, Router.getInstance().getCurrentRoute());
  });

  // Sweeps all enum states and spot-checks the icon/clickability for the
  // purely informational statuses; other statuses are covered above.
  test('passwordInfoStatesUiTest', function() {
    // Iterate over all states
    for (const state of Object.values(SafetyCheckPasswordsStatus)
             .filter(v => Number.isInteger(v))) {
      fireSafetyCheckPasswordsEvent(state as SafetyCheckPasswordsStatus);
      flush();
      // Check that icon status is the correct one for this password status.
      switch (state) {
        case SafetyCheckPasswordsStatus.OFFLINE:
        case SafetyCheckPasswordsStatus.NO_PASSWORDS:
        case SafetyCheckPasswordsStatus.SIGNED_OUT:
        case SafetyCheckPasswordsStatus.FEATURE_UNAVAILABLE:
          assertSafetyCheckChild({
            page: page,
            iconStatus: SafetyCheckIconStatus.INFO,
            label: 'Passwords',
          });
          break;
        case SafetyCheckPasswordsStatus.QUOTA_LIMIT:
        case SafetyCheckPasswordsStatus.ERROR:
          assertSafetyCheckChild({
            page: page,
            iconStatus: SafetyCheckIconStatus.INFO,
            label: 'Passwords',
            rowClickable: true,
          });
          break;
        default:
          // Not covered by this test.
          break;
      }
    }
  });
});
// UI tests for the "Safe Browsing" row of the settings safety check. Each
// test fires a mock Safe Browsing state event and verifies the rendered row,
// the recorded UMA metrics, and any resulting navigation.
suite('SafetyCheckSafeBrowsingChildUiTests', function() {
  let metricsBrowserProxy: TestMetricsBrowserProxy;
  let page: SettingsSafetyCheckSafeBrowsingChildElement;

  setup(function() {
    // Install a test metrics proxy and attach a fresh element per test.
    metricsBrowserProxy = new TestMetricsBrowserProxy();
    MetricsBrowserProxyImpl.setInstance(metricsBrowserProxy);
    document.body.innerHTML = '';
    page = document.createElement('settings-safety-check-safe-browsing-child');
    document.body.appendChild(page);
    flush();
  });

  teardown(function() {
    page.remove();
    // Reset navigation so route assertions don't leak between tests.
    Router.getInstance().navigateTo(routes.BASIC);
  });

  // CHECKING renders the spinner with no interactive affordance.
  test('safeBrowsingCheckingUiTest', function() {
    fireSafetyCheckSafeBrowsingEvent(SafetyCheckSafeBrowsingStatus.CHECKING);
    flush();
    assertSafetyCheckChild({
      page: page,
      iconStatus: SafetyCheckIconStatus.RUNNING,
      label: 'Safe Browsing',
    });
  });

  // ENABLED_STANDARD renders a clickable safe row; clicking records
  // caret-navigation UMA and routes to the security settings page.
  test('safeBrowsingEnabledStandardUiTest', async function() {
    fireSafetyCheckSafeBrowsingEvent(
        SafetyCheckSafeBrowsingStatus.ENABLED_STANDARD);
    flush();
    assertSafetyCheckChild({
      page: page,
      iconStatus: SafetyCheckIconStatus.SAFE,
      label: 'Safe Browsing',
      rowClickable: true,
    });
    // User clicks the row.
    page.shadowRoot!
        .querySelector<SettingsSafetyCheckChildElement>(
            '#safetyCheckChild')!.click();
    // Ensure UMA is logged.
    assertEquals(
        SafetyCheckInteractions.SAFE_BROWSING_CARET_NAVIGATION,
        await metricsBrowserProxy.whenCalled(
            'recordSafetyCheckInteractionHistogram'));
    assertEquals(
        'Settings.SafetyCheck.ManageSafeBrowsingThroughCaretNavigation',
        await metricsBrowserProxy.whenCalled('recordAction'));
    // Ensure the correct Settings page is shown.
    assertEquals(routes.SECURITY, Router.getInstance().getCurrentRoute());
  });

  // Standard protection with enhanced available still renders as safe.
  test('safeBrowsingEnabledStandardAvailableEnhancedUiTest', function() {
    fireSafetyCheckSafeBrowsingEvent(
        SafetyCheckSafeBrowsingStatus.ENABLED_STANDARD_AVAILABLE_ENHANCED);
    flush();
    assertSafetyCheckChild({
      page: page,
      iconStatus: SafetyCheckIconStatus.SAFE,
      label: 'Safe Browsing',
      rowClickable: true,
    });
  });

  // Enhanced protection renders as safe and clickable.
  test('safeBrowsingEnabledEnhancedUiTest', function() {
    fireSafetyCheckSafeBrowsingEvent(
        SafetyCheckSafeBrowsingStatus.ENABLED_ENHANCED);
    flush();
    assertSafetyCheckChild({
      page: page,
      iconStatus: SafetyCheckIconStatus.SAFE,
      label: 'Safe Browsing',
      rowClickable: true,
    });
  });

  // DISABLED renders an info row with a "Manage" action button; clicking
  // records UMA and routes to the security settings page.
  test('safeBrowsingDisabledUiTest', async function() {
    fireSafetyCheckSafeBrowsingEvent(SafetyCheckSafeBrowsingStatus.DISABLED);
    flush();
    assertSafetyCheckChild({
      page: page,
      iconStatus: SafetyCheckIconStatus.INFO,
      label: 'Safe Browsing',
      buttonLabel: 'Manage',
      buttonAriaLabel: 'Manage Safe Browsing',
      buttonClass: 'action-button',
    });
    // User clicks the manage safe browsing button.
    page.shadowRoot!
        .querySelector<SettingsSafetyCheckChildElement>('#safetyCheckChild')!
        .shadowRoot!.querySelector<HTMLElement>('#button')!.click();
    // Ensure UMA is logged.
    assertEquals(
        SafetyCheckInteractions.SAFE_BROWSING_MANAGE,
        await metricsBrowserProxy.whenCalled(
            'recordSafetyCheckInteractionHistogram'));
    assertEquals(
        'Settings.SafetyCheck.ManageSafeBrowsing',
        await metricsBrowserProxy.whenCalled('recordAction'));
    // Ensure the correct Settings page is shown.
    assertEquals(routes.SECURITY, Router.getInstance().getCurrentRoute());
  });

  // Admin-disabled Safe Browsing shows the managed icon; the row remains
  // clickable so the user can see why it is disabled.
  test('safeBrowsingDisabledByAdminUiTest', function() {
    fireSafetyCheckSafeBrowsingEvent(
        SafetyCheckSafeBrowsingStatus.DISABLED_BY_ADMIN);
    flush();
    assertSafetyCheckChild({
      page: page,
      iconStatus: SafetyCheckIconStatus.INFO,
      label: 'Safe Browsing',
      managedIcon: true,
      rowClickable: true,
    });
  });

  // Extension-disabled Safe Browsing renders the same managed treatment.
  test('safeBrowsingDisabledByExtensionUiTest', function() {
    fireSafetyCheckSafeBrowsingEvent(
        SafetyCheckSafeBrowsingStatus.DISABLED_BY_EXTENSION);
    flush();
    assertSafetyCheckChild({
      page: page,
      iconStatus: SafetyCheckIconStatus.INFO,
      label: 'Safe Browsing',
      managedIcon: true,
      rowClickable: true,
    });
  });
});
// UI tests for the "Extensions" row of the settings safety check. Each test
// fires a mock extensions state event and verifies the rendered row, the
// recorded UMA metrics, and that review actions open chrome://extensions.
suite('SafetyCheckExtensionsChildUiTests', function() {
  let metricsBrowserProxy: TestMetricsBrowserProxy;
  let openWindowProxy: TestOpenWindowProxy;
  let page: SettingsSafetyCheckExtensionsChildElement;

  setup(function() {
    // Install test proxies for metrics and window opening, then attach a
    // fresh element per test.
    metricsBrowserProxy = new TestMetricsBrowserProxy();
    MetricsBrowserProxyImpl.setInstance(metricsBrowserProxy);
    openWindowProxy = new TestOpenWindowProxy();
    OpenWindowProxyImpl.setInstance(openWindowProxy);
    document.body.innerHTML = '';
    page = document.createElement('settings-safety-check-extensions-child');
    document.body.appendChild(page);
    flush();
  });

  teardown(function() {
    page.remove();
  });

  // Shared assertions for tests where the user clicks the "Review" button:
  // UMA must be recorded and chrome://extensions must be opened.
  /** @return {!Promise} */
  async function expectExtensionsButtonClickActions() {
    // User clicks review extensions button.
    page.shadowRoot!
        .querySelector<SettingsSafetyCheckChildElement>('#safetyCheckChild')!
        .shadowRoot!.querySelector<HTMLElement>('#button')!.click();
    // Ensure UMA is logged.
    assertEquals(
        SafetyCheckInteractions.EXTENSIONS_REVIEW,
        await metricsBrowserProxy.whenCalled(
            'recordSafetyCheckInteractionHistogram'));
    assertEquals(
        'Settings.SafetyCheck.ReviewExtensions',
        await metricsBrowserProxy.whenCalled('recordAction'));
    // Ensure the browser proxy call is done.
    assertEquals(
        'chrome://extensions', await openWindowProxy.whenCalled('openURL'));
  }

  // CHECKING renders the spinner with no interactive affordance.
  test('extensionsCheckingUiTest', function() {
    fireSafetyCheckExtensionsEvent(SafetyCheckExtensionsStatus.CHECKING);
    flush();
    assertSafetyCheckChild({
      page: page,
      iconStatus: SafetyCheckIconStatus.INFO,
      label: 'Extensions',
      rowClickable: true,
    });
  });

  // ERROR renders an informational clickable row.
  test('extensionsErrorUiTest', function() {
    fireSafetyCheckExtensionsEvent(SafetyCheckExtensionsStatus.ERROR);
    flush();
    assertSafetyCheckChild({
      page: page,
      iconStatus: SafetyCheckIconStatus.INFO,
      label: 'Extensions',
      rowClickable: true,
    });
  });

  // NO_BLOCKLISTED_EXTENSIONS renders a clickable safe row; clicking records
  // caret-navigation UMA and opens chrome://extensions.
  test('extensionsSafeUiTest', async function() {
    fireSafetyCheckExtensionsEvent(
        SafetyCheckExtensionsStatus.NO_BLOCKLISTED_EXTENSIONS);
    flush();
    assertSafetyCheckChild({
      page: page,
      iconStatus: SafetyCheckIconStatus.SAFE,
      label: 'Extensions',
      rowClickable: true,
    });
    // User clicks the row.
    page.shadowRoot!
        .querySelector<SettingsSafetyCheckChildElement>(
            '#safetyCheckChild')!.click();
    // Ensure UMA is logged.
    assertEquals(
        SafetyCheckInteractions.EXTENSIONS_CARET_NAVIGATION,
        await metricsBrowserProxy.whenCalled(
            'recordSafetyCheckInteractionHistogram'));
    assertEquals(
        'Settings.SafetyCheck.ReviewExtensionsThroughCaretNavigation',
        await metricsBrowserProxy.whenCalled('recordAction'));
    // Ensure the browser proxy call is done.
    const url = await openWindowProxy.whenCalled('openURL');
    assertEquals('chrome://extensions', url);
  });

  // All blocklisted extensions disabled renders safe; row click still opens
  // chrome://extensions.
  test('extensionsBlocklistedOffUiTest', async function() {
    fireSafetyCheckExtensionsEvent(
        SafetyCheckExtensionsStatus.BLOCKLISTED_ALL_DISABLED);
    flush();
    assertSafetyCheckChild({
      page: page,
      iconStatus: SafetyCheckIconStatus.SAFE,
      label: 'Extensions',
      rowClickable: true,
    });
    // User clicks the row.
    page.shadowRoot!
        .querySelector<SettingsSafetyCheckChildElement>(
            '#safetyCheckChild')!.click();
    // Ensure the browser proxy call is done.
    const url = await openWindowProxy.whenCalled('openURL');
    assertEquals('chrome://extensions', url);
  });

  // Blocklisted extensions re-enabled by the user render a warning with a
  // "Review" action button.
  test('extensionsBlocklistedOnAllUserUiTest', function() {
    fireSafetyCheckExtensionsEvent(
        SafetyCheckExtensionsStatus.BLOCKLISTED_REENABLED_ALL_BY_USER);
    flush();
    assertSafetyCheckChild({
      page: page,
      iconStatus: SafetyCheckIconStatus.WARNING,
      label: 'Extensions',
      buttonLabel: 'Review',
      buttonAriaLabel: 'Review extensions',
      buttonClass: 'action-button',
    });
    return expectExtensionsButtonClickActions();
  });

  // Mixed user/admin re-enables: warning + button, but no managed icon
  // since at least some extensions were re-enabled by the user.
  test('extensionsBlocklistedOnUserAdminUiTest', function() {
    fireSafetyCheckExtensionsEvent(
        SafetyCheckExtensionsStatus.BLOCKLISTED_REENABLED_SOME_BY_USER);
    flush();
    assertSafetyCheckChild({
      page: page,
      iconStatus: SafetyCheckIconStatus.WARNING,
      label: 'Extensions',
      buttonLabel: 'Review',
      buttonAriaLabel: 'Review extensions',
      buttonClass: 'action-button',
      managedIcon: false,
    });
    return expectExtensionsButtonClickActions();
  });

  // All re-enabled by admin: informational managed row, no action button.
  test('extensionsBlocklistedOnAllAdminUiTest', function() {
    fireSafetyCheckExtensionsEvent(
        SafetyCheckExtensionsStatus.BLOCKLISTED_REENABLED_ALL_BY_ADMIN);
    flush();
    assertSafetyCheckChild({
      page: page,
      iconStatus: SafetyCheckIconStatus.INFO,
      label: 'Extensions',
      managedIcon: true,
      rowClickable: true,
    });
  });
});
import * as _ from "lodash";
import * as jerr from "../errors";
import UpdatePrimitive from "./primitives/UpdatePrimitive";
import InsertIntoObject from "./primitives/InsertIntoObject";
import InsertIntoArray from "./primitives/InsertIntoArray";
import DeleteFromObject from "./primitives/DeleteFromObject";
import DeleteFromArray from "./primitives/DeleteFromArray";
import ReplaceInObject from "./primitives/ReplaceInObject";
import ReplaceInArray from "./primitives/ReplaceInArray";
import RenameInObject from "./primitives/RenameInObject";
import Insert from "./primitives/Insert";
import Remove from "./primitives/Remove";
import UpdatePrimitives from "./UpdatePrimitives";
import { IPUL } from "./IPUL";
import { ITransaction } from "../stores/ITransaction";
export default class PUL implements IPUL {
udps = new UpdatePrimitives();
parse(pul: string): PUL {
var newPul:PUL = JSON.parse(pul);
this.udps.parse(newPul.udps);
return this;
}
serialize(): string {
return JSON.stringify(this);
}
invert(transaction: ITransaction): Promise<PUL> {
var promises = [];
var pul = new PUL();
this.normalize();
this.udps.getAll().forEach((udp) => {
var lPUL = new PUL();
var target;
if(udp instanceof Remove) {
target = transaction.get(udp.id).then((item) => { return item; });
} else if(!(udp instanceof Insert)) {
target = udp.lockTarget(transaction).then(() => { return udp.getTarget(); });
} else {
//TODO: remove setTimeout
target = new Promise((resolve, reject) => { setTimeout(resolve, 1); });
}
target.then((target) => {
udp.invert(target, lPUL);
lPUL.udps.insert.forEach((udp) => {
pul.insert(udp.id, udp.item);
});
lPUL.udps.remove.forEach((udp) => {
pul.remove(udp.id);
});
lPUL.udps.deleteFromArray.forEach((udp) => {
pul.deleteFromArray(udp.id, udp.ordPath, udp.position);
});
lPUL.udps.deleteFromObject.forEach((udp) => {
pul.deleteFromObject(udp.id, udp.ordPath, udp.keys);
});
lPUL.udps.insertIntoArray.forEach((udp) => {
pul.insertIntoArray(udp.id, udp.ordPath, udp.position, udp.items);
});
lPUL.udps.insertIntoObject.forEach((udp) => {
var idx = _.findIndex(pul.udps.insertIntoObject, { id: udp.id, ordPath: udp.ordPath });
if(idx > -1) {
_.merge(pul.udps.insertIntoObject[idx].pairs, udp.pairs);
} else {
pul.udps.insertIntoObject.push(udp);
}
});
});
promises.push(target);
});
return Promise.all(promises).then(() => {
return pul.normalize();
});
}
apply(transaction: ITransaction): Promise<any> {
//Normalize PUL
this.normalize();
//Lock targets
var promises = [];
this.udps.getAll().forEach((udp) => {
if(!(udp instanceof Insert) && !(udp instanceof Remove)) {
var p = udp.lockTarget(transaction);
promises.push(p);
}
});
return Promise.all(promises).then(() => {
var apply = (udp: UpdatePrimitive) => {
var id = udp.id;
udp.apply();
transaction.put(id, udp.getDocument());
};
//Apply updates
_.forEach(this.udps.insert, (udp) => {
transaction.put(udp.id, udp.item);
});
_.forEach(this.udps.remove, (udp) => {
transaction.remove(udp.id);
});
//1. jupd:replace-in-object
_.forEach(this.udps.replaceInObject, apply);
//2. jupd:delete-from-object
_.forEach(this.udps.deleteFromObject, apply);
//3. jupd:rename-in-object
_.forEach(this.udps.renameInObject, apply);
//4. jupd:insert-into-object
_.forEach(this.udps.insertIntoObject, apply);
//The array update primitives, furthermore, are applied right-to-left with re- gard to their index.
//This obviates the problem of indexes being shifted and/or becoming invalid due to deletions or insertions.
//TODO: test
//5. jupd:replace-in-array
_.sortBy(this.udps.replaceInArray, "position");
_.forEach(this.udps.replaceInArray, apply);
//6. jupd:delete-from-array
_.sortBy(this.udps.deleteFromArray, "position");
_.forEach(this.udps.deleteFromArray, apply);
//7. jupd:insert-into-array
_.sortBy(this.udps.insertIntoArray, "position");
_.forEach(this.udps.insertIntoArray, apply);
return transaction.done();
});
}
normalize(): PUL {
//If there is a delete on the same (array,index) target, the replace is omitted.
_.forEach(this.udps.deleteFromArray, (udp: DeleteFromArray) => {
<ReplaceInArray[]>_.remove(this.udps.replaceInArray, { id: udp.id, ordPath: udp.ordPath, position: udp.position });
});
//If there is a delete on the same (object,name) target, the replace is omitted.
//If there is a delete on the same (object,name) target, the rename is omitted.
_.forEach(this.udps.deleteFromObject, (udp: DeleteFromObject) => {
_.forEach(udp.keys, (key: string) => {
<ReplaceInObject[]>_.remove(this.udps.replaceInObject, { id: udp.id, ordPath: udp.ordPath, key: key });
<RenameInObject[]>_.remove(this.udps.renameInObject, { id: udp.id, ordPath: udp.ordPath, key: key });
});
});
//If there is a remove primitive, all primitives with the same target are removed
var handler = (udps, id) => {
var idx;
while((idx = _.findIndex(udps, { id: id })) > -1) {
udps.splice(idx, 1);
}
};
this.udps.remove.forEach(udp => {
handler(this.udps.deleteFromArray, udp.id);
handler(this.udps.deleteFromObject, udp.id);
handler(this.udps.insertIntoArray, udp.id);
handler(this.udps.insertIntoObject, udp.id);
handler(this.udps.replaceInArray, udp.id);
handler(this.udps.replaceInObject, udp.id);
handler(this.udps.renameInObject, udp.id);
handler(this.udps.insert, udp.id);
});
//We remove InsertIntoObject primitives with no pairs (if they have been removed because of composition)
var idx = _.findIndex(this.udps.insertIntoObject, udp => {
return Object.keys(udp.pairs).length === 0;
});
if(idx > -1) {
this.udps.insertIntoObject.splice(idx, 1);
}
return this;
}
remove(id: string): PUL {
var newUdp = new Remove(id);
var udp = _.find(this.udps.remove, { id: id });
if(udp === undefined) {
this.udps.remove.push(newUdp);
}
return this;
}
insert(id: string, item: any): PUL {
var newUdp = new Insert(id, item);
var udp = _.find(this.udps.insert, { id: id });
if(udp) {
throw new jerr.JNUP0005();
} else {
this.udps.insert.push(newUdp);
}
return this;
}
/*
* jupd:insert-into-object($o as object(), $p as object())
* Inserts all pairs of the object $p into the object $o.
*/
insertIntoObject(id: string, ordPath: string[], pairs: {}): PUL {
var newUdp = new InsertIntoObject(id, ordPath, pairs);
//Multiple UPs of this type with the same object target are merged into one UP with this target,
//where the sources containing the pairs to insert are merged into one object.
//An error jerr:JNUP0005 is raised if a collision occurs.
var udp = _.find(this.udps.insertIntoObject, { id: id, ordPath: ordPath });
if(udp) {
udp.merge(newUdp);
} else {
this.udps.insertIntoObject.push(newUdp);
}
return this;
}
/*
* jupd:insert-into-array($a as array(), $i as xs:integer, $c as item()*)
* Inserts all items in the sequence $c before position $i into the array $a.
*/
insertIntoArray(id: string, ordPath: string[], position: number, items: any[]): PUL {
var newUdp = new InsertIntoArray(id, ordPath, position, items);
//Multiple UPs of this type with the same (array,index) target are merged into one UP with this target,
//where the items are merged in an implementation-dependent order.
//Several inserts on the same array and selector (position) are equivalent to a unique insert on that array and selector with the content of those original inserts appended in an implementation-dependent order.
var udp = _.find(this.udps.insertIntoArray, { id: id, ordPath: ordPath, position: position });
if(udp) {
udp.merge(newUdp);
} else {
this.udps.insertIntoArray.push(newUdp);
}
return this;
}
/*
* jupd:delete-from-object($o as object(), $s as xs:string*)
* Removes the pairs the names of which appear in $s from the object $o.
*/
deleteFromObject(id: string, ordPath: string[], keys: Array<string>): PUL {
var newUdp = new DeleteFromObject(id, ordPath, keys);
//Multiple UPs of this type with the same object target are merged into one UP with this target,
//where the selectors (names lists) are merged. Duplicate names are removed.
var udp = _.find(this.udps.deleteFromObject, { id: id, ordPath: ordPath });
if(udp) {
udp.merge(newUdp);
} else {
this.udps.deleteFromObject.push(newUdp);
}
return this;
}
/*
* jupd:delete-from-array($a as array(), $i as xs:integer)
* Removes the item at position $i from the array $a (causes all following items in the array to move one position to the left).
*/
deleteFromArray(id: string, ordPath: string[], position: number): PUL {
var newUdp = new DeleteFromArray(id, ordPath, position);
//Multiple UPs of this type with the same (array,index) target are merged into one UP with this target.
var udp = _.find(this.udps.deleteFromArray, { id: id, ordPath: ordPath, position: position });
if(!udp) {
this.udps.deleteFromArray.push(newUdp);
}
return this;
}
/*
* jupd:replace-in-array($a as array(), $i as xs:integer, $v as item())
* Replaces the item at position $i in the array $a with the item $v (do nothing if $i is not comprised between 1 and jdm:size($a)).
*/
replaceInArray(id: string, ordPath: string[], position: number, item: any): PUL {
var newUdp = new ReplaceInArray(id, ordPath, position, item);
//The presence of multiple UPs of this type with the same (array,index) target raises an error.
var udp = _.find(this.udps.replaceInArray, { id: id, ordPath: ordPath, position: position });
if(udp) {
throw new jerr.JNUP0009();
} else {
this.udps.replaceInArray.push(newUdp);
}
return this;
}
/*
* jupd:replace-in-object($o as object(), $n as xs:string, $v as item())
* Replaces the value of the pair named $n in the object $o with the item $v (do nothing if there is no such pair).
*/
replaceInObject(id: string, ordPath: string[], key: string, item: any): PUL {
var newUdp = new ReplaceInObject(id, ordPath, key, item);
//The presence of multiple UPs of this type with the same (array,index) target raises an error.
var udp = _.find(this.udps.replaceInObject, { id: id, ordPath: ordPath, key: key });
if(udp) {
throw new jerr.JNUP0009();
} else {
this.udps.replaceInObject.push(newUdp);
}
return this;
}
/*
* jupd:rename-in-object($o as object(), $n as xs:string, $p as xs:string)
* Renames the pair originally named $n in the object $o as $p (do nothing if there is no such pair).
*/
renameInObject(id: string, ordPath: string[], key: string, newKey: string): PUL {
var newUdp = new RenameInObject(id, ordPath, key, newKey);
//The presence of multiple UPs of this type with the same (object,name) target raises an error.
//If there is a delete on the same (object,name) target, the rename is omitted.
var udp = _.find(this.udps.renameInObject, { id: id, ordPath: ordPath, key: key });
if(udp) {
throw new jerr.JNUP0009();
} else {
this.udps.renameInObject.push(newUdp);
}
return this;
}
} | the_stack |
namespace LiteMol.Example.Channels.State {
import Transformer = Bootstrap.Entity.Transformer;
export type SelectableElement =
| { kind: 'nothing' }
| { kind: 'molecule', data: Bootstrap.Interactivity.Molecule.SelectionInfo }
| { kind: 'point', data: number[] }
export interface AppState {
plugin: Plugin.Controller,
events: {
select: Bootstrap.Rx.Observable<SelectableElement>
}
}
export function AppState(plugin: Plugin.Controller): AppState {
Behaviour.initCavityBoundaryToggle(plugin);
return {
plugin,
events: {
select: Behaviour.createSelectEvent(plugin)
}
};
}
export type SurfaceTag =
| { kind: 'Channel' | 'Cavity-inner' | 'Origins', element: any }
| { kind: 'Cavity-boundary', element: any, surface: Core.Geometry.Surface }
function showDefaultVisuals(plugin: Plugin.Controller, data: any, channelCount: number) {
return new Promise(res =>
showChannelVisuals(plugin, data.Channels.Tunnels.slice(0, channelCount), true).then(() => {
let cavity = data.Cavities.Cavities[0];
if (cavity) {
showCavityVisuals(plugin, [cavity], true).then(() => res());
}
}));
}
export function loadData(plugin: Plugin.Controller, pdbId: string, url: string) {
return new Promise<any>((res, rej) => {
plugin.clear();
let model = plugin.createTransform()
.add(plugin.root, Transformer.Data.Download, { url: `https://www.ebi.ac.uk/pdbe/static/entry/${pdbId}_updated.cif`, type: 'String', id: pdbId })
.then(Transformer.Molecule.CreateFromData, { format: Core.Formats.Molecule.SupportedFormats.mmCIF }, { isBinding: true })
.then(Transformer.Molecule.CreateModel, { modelIndex: 0 })
.then(Transformer.Molecule.CreateMacromoleculeVisual, { polymer: true, polymerRef: 'polymer-visual', het: true })
let data = plugin.createTransform().add(plugin.root, Transformer.Data.Download, { url, type: 'String', id: 'MOLE Data' }, { isHidden: true })
.then(Transformer.Data.ParseJson, { id: 'MOLE Data' }, { ref: 'mole-data' });
plugin.applyTransform(model)
.then(() => {
plugin.command(Bootstrap.Command.Entity.Focus, plugin.context.select('polymer-visual'));
});
plugin.applyTransform(data)
.then(() => {
let data = plugin.context.select('mole-data')[0] as Bootstrap.Entity.Data.Json;
if (!data) rej('Data not available.');
else {
showDefaultVisuals(plugin, data.props.data, 2).then(() => res(data.props.data));
}
})
.catch(e => rej(e));
});
}
function createSurface(mesh: any) {
// wrap the vertices in typed arrays
if (!(mesh.Vertices instanceof Float32Array)) {
mesh.Vertices = new Float32Array(mesh.Vertices);
}
if (!(mesh.Vertices instanceof Uint32Array)) {
mesh.Triangles = new Uint32Array(mesh.Triangles);
}
let surface = <Core.Geometry.Surface>{
vertices: mesh.Vertices,
vertexCount: (mesh.Vertices.length / 3) | 0,
triangleIndices: new Uint32Array(mesh.Triangles),
triangleCount: (mesh.Triangles.length / 3) | 0,
};
return surface;
}
function createTriangleSurface(mesh: any) {
const triangleCount = (mesh.Triangles.length / 3) | 0;
const vertexCount = triangleCount * 3;
const srcV = mesh.Vertices;
const srcT = mesh.Triangles;
const vertices = new Float32Array(vertexCount * 3);
const triangleIndices = new Uint32Array(triangleCount * 3);
const annotation = new Int32Array(vertexCount) as any as number[];
const tri = [0,0,0];
for (let i = 0, _i = mesh.Triangles.length; i < _i; i += 3) {
tri[0] = srcT[i]; tri[1] = srcT[i + 1]; tri[2] = srcT[i + 2];
for (let j = 0; j < 3; j++) {
const v = i + j;
vertices[3 * v] = srcV[3 * tri[j]];
vertices[3 * v + 1] = srcV[3 * tri[j] + 1];
vertices[3 * v + 2] = srcV[3 * tri[j] + 2];
triangleIndices[i + j] = i + j;
}
}
for (let i = 0; i < triangleCount; i++) {
for (let j = 0; j < 3; j++) annotation[3 * i + j] = i;
}
const surface = <Core.Geometry.Surface>{
vertices,
vertexCount,
triangleIndices,
triangleCount,
annotation
};
return surface;
}
let colorIndex = Visualization.Molecule.Colors.DefaultPallete.length - 1;
function nextColor() {
return Visualization.Color.random();
// can use the build in palette for example like this:
// let color = Visualization.Molecule.Colors.DefaultPallete[colorIndex];
// colorIndex--;
// if (colorIndex < 0) colorIndex = Visualization.Molecule.Colors.DefaultPallete.length - 1;
// return color;
}
function showSurfaceVisuals(plugin: Plugin.Controller, elements: any[], visible: boolean, type: 'Cavity' | 'Channel', label: (e: any) => string): Promise<any> {
// I am modifying the original JSON response. In general this is not a very good
// idea and should be avoided in "real" apps.
let t = plugin.createTransform();
let needsApply = false;
for (let element of elements) {
if (!element.__id) element.__id = Bootstrap.Utils.generateUUID();
if (!!element.__isVisible === visible) continue;
element.__isVisible = visible;
if (!element.__color) {
// the colors should probably be initialized when the data is loaded
// so that they are deterministic...
element.__color = nextColor();
}
if (!visible) {
plugin.command(Bootstrap.Command.Tree.RemoveNode, element.__id);
} else if (type === 'Cavity') {
const boundarySurface = createTriangleSurface(element.Mesh.Boundary);
const group = t.add('mole-data', Transformer.Basic.CreateGroup, { }, { ref: element.__id, isHidden: true });
group.then(Transformer.Basic.CreateSurfaceVisual, {
label: label(element),
tag: <SurfaceTag>{ kind: 'Cavity-boundary', element, surface: boundarySurface },
surface: boundarySurface,
theme: Behaviour.CavityTheme.boundary
});
group.then(Transformer.Basic.CreateSurfaceVisual, {
label: label(element),
tag: <SurfaceTag>{ kind: 'Cavity-inner', element },
surface: createSurface(element.Mesh.Inner),
theme: Behaviour.CavityTheme.inner
});
needsApply = true;
} else if (type === 'Channel') {
let surface = createSurface(element.Mesh);
t.add('mole-data', Transformer.Basic.CreateSurfaceVisual, {
label: label(element),
tag: <SurfaceTag>{ kind: 'Channel', element },
surface,
theme: Visualization.Theme.createUniform({
colors: LiteMol.Core.Utils.FastMap.ofArray<string, LiteMol.Visualization.Color>([['Uniform', element.__color as Visualization.Color]]),
transparency: { alpha: 1.0 }
})
}, { ref: element.__id, isHidden: true });
needsApply = true;
}
}
if (needsApply) {
return new Promise<any>((res, rej) => {
plugin.applyTransform(t).then(() => {
for (let element of elements) {
element.__isBusy = false;
}
res();
}).catch(e => rej(e));
});
}
else {
return new Promise<any>((res, rej) => {
for (let element of elements) {
element.__isBusy = false;
}
res();
});
}
}
export function showCavityVisuals(plugin: Plugin.Controller, cavities: any[], visible: boolean): Promise<any> {
return showSurfaceVisuals(plugin, cavities, visible, 'Cavity', (cavity: any) => `${cavity.Type} ${cavity.Id}`);
}
export function showChannelVisuals(plugin: Plugin.Controller, channels: any[], visible: boolean): Promise<any> {
return showSurfaceVisuals(plugin, channels, visible, 'Channel', (channel: any) => `${channel.Type} ${channel.Id}`);
}
function createOriginsSurface(origins: any): Promise<Core.Geometry.Surface> {
if (origins.__surface) return Promise.resolve(origins.__surface);
let s = Visualization.Primitive.Builder.create();
let id = 0;
for (let p of origins.Points) {
s.add({ type: 'Sphere', id: id++, radius: 1.69, center: [p.X, p.Y, p.Z] });
}
return s.buildSurface().run();
}
export function showOriginsSurface(plugin: Plugin.Controller, origins: any, visible: boolean): Promise<any> {
if (!origins.__id) origins.__id = Bootstrap.Utils.generateUUID();
if (!origins.Points.length || !!origins.__isVisible === visible) return Promise.resolve();
origins.__isVisible = visible;
if (!visible) {
plugin.command(Bootstrap.Command.Tree.RemoveNode, origins.__id);
origins.__isBusy = false;
return Promise.resolve();
}
if (!origins.__color) {
// the colors should probably be initialized when the data is loaded
// so that they are deterministic...
origins.__color = nextColor();
}
return new Promise((res, rej) => {
createOriginsSurface(origins).then(surface => {
let t = plugin.createTransform()
.add('mole-data', Transformer.Basic.CreateSurfaceVisual, {
label: 'Origins ' + origins.Type,
tag: <SurfaceTag>{ kind: 'Origins', element: origins },
surface,
theme: Visualization.Theme.createUniform({
colors: LiteMol.Core.Utils.FastMap.ofArray<string, LiteMol.Visualization.Color>([['Uniform', origins.__color as Visualization.Color]])
})
}, { ref: origins.__id, isHidden: true });
plugin.applyTransform(t).then(() => {
origins.__isBusy = false;
res();
}).catch(rej);
}).catch(rej);
});
}
} | the_stack |
import {Mesh} from "../common/mesh.js";
import {GL_ARRAY_BUFFER, GL_ELEMENT_ARRAY_BUFFER, GL_STATIC_DRAW} from "../common/webgl.js";
/**
 * Uploads the smooth icosphere's attribute and index arrays into fresh GL
 * buffers and returns them bundled as a Mesh. The module-level typed arrays
 * are shared (not copied) into the returned record.
 */
export function mesh_icosphere_smooth(gl: WebGLRenderingContext): Mesh {
    // Create a buffer, bind it to `target`, and upload `data` with
    // static-draw usage; returns the new buffer handle.
    let upload = (target: number, data: BufferSource) => {
        let buf = gl.createBuffer()!;
        gl.bindBuffer(target, buf);
        gl.bufferData(target, data, GL_STATIC_DRAW);
        return buf;
    };
    // Property order matches the original upload order: vertices, normals,
    // texcoords, weights, then the element index buffer.
    return {
        VertexBuffer: upload(GL_ARRAY_BUFFER, vertex_arr),
        VertexArray: vertex_arr,
        NormalBuffer: upload(GL_ARRAY_BUFFER, normal_arr),
        NormalArray: normal_arr,
        TexCoordBuffer: upload(GL_ARRAY_BUFFER, texcoord_arr),
        TexCoordArray: texcoord_arr,
        WeightsBuffer: upload(GL_ARRAY_BUFFER, weights_arr),
        WeightsArray: weights_arr,
        IndexBuffer: upload(GL_ELEMENT_ARRAY_BUFFER, index_arr),
        IndexArray: index_arr,
        IndexCount: index_arr.length,
    };
}
// prettier-ignore
let vertex_arr = Float32Array.from([
0, -0.5, 0,
0.223605, -0.425327, 0.154506,
-0.085408, -0.425327, 0.249998,
0.380422, -0.22361, 0.262863,
0.223605, -0.425327, 0.154506,
0.447211, -0.262868, 0,
0, -0.5, 0,
-0.085408, -0.425327, 0.249998,
-0.276392, -0.425326, 0,
0, -0.5, 0,
-0.276392, -0.425326, 0,
-0.085408, -0.425327, -0.249998,
0, -0.5, 0,
-0.085408, -0.425327, -0.249998,
0.223605, -0.425327, -0.154506,
0.380422, -0.22361, 0.262863,
0.447211, -0.262868, 0,
0.5, 0, 0.154506,
-0.145306, -0.22361, 0.425325,
0.138198, -0.262869, 0.404506,
0, 0, 0.5,
-0.470227, -0.223608, 0,
-0.361802, -0.262868, 0.249998,
-0.5, 0, 0.154506,
-0.145306, -0.22361, -0.425325,
-0.361802, -0.262868, -0.249998,
-0.309017, 0, -0.404508,
0.380422, -0.22361, -0.262863,
0.138198, -0.262869, -0.404506,
0.309017, 0, -0.404508,
0.380422, -0.22361, 0.262863,
0.5, 0, 0.154506,
0.309017, 0, 0.404508,
-0.145306, -0.22361, 0.425325,
0, 0, 0.5,
-0.309017, 0, 0.404508,
-0.470227, -0.223608, 0,
-0.5, 0, 0.154506,
-0.5, 0, -0.154506,
-0.145306, -0.22361, -0.425325,
-0.309017, 0, -0.404508,
0, 0, -0.5,
0.380422, -0.22361, -0.262863,
0.309017, 0, -0.404508,
0.5, 0, -0.154506,
0.145306, 0.22361, 0.425325,
0.361802, 0.262868, 0.249998,
0.085408, 0.425327, 0.249998,
-0.380422, 0.22361, 0.262863,
-0.138198, 0.262869, 0.404506,
-0.223605, 0.425327, 0.154506,
-0.380422, 0.22361, -0.262863,
-0.447211, 0.262868, 0,
-0.223605, 0.425327, -0.154506,
0.145306, 0.22361, -0.425325,
-0.138198, 0.262869, -0.404506,
0.085408, 0.425327, -0.249998,
0.470227, 0.223608, 0,
0.361802, 0.262868, -0.249998,
0.276392, 0.425326, 0,
0.276392, 0.425326, 0,
0.085408, 0.425327, -0.249998,
0, 0.5, 0,
0.276392, 0.425326, 0,
0.361802, 0.262868, -0.249998,
0.085408, 0.425327, -0.249998,
0.361802, 0.262868, -0.249998,
0.145306, 0.22361, -0.425325,
0.085408, 0.425327, -0.249998,
0.085408, 0.425327, -0.249998,
-0.223605, 0.425327, -0.154506,
0, 0.5, 0,
0.085408, 0.425327, -0.249998,
-0.138198, 0.262869, -0.404506,
-0.223605, 0.425327, -0.154506,
-0.138198, 0.262869, -0.404506,
-0.380422, 0.22361, -0.262863,
-0.223605, 0.425327, -0.154506,
-0.223605, 0.425327, -0.154506,
-0.223605, 0.425327, 0.154506,
0, 0.5, 0,
-0.223605, 0.425327, -0.154506,
-0.447211, 0.262868, 0,
-0.223605, 0.425327, 0.154506,
-0.447211, 0.262868, 0,
-0.380422, 0.22361, 0.262863,
-0.223605, 0.425327, 0.154506,
-0.223605, 0.425327, 0.154506,
0.085408, 0.425327, 0.249998,
0, 0.5, 0,
-0.223605, 0.425327, 0.154506,
-0.138198, 0.262869, 0.404506,
0.085408, 0.425327, 0.249998,
-0.138198, 0.262869, 0.404506,
0.145306, 0.22361, 0.425325,
0.085408, 0.425327, 0.249998,
0.085408, 0.425327, 0.249998,
0.276392, 0.425326, 0,
0, 0.5, 0,
0.085408, 0.425327, 0.249998,
0.361802, 0.262868, 0.249998,
0.276392, 0.425326, 0,
0.361802, 0.262868, 0.249998,
0.470227, 0.223608, 0,
0.276392, 0.425326, 0,
0.5, 0, -0.154506,
0.361802, 0.262868, -0.249998,
0.470227, 0.223608, 0,
0.5, 0, -0.154506,
0.309017, 0, -0.404508,
0.361802, 0.262868, -0.249998,
0.309017, 0, -0.404508,
0.145306, 0.22361, -0.425325,
0.361802, 0.262868, -0.249998,
0, 0, -0.5,
-0.138198, 0.262869, -0.404506,
0.145306, 0.22361, -0.425325,
0, 0, -0.5,
-0.309017, 0, -0.404508,
-0.138198, 0.262869, -0.404506,
-0.309017, 0, -0.404508,
-0.380422, 0.22361, -0.262863,
-0.138198, 0.262869, -0.404506,
-0.5, 0, -0.154506,
-0.447211, 0.262868, 0,
-0.380422, 0.22361, -0.262863,
-0.5, 0, -0.154506,
-0.5, 0, 0.154506,
-0.447211, 0.262868, 0,
-0.5, 0, 0.154506,
-0.380422, 0.22361, 0.262863,
-0.447211, 0.262868, 0,
-0.309017, 0, 0.404508,
-0.138198, 0.262869, 0.404506,
-0.380422, 0.22361, 0.262863,
-0.309017, 0, 0.404508,
0, 0, 0.5,
-0.138198, 0.262869, 0.404506,
0, 0, 0.5,
0.145306, 0.22361, 0.425325,
-0.138198, 0.262869, 0.404506,
0.309017, 0, 0.404508,
0.361802, 0.262868, 0.249998,
0.145306, 0.22361, 0.425325,
0.309017, 0, 0.404508,
0.5, 0, 0.154506,
0.361802, 0.262868, 0.249998,
0.5, 0, 0.154506,
0.470227, 0.223608, 0,
0.361802, 0.262868, 0.249998,
0.309017, 0, -0.404508,
0, 0, -0.5,
0.145306, 0.22361, -0.425325,
0.309017, 0, -0.404508,
0.138198, -0.262869, -0.404506,
0, 0, -0.5,
0.138198, -0.262869, -0.404506,
-0.145306, -0.22361, -0.425325,
0, 0, -0.5,
-0.309017, 0, -0.404508,
-0.5, 0, -0.154506,
-0.380422, 0.22361, -0.262863,
-0.309017, 0, -0.404508,
-0.361802, -0.262868, -0.249998,
-0.5, 0, -0.154506,
-0.361802, -0.262868, -0.249998,
-0.470227, -0.223608, 0,
-0.5, 0, -0.154506,
-0.5, 0, 0.154506,
-0.309017, 0, 0.404508,
-0.380422, 0.22361, 0.262863,
-0.5, 0, 0.154506,
-0.361802, -0.262868, 0.249998,
-0.309017, 0, 0.404508,
-0.361802, -0.262868, 0.249998,
-0.145306, -0.22361, 0.425325,
-0.309017, 0, 0.404508,
0, 0, 0.5,
0.309017, 0, 0.404508,
0.145306, 0.22361, 0.425325,
0, 0, 0.5,
0.138198, -0.262869, 0.404506,
0.309017, 0, 0.404508,
0.138198, -0.262869, 0.404506,
0.380422, -0.22361, 0.262863,
0.309017, 0, 0.404508,
0.5, 0, 0.154506,
0.5, 0, -0.154506,
0.470227, 0.223608, 0,
0.5, 0, 0.154506,
0.447211, -0.262868, 0,
0.5, 0, -0.154506,
0.447211, -0.262868, 0,
0.380422, -0.22361, -0.262863,
0.5, 0, -0.154506,
0.223605, -0.425327, -0.154506,
0.138198, -0.262869, -0.404506,
0.380422, -0.22361, -0.262863,
0.223605, -0.425327, -0.154506,
-0.085408, -0.425327, -0.249998,
0.138198, -0.262869, -0.404506,
-0.085408, -0.425327, -0.249998,
-0.145306, -0.22361, -0.425325,
0.138198, -0.262869, -0.404506,
-0.085408, -0.425327, -0.249998,
-0.361802, -0.262868, -0.249998,
-0.145306, -0.22361, -0.425325,
-0.085408, -0.425327, -0.249998,
-0.276392, -0.425326, 0,
-0.361802, -0.262868, -0.249998,
-0.276392, -0.425326, 0,
-0.470227, -0.223608, 0,
-0.361802, -0.262868, -0.249998,
-0.276392, -0.425326, 0,
-0.361802, -0.262868, 0.249998,
-0.470227, -0.223608, 0,
-0.276392, -0.425326, 0,
-0.085408, -0.425327, 0.249998,
-0.361802, -0.262868, 0.249998,
-0.085408, -0.425327, 0.249998,
-0.145306, -0.22361, 0.425325,
-0.361802, -0.262868, 0.249998,
0.447211, -0.262868, 0,
0.223605, -0.425327, -0.154506,
0.380422, -0.22361, -0.262863,
0.447211, -0.262868, 0,
0.223605, -0.425327, 0.154506,
0.223605, -0.425327, -0.154506,
0.223605, -0.425327, 0.154506,
0, -0.5, 0,
0.223605, -0.425327, -0.154506,
-0.085408, -0.425327, 0.249998,
0.138198, -0.262869, 0.404506,
-0.145306, -0.22361, 0.425325,
-0.085408, -0.425327, 0.249998,
0.223605, -0.425327, 0.154506,
0.138198, -0.262869, 0.404506,
0.223605, -0.425327, 0.154506,
0.380422, -0.22361, 0.262863,
0.138198, -0.262869, 0.404506
]);
// prettier-ignore
let normal_arr = Float32Array.from([
0, -1, 0,
0.4115, -0.8568, 0.3107,
-0.1564, -0.8515, 0.5004,
0.7095, -0.4566, 0.5367,
0.4115, -0.8568, 0.3107,
0.8415, -0.5402, 0,
0, -1, 0,
-0.1564, -0.8515, 0.5004,
-0.5101, -0.8601, 0,
0, -1, 0,
-0.5101, -0.8601, 0,
-0.1564, -0.8515, -0.5004,
0, -1, 0,
-0.1564, -0.8515, -0.5004,
0.4115, -0.8568, -0.3107,
0.7095, -0.4566, 0.5367,
0.8415, -0.5402, 0,
0.9474, -0.0004, 0.3199,
-0.2662, -0.4485, 0.8532,
0.2529, -0.5271, 0.8113,
0, 0, 1,
-0.887, -0.4617, 0,
-0.6736, -0.5351, 0.5098,
-0.9474, 0.0004, 0.3199,
-0.2662, -0.4485, -0.8532,
-0.6736, -0.5351, -0.5098,
-0.5727, -0.0005, -0.8197,
0.7095, -0.4566, -0.5367,
0.2529, -0.5271, -0.8113,
0.5727, 0.0005, -0.8197,
0.7095, -0.4566, 0.5367,
0.9474, -0.0004, 0.3199,
0.5727, 0.0005, 0.8197,
-0.2662, -0.4485, 0.8532,
0, 0, 1,
-0.5727, -0.0005, 0.8197,
-0.887, -0.4617, 0,
-0.9474, 0.0004, 0.3199,
-0.9474, 0.0004, -0.3199,
-0.2662, -0.4485, -0.8532,
-0.5727, -0.0005, -0.8197,
0, 0, -1,
0.7095, -0.4566, -0.5367,
0.5727, 0.0005, -0.8197,
0.9474, -0.0004, -0.3199,
0.2662, 0.4485, 0.8532,
0.6736, 0.5351, 0.5098,
0.1564, 0.8515, 0.5004,
-0.7095, 0.4566, 0.5367,
-0.2529, 0.5271, 0.8113,
-0.4115, 0.8568, 0.3107,
-0.7095, 0.4566, -0.5367,
-0.8415, 0.5402, 0,
-0.4115, 0.8568, -0.3107,
0.2662, 0.4485, -0.8532,
-0.2529, 0.5271, -0.8113,
0.1564, 0.8515, -0.5004,
0.887, 0.4617, 0,
0.6736, 0.5351, -0.5098,
0.5101, 0.8601, 0,
0.5101, 0.8601, 0,
0.1564, 0.8515, -0.5004,
0, 1, 0,
0.5101, 0.8601, 0,
0.6736, 0.5351, -0.5098,
0.1564, 0.8515, -0.5004,
0.6736, 0.5351, -0.5098,
0.2662, 0.4485, -0.8532,
0.1564, 0.8515, -0.5004,
0.1564, 0.8515, -0.5004,
-0.4115, 0.8568, -0.3107,
0, 1, 0,
0.1564, 0.8515, -0.5004,
-0.2529, 0.5271, -0.8113,
-0.4115, 0.8568, -0.3107,
-0.2529, 0.5271, -0.8113,
-0.7095, 0.4566, -0.5367,
-0.4115, 0.8568, -0.3107,
-0.4115, 0.8568, -0.3107,
-0.4115, 0.8568, 0.3107,
0, 1, 0,
-0.4115, 0.8568, -0.3107,
-0.8415, 0.5402, 0,
-0.4115, 0.8568, 0.3107,
-0.8415, 0.5402, 0,
-0.7095, 0.4566, 0.5367,
-0.4115, 0.8568, 0.3107,
-0.4115, 0.8568, 0.3107,
0.1564, 0.8515, 0.5004,
0, 1, 0,
-0.4115, 0.8568, 0.3107,
-0.2529, 0.5271, 0.8113,
0.1564, 0.8515, 0.5004,
-0.2529, 0.5271, 0.8113,
0.2662, 0.4485, 0.8532,
0.1564, 0.8515, 0.5004,
0.1564, 0.8515, 0.5004,
0.5101, 0.8601, 0,
0, 1, 0,
0.1564, 0.8515, 0.5004,
0.6736, 0.5351, 0.5098,
0.5101, 0.8601, 0,
0.6736, 0.5351, 0.5098,
0.887, 0.4617, 0,
0.5101, 0.8601, 0,
0.9474, -0.0004, -0.3199,
0.6736, 0.5351, -0.5098,
0.887, 0.4617, 0,
0.9474, -0.0004, -0.3199,
0.5727, 0.0005, -0.8197,
0.6736, 0.5351, -0.5098,
0.5727, 0.0005, -0.8197,
0.2662, 0.4485, -0.8532,
0.6736, 0.5351, -0.5098,
0, 0, -1,
-0.2529, 0.5271, -0.8113,
0.2662, 0.4485, -0.8532,
0, 0, -1,
-0.5727, -0.0005, -0.8197,
-0.2529, 0.5271, -0.8113,
-0.5727, -0.0005, -0.8197,
-0.7095, 0.4566, -0.5367,
-0.2529, 0.5271, -0.8113,
-0.9474, 0.0004, -0.3199,
-0.8415, 0.5402, 0,
-0.7095, 0.4566, -0.5367,
-0.9474, 0.0004, -0.3199,
-0.9474, 0.0004, 0.3199,
-0.8415, 0.5402, 0,
-0.9474, 0.0004, 0.3199,
-0.7095, 0.4566, 0.5367,
-0.8415, 0.5402, 0,
-0.5727, -0.0005, 0.8197,
-0.2529, 0.5271, 0.8113,
-0.7095, 0.4566, 0.5367,
-0.5727, -0.0005, 0.8197,
0, 0, 1,
-0.2529, 0.5271, 0.8113,
0, 0, 1,
0.2662, 0.4485, 0.8532,
-0.2529, 0.5271, 0.8113,
0.5727, 0.0005, 0.8197,
0.6736, 0.5351, 0.5098,
0.2662, 0.4485, 0.8532,
0.5727, 0.0005, 0.8197,
0.9474, -0.0004, 0.3199,
0.6736, 0.5351, 0.5098,
0.9474, -0.0004, 0.3199,
0.887, 0.4617, 0,
0.6736, 0.5351, 0.5098,
0.5727, 0.0005, -0.8197,
0, 0, -1,
0.2662, 0.4485, -0.8532,
0.5727, 0.0005, -0.8197,
0.2529, -0.5271, -0.8113,
0, 0, -1,
0.2529, -0.5271, -0.8113,
-0.2662, -0.4485, -0.8532,
0, 0, -1,
-0.5727, -0.0005, -0.8197,
-0.9474, 0.0004, -0.3199,
-0.7095, 0.4566, -0.5367,
-0.5727, -0.0005, -0.8197,
-0.6736, -0.5351, -0.5098,
-0.9474, 0.0004, -0.3199,
-0.6736, -0.5351, -0.5098,
-0.887, -0.4617, 0,
-0.9474, 0.0004, -0.3199,
-0.9474, 0.0004, 0.3199,
-0.5727, -0.0005, 0.8197,
-0.7095, 0.4566, 0.5367,
-0.9474, 0.0004, 0.3199,
-0.6736, -0.5351, 0.5098,
-0.5727, -0.0005, 0.8197,
-0.6736, -0.5351, 0.5098,
-0.2662, -0.4485, 0.8532,
-0.5727, -0.0005, 0.8197,
0, 0, 1,
0.5727, 0.0005, 0.8197,
0.2662, 0.4485, 0.8532,
0, 0, 1,
0.2529, -0.5271, 0.8113,
0.5727, 0.0005, 0.8197,
0.2529, -0.5271, 0.8113,
0.7095, -0.4566, 0.5367,
0.5727, 0.0005, 0.8197,
0.9474, -0.0004, 0.3199,
0.9474, -0.0004, -0.3199,
0.887, 0.4617, 0,
0.9474, -0.0004, 0.3199,
0.8415, -0.5402, 0,
0.9474, -0.0004, -0.3199,
0.8415, -0.5402, 0,
0.7095, -0.4566, -0.5367,
0.9474, -0.0004, -0.3199,
0.4115, -0.8568, -0.3107,
0.2529, -0.5271, -0.8113,
0.7095, -0.4566, -0.5367,
0.4115, -0.8568, -0.3107,
-0.1564, -0.8515, -0.5004,
0.2529, -0.5271, -0.8113,
-0.1564, -0.8515, -0.5004,
-0.2662, -0.4485, -0.8532,
0.2529, -0.5271, -0.8113,
-0.1564, -0.8515, -0.5004,
-0.6736, -0.5351, -0.5098,
-0.2662, -0.4485, -0.8532,
-0.1564, -0.8515, -0.5004,
-0.5101, -0.8601, 0,
-0.6736, -0.5351, -0.5098,
-0.5101, -0.8601, 0,
-0.887, -0.4617, 0,
-0.6736, -0.5351, -0.5098,
-0.5101, -0.8601, 0,
-0.6736, -0.5351, 0.5098,
-0.887, -0.4617, 0,
-0.5101, -0.8601, 0,
-0.1564, -0.8515, 0.5004,
-0.6736, -0.5351, 0.5098,
-0.1564, -0.8515, 0.5004,
-0.2662, -0.4485, 0.8532,
-0.6736, -0.5351, 0.5098,
0.8415, -0.5402, 0,
0.4115, -0.8568, -0.3107,
0.7095, -0.4566, -0.5367,
0.8415, -0.5402, 0,
0.4115, -0.8568, 0.3107,
0.4115, -0.8568, -0.3107,
0.4115, -0.8568, 0.3107,
0, -1, 0,
0.4115, -0.8568, -0.3107,
-0.1564, -0.8515, 0.5004,
0.2529, -0.5271, 0.8113,
-0.2662, -0.4485, 0.8532,
-0.1564, -0.8515, 0.5004,
0.4115, -0.8568, 0.3107,
0.2529, -0.5271, 0.8113,
0.4115, -0.8568, 0.3107,
0.7095, -0.4566, 0.5367,
0.2529, -0.5271, 0.8113
]);
// prettier-ignore
let texcoord_arr = Float32Array.from([
0.998999, 0.284713,
0.998999, 0.144859,
0.859145, 0.284713,
0.42757, 0.141856,
0.42757, 0.002002,
0.287716, 0.141856,
0.284713, 0.998999,
0.144859, 0.998999,
0.284713, 0.859145,
0.143858, 0.858144,
0.143858, 0.997998,
0.283712, 0.858144,
0.858144, 0.143858,
0.997998, 0.143858,
0.858144, 0.283712,
0.284713, 0.570428,
0.144859, 0.570428,
0.284713, 0.430573,
0.570428, 0.713285,
0.570428, 0.573431,
0.430573, 0.713285,
0.42757, 0.713285,
0.42757, 0.573431,
0.287716, 0.713285,
0.713285, 0.141856,
0.713285, 0.002002,
0.573431, 0.141856,
0.570428, 0.42757,
0.570428, 0.287716,
0.430573, 0.42757,
0.284713, 0.713285,
0.284713, 0.573431,
0.144859, 0.713285,
0.141856, 0.713285,
0.002002, 0.713285,
0.141856, 0.573431,
0.42757, 0.570428,
0.42757, 0.430573,
0.287716, 0.570428,
0.856142, 0.141856,
0.856142, 0.002002,
0.716288, 0.141856,
0.713285, 0.570428,
0.573431, 0.570428,
0.713285, 0.430573,
0.141856, 0.998999,
0.141856, 0.859145,
0.002002, 0.998999,
0.141856, 0.141856,
0.141856, 0.002002,
0.002002, 0.141856,
0.998999, 0.141856,
0.859145, 0.141856,
0.998999, 0.002002,
0.713285, 0.856142,
0.713285, 0.716288,
0.573431, 0.856142,
0.42757, 0.42757,
0.287716, 0.42757,
0.42757, 0.287716,
0.856142, 0.430573,
0.716288, 0.570428,
0.856142, 0.570428,
0.570428, 0.856142,
0.430573, 0.856142,
0.570428, 0.716288,
0.001001, 0.997998,
0.001001, 0.858144,
0.140855, 0.858144,
0.716288, 0.856142,
0.856142, 0.716288,
0.856142, 0.856142,
0.856142, 0.573431,
0.856142, 0.713285,
0.716288, 0.713285,
0.570428, 0.002002,
0.570428, 0.141856,
0.430573, 0.141856,
0.284713, 0.144859,
0.144859, 0.284713,
0.284713, 0.284713,
0.141856, 0.42757,
0.141856, 0.287716,
0.002002, 0.42757,
0.997998, 0.001001,
0.858144, 0.001001,
0.858144, 0.140855,
0.715287, 0.855141,
0.855141, 0.715287,
0.715287, 0.715287,
0.855141, 0.57243,
0.715287, 0.57243,
0.715287, 0.712284,
0.57243, 0.855141,
0.57243, 0.715287,
0.712284, 0.715287,
0.855141, 0.429572,
0.715287, 0.569427,
0.715287, 0.429572,
0.429572, 0.855141,
0.569427, 0.715287,
0.429572, 0.715287,
0.002002, 0.570428,
0.141856, 0.570428,
0.141856, 0.430573,
0.002002, 0.856142,
0.141856, 0.716288,
0.141856, 0.856142,
0.713285, 0.144859,
0.573431, 0.284713,
0.713285, 0.284713,
0.716288, 0.284713,
0.856142, 0.284713,
0.856142, 0.144859,
0.144859, 0.856142,
0.284713, 0.716288,
0.284713, 0.856142,
0.430573, 0.570428,
0.570428, 0.430573,
0.570428, 0.570428,
0.287716, 0.856142,
0.42757, 0.856142,
0.42757, 0.716288,
0.856142, 0.287716,
0.716288, 0.42757,
0.856142, 0.42757,
0.430573, 0.284713,
0.570428, 0.284713,
0.570428, 0.144859,
0.715287, 0.426569,
0.715287, 0.286715,
0.855141, 0.286715,
0.426569, 0.715287,
0.286715, 0.855141,
0.286715, 0.715287,
0.713285, 0.573431,
0.573431, 0.713285,
0.713285, 0.713285,
0.283712, 0.715287,
0.143858, 0.715287,
0.143858, 0.855141,
0.855141, 0.143858,
0.715287, 0.283712,
0.715287, 0.143858,
0.573431, 0.42757,
0.713285, 0.287716,
0.713285, 0.42757,
0.140855, 0.715287,
0.001001, 0.715287,
0.001001, 0.855141,
0.715287, 0.140855,
0.855141, 0.001001,
0.715287, 0.001001,
0.57243, 0.712284,
0.57243, 0.57243,
0.712284, 0.57243,
0.429572, 0.712284,
0.429572, 0.57243,
0.569427, 0.57243,
0.712284, 0.429572,
0.57243, 0.569427,
0.57243, 0.429572,
0.712284, 0.286715,
0.57243, 0.286715,
0.57243, 0.426569,
0.286715, 0.712284,
0.286715, 0.57243,
0.426569, 0.57243,
0.143858, 0.712284,
0.283712, 0.57243,
0.143858, 0.57243,
0.57243, 0.283712,
0.57243, 0.143858,
0.712284, 0.143858,
0.57243, 0.140855,
0.57243, 0.001001,
0.712284, 0.001001,
0.140855, 0.57243,
0.001001, 0.712284,
0.001001, 0.57243,
0.569427, 0.429572,
0.429572, 0.429572,
0.429572, 0.569427,
0.429572, 0.426569,
0.429572, 0.286715,
0.569427, 0.286715,
0.286715, 0.569427,
0.426569, 0.429572,
0.286715, 0.429572,
0.569427, 0.143858,
0.429572, 0.283712,
0.429572, 0.143858,
0.283712, 0.429572,
0.143858, 0.429572,
0.143858, 0.569427,
0.569427, 0.001001,
0.429572, 0.140855,
0.429572, 0.001001,
0.002002, 0.284713,
0.141856, 0.144859,
0.141856, 0.284713,
0.144859, 0.141856,
0.284713, 0.141856,
0.284713, 0.002002,
0.287716, 0.284713,
0.42757, 0.144859,
0.42757, 0.284713,
0.284713, 0.287716,
0.284713, 0.42757,
0.144859, 0.42757,
0.001001, 0.569427,
0.001001, 0.429572,
0.140855, 0.429572,
0.286715, 0.426569,
0.426569, 0.286715,
0.286715, 0.286715,
0.143858, 0.286715,
0.143858, 0.426569,
0.283712, 0.286715,
0.426569, 0.143858,
0.286715, 0.143858,
0.286715, 0.283712,
0.426569, 0.001001,
0.286715, 0.140855,
0.286715, 0.001001,
0.001001, 0.426569,
0.001001, 0.286715,
0.140855, 0.286715,
0.143858, 0.283712,
0.143858, 0.143858,
0.283712, 0.143858,
0.283712, 0.001001,
0.143858, 0.140855,
0.143858, 0.001001,
0.001001, 0.283712,
0.140855, 0.143858,
0.001001, 0.143858,
0.140855, 0.001001,
0.001001, 0.001001,
0.001001, 0.140855
]);
// prettier-ignore
// Per-vertex weight data; intentionally empty for this mesh because the
// OBJ export format cannot carry it (see the note inside).
let weights_arr = Float32Array.from([
    // Weights must be assigned manually for now b/c OBJ doesn't support them.
    // WARNING: Remaking the mesh file will overwrite your weights here.
]);
// prettier-ignore
// Triangle indices, three per face. The attribute arrays above store one
// unshared vertex per triangle corner (240 total), so the index list is
// simply the vertex sequence 239..0 in reverse.
let index_arr = Uint16Array.from([
    239, 238, 237,
    236, 235, 234,
    233, 232, 231,
    230, 229, 228,
    227, 226, 225,
    224, 223, 222,
    221, 220, 219,
    218, 217, 216,
    215, 214, 213,
    212, 211, 210,
    209, 208, 207,
    206, 205, 204,
    203, 202, 201,
    200, 199, 198,
    197, 196, 195,
    194, 193, 192,
    191, 190, 189,
    188, 187, 186,
    185, 184, 183,
    182, 181, 180,
    179, 178, 177,
    176, 175, 174,
    173, 172, 171,
    170, 169, 168,
    167, 166, 165,
    164, 163, 162,
    161, 160, 159,
    158, 157, 156,
    155, 154, 153,
    152, 151, 150,
    149, 148, 147,
    146, 145, 144,
    143, 142, 141,
    140, 139, 138,
    137, 136, 135,
    134, 133, 132,
    131, 130, 129,
    128, 127, 126,
    125, 124, 123,
    122, 121, 120,
    119, 118, 117,
    116, 115, 114,
    113, 112, 111,
    110, 109, 108,
    107, 106, 105,
    104, 103, 102,
    101, 100, 99,
    98, 97, 96,
    95, 94, 93,
    92, 91, 90,
    89, 88, 87,
    86, 85, 84,
    83, 82, 81,
    80, 79, 78,
    77, 76, 75,
    74, 73, 72,
    71, 70, 69,
    68, 67, 66,
    65, 64, 63,
    62, 61, 60,
    59, 58, 57,
    56, 55, 54,
    53, 52, 51,
    50, 49, 48,
    47, 46, 45,
    44, 43, 42,
    41, 40, 39,
    38, 37, 36,
    35, 34, 33,
    32, 31, 30,
    29, 28, 27,
    26, 25, 24,
    23, 22, 21,
    20, 19, 18,
    17, 16, 15,
    14, 13, 12,
    11, 10, 9,
    8, 7, 6,
    5, 4, 3,
    2, 1, 0
]);
import {
AppAction,
EventName,
EventNames,
GenericConditionalTransferAppState,
ProtocolEventMessage,
SupportedApplicationNames,
SyncMessage,
UninstallFailedMessage,
UninstallMessage,
WatcherEventData,
WatcherEvents,
} from "@connext/types";
import { Injectable, OnModuleInit } from "@nestjs/common";
import { AppRegistryService } from "../appRegistry/appRegistry.service";
import { CFCoreService } from "../cfCore/cfCore.service";
import { ChannelService, RebalanceType } from "../channel/channel.service";
import { LoggerService } from "../logger/logger.service";
import { AppActionsService } from "../appRegistry/appActions.service";
import { AppInstanceRepository } from "../appInstance/appInstance.repository";
import { ChannelRepository } from "../channel/channel.repository";
import { TransferRepository } from "../transfer/transfer.repository";
import { ConfigService } from "../config/config.service";
const {
CONDITIONAL_TRANSFER_CREATED_EVENT,
CONDITIONAL_TRANSFER_UNLOCKED_EVENT,
CONDITIONAL_TRANSFER_FAILED_EVENT,
WITHDRAWAL_CONFIRMED_EVENT,
WITHDRAWAL_FAILED_EVENT,
WITHDRAWAL_STARTED_EVENT,
CREATE_CHANNEL_EVENT,
SETUP_FAILED_EVENT,
DEPOSIT_CONFIRMED_EVENT,
DEPOSIT_FAILED_EVENT,
DEPOSIT_STARTED_EVENT,
INSTALL_EVENT,
INSTALL_FAILED_EVENT,
PROPOSE_INSTALL_EVENT,
PROPOSE_INSTALL_FAILED_EVENT,
PROTOCOL_MESSAGE_EVENT,
REJECT_INSTALL_EVENT,
SYNC,
SYNC_FAILED_EVENT,
UNINSTALL_EVENT,
UNINSTALL_FAILED_EVENT,
UPDATE_STATE_EVENT,
UPDATE_STATE_FAILED_EVENT,
} = EventNames;
type ProtocolCallback = {
[index in keyof typeof EventNames]: (data: ProtocolEventMessage<index>) => Promise<any> | void;
};
type WatcherCallback = {
[index in keyof typeof WatcherEvents]: (data: WatcherEventData[index]) => Promise<any> | void;
};
type CallbackStruct = WatcherCallback | ProtocolCallback;
@Injectable()
export default class ListenerService implements OnModuleInit {
constructor(
private readonly appRegistryService: AppRegistryService,
private readonly appActionsService: AppActionsService,
private readonly cfCoreService: CFCoreService,
private readonly channelService: ChannelService,
private readonly configService: ConfigService,
private readonly log: LoggerService,
private readonly appInstanceRepository: AppInstanceRepository,
private readonly channelRepository: ChannelRepository,
private readonly transferRepository: TransferRepository,
) {
this.log.setContext("ListenerService");
}
// TODO: better typing
logEvent<T extends EventName>(event: T, res: any): void {
if (Object.keys(WatcherEvents).includes(event)) {
this.log.debug(`${event} event caught, data: ${JSON.stringify(res)}`);
} else {
this.log.debug(
`${event} event fired from ${res && res.from ? res.from : null}, data: ${
res ? JSON.stringify(res.data) : `event did not have a result`
}`,
);
}
}
getEventListeners(): CallbackStruct {
return {
CONDITIONAL_TRANSFER_CREATED_EVENT: (data): void => {
this.logEvent(CONDITIONAL_TRANSFER_CREATED_EVENT, data);
},
CONDITIONAL_TRANSFER_UNLOCKED_EVENT: (data): void => {
this.logEvent(CONDITIONAL_TRANSFER_UNLOCKED_EVENT, data);
},
CONDITIONAL_TRANSFER_FAILED_EVENT: (data): void => {
this.logEvent(CONDITIONAL_TRANSFER_FAILED_EVENT, data);
},
CREATE_CHANNEL_EVENT: async (data): Promise<void> => {
this.logEvent(CREATE_CHANNEL_EVENT, data);
await this.channelService.makeAvailable(data);
},
SETUP_FAILED_EVENT: (data): void => {
this.logEvent(SETUP_FAILED_EVENT, data);
},
DEPOSIT_CONFIRMED_EVENT: (data): void => {
this.logEvent(DEPOSIT_CONFIRMED_EVENT, data);
},
DEPOSIT_FAILED_EVENT: (data): void => {
this.logEvent(DEPOSIT_FAILED_EVENT, data);
},
DEPOSIT_STARTED_EVENT: (data): void => {
this.logEvent(DEPOSIT_STARTED_EVENT, data);
},
INSTALL_EVENT: async (data): Promise<void> => {
this.logEvent(INSTALL_EVENT, data);
},
INSTALL_FAILED_EVENT: (data): void => {
this.logEvent(INSTALL_FAILED_EVENT, data);
},
PROPOSE_INSTALL_EVENT: async (data): Promise<void> => {
if (data.from === this.cfCoreService.cfCore.publicIdentifier) {
this.log.debug(`Received proposal from our own node. Doing nothing.`);
return;
}
this.logEvent(PROPOSE_INSTALL_EVENT, data);
await this.appRegistryService.installOrReject(
data.data.appInstanceId,
data.data.params as any,
data.from,
);
},
PROPOSE_INSTALL_FAILED_EVENT: (data): void => {
this.logEvent(PROPOSE_INSTALL_FAILED_EVENT, data);
},
PROTOCOL_MESSAGE_EVENT: (data): void => {
this.logEvent(PROTOCOL_MESSAGE_EVENT, data);
},
REJECT_INSTALL_EVENT: (data): void => {
this.logEvent(REJECT_INSTALL_EVENT, data);
},
SYNC: (data: SyncMessage): void => {
this.logEvent(SYNC, data);
},
SYNC_FAILED_EVENT: (data): void => {
this.logEvent(SYNC_FAILED_EVENT, data);
},
UNINSTALL_EVENT: async (data): Promise<void> => {
this.logEvent(UNINSTALL_EVENT, data);
await this.handleUninstall(data);
},
UNINSTALL_FAILED_EVENT: async (data): Promise<void> => {
this.logEvent(UNINSTALL_FAILED_EVENT, data);
await this.handleUninstallFailed(data);
},
UPDATE_STATE_EVENT: async (data): Promise<void> => {
if (data.from === this.cfCoreService.cfCore.publicIdentifier) {
this.log.debug(`Received update state where we were initiator. Doing nothing.`);
return;
}
// if this is for a recipient of a transfer
this.logEvent(UPDATE_STATE_EVENT, data);
const { newState, appIdentityHash, action } = data.data;
const app = await this.cfCoreService.getAppInstance(appIdentityHash);
const appRegistryInfo = this.cfCoreService.getAppInfoByAppDefinitionAddress(
app.appDefinition,
);
if (!appRegistryInfo) {
throw new Error(
`Could not find registry info for updated app ${data.data.appIdentityHash}`,
);
}
await this.appActionsService.handleAppAction(
appRegistryInfo.name as SupportedApplicationNames,
app,
newState as any, // AppState (excluding simple swap app)
action as AppAction,
data.from,
);
},
UPDATE_STATE_FAILED_EVENT: (data): void => {
this.logEvent(UPDATE_STATE_FAILED_EVENT, data);
},
WITHDRAWAL_FAILED_EVENT: (data): void => {
this.logEvent(WITHDRAWAL_FAILED_EVENT, data);
},
WITHDRAWAL_CONFIRMED_EVENT: (data): void => {
this.logEvent(WITHDRAWAL_CONFIRMED_EVENT, data);
},
WITHDRAWAL_STARTED_EVENT: (data): void => {
this.logEvent(WITHDRAWAL_STARTED_EVENT, data);
},
// watcher events
CHALLENGE_UPDATED_EVENT: (msg) => {
this.logEvent(WatcherEvents.CHALLENGE_UPDATED_EVENT, msg);
},
STATE_PROGRESSED_EVENT: (msg) => {
this.logEvent(WatcherEvents.STATE_PROGRESSED_EVENT, msg);
},
CHALLENGE_PROGRESSED_EVENT: (msg) => {
this.logEvent(WatcherEvents.CHALLENGE_PROGRESSED_EVENT, msg);
},
CHALLENGE_PROGRESSION_FAILED_EVENT: (msg) => {
this.logEvent(WatcherEvents.CHALLENGE_PROGRESSION_FAILED_EVENT, msg);
},
CHALLENGE_OUTCOME_FAILED_EVENT: (msg) => {
this.logEvent(WatcherEvents.CHALLENGE_OUTCOME_FAILED_EVENT, msg);
},
CHALLENGE_OUTCOME_SET_EVENT: (msg) => {
this.logEvent(WatcherEvents.CHALLENGE_OUTCOME_SET_EVENT, msg);
},
CHALLENGE_COMPLETED_EVENT: (msg) => {
this.logEvent(WatcherEvents.CHALLENGE_COMPLETED_EVENT, msg);
},
CHALLENGE_COMPLETION_FAILED_EVENT: (msg) => {
this.logEvent(WatcherEvents.CHALLENGE_COMPLETION_FAILED_EVENT, msg);
},
CHALLENGE_CANCELLED_EVENT: (msg) => {
this.logEvent(WatcherEvents.CHALLENGE_CANCELLED_EVENT, msg);
},
CHALLENGE_CANCELLATION_FAILED_EVENT: (msg) => {
this.logEvent(WatcherEvents.CHALLENGE_CANCELLATION_FAILED_EVENT, msg);
},
};
}
async handleUninstallFailed(data: UninstallFailedMessage) {
const { params } = data.data;
const receiverApp = await this.appInstanceRepository.findByIdentityHash(params.appIdentityHash);
const nodeSignerAddress = await this.configService.getSignerAddress();
if (
receiverApp?.meta?.paymentId &&
(receiverApp?.latestState as GenericConditionalTransferAppState).coinTransfers[1].to !==
nodeSignerAddress
) {
this.log.warn(
`Uninstall failed, removing stored action for paymentId ${receiverApp.meta.paymentId}`,
);
await this.transferRepository.removeTransferAction(receiverApp.meta.paymentId);
}
}
async handleUninstall(data: UninstallMessage) {
const { action, uninstalledApp, multisigAddress, appIdentityHash } = data.data;
if (!multisigAddress) {
this.log.error(
`Unexpected error - no multisigAddress found in uninstall event data: ${appIdentityHash}`,
);
return;
}
const channel = await this.channelRepository.findByMultisigAddressOrThrow(multisigAddress);
if (action) {
const appRegistryInfo = this.cfCoreService.getAppInfoByAppDefinitionAddress(
uninstalledApp.appDefinition,
);
await this.appActionsService.handleAppAction(
appRegistryInfo.name as SupportedApplicationNames,
uninstalledApp,
uninstalledApp.latestState as any, // AppState (excluding simple swap app)
action as AppAction,
data.from,
);
}
const assetIdResponder = (
await this.appInstanceRepository.findByIdentityHashOrThrow(data.data.appIdentityHash)
).responderDepositAssetId;
try {
await this.channelService.rebalance(
channel.multisigAddress,
assetIdResponder,
RebalanceType.RECLAIM,
);
} catch (e) {
this.log.error(`Caught error rebalancing channel ${channel.multisigAddress}: ${e.stack}`);
}
}
onModuleInit(): void {
Object.entries(
this.getEventListeners(),
).forEach(
([event, callback]: [any, (data: ProtocolEventMessage<any>) => void | Promise<void>]) =>
this.cfCoreService.registerCfCoreListener(event, callback),
);
}
} | the_stack |
import { mocked } from "ts-jest/utils";
import { RichTextEditor } from "../../src/rich-text/editor";
import * as mdp from "../../src/shared/markdown-parser";
import "../matchers";
import { normalize } from "../test-helpers";
// mock the markdown-parser for testing
// NOTE: jest hoists this call above the imports at transform time, so the
// `mdp` import above already refers to the auto-mocked module.
jest.mock("../../src/shared/markdown-parser");
// set the typings for easy function mocking
const mockedMdp = mocked(mdp, true);
// import the "actual" buildMarkdownParser function to use in our mock implementations
const { buildMarkdownParser } = jest.requireActual<typeof mdp>(
    "../../src/shared/markdown-parser"
);
// we need to mock buildMarkdownParser for only a single test, so set default "mock" to the actual function
mockedMdp.buildMarkdownParser.mockImplementation(buildMarkdownParser);
/** Returns the editor's rendered content as an HTML string. */
function editorDom(editorView: RichTextEditor): string {
    const { innerHTML } = editorView.dom;
    return innerHTML;
}
/**
 * Creates a RichTextEditor over a detached element, preconfigured with a
 * single link-preview provider that matches example.com URLs and renders
 * the URL text into a div.
 */
function richView(markdownInput: string) {
    const host = document.createElement("div");
    return new RichTextEditor(host, markdownInput, {
        linkPreviewProviders: [
            {
                domainTest: /example.com/,
                renderer: (url) => {
                    const preview = document.createElement("div");
                    preview.innerText = url;
                    return Promise.resolve(preview);
                },
            },
        ],
    });
}
describe("rich text editor view", () => {
const markdownRenderingTestData = [
["bold", "**bold**", "<p><strong>bold</strong></p>"],
["emphasis", "*emphasized*", "<p><em>emphasized</em></p>"],
[
"bold + emphasis",
"***bold and emphasized***",
"<p><em><strong>bold and emphasized</strong></em></p>",
],
["bold with underscore", "__bold__", "<p><strong>bold</strong></p>"],
[
"emphasis with underscore",
"_emphasized_",
"<p><em>emphasized</em></p>",
],
[
"bold + emphasis with underscore",
"___bold and emphasized___",
"<p><em><strong>bold and emphasized</strong></em></p>",
],
[
"links, non-oneboxed",
'here is [a link](https://example.com "link title here")',
'<p>here is <a href="https://example.com" title="link title here">a link</a></p>',
],
[
"inline code",
"some `code` here",
"<p>some <code>code</code> here</p>",
],
[
"strikethrough",
"~~strikethrough~~",
"<p><del>strikethrough</del></p>",
],
["headlines 1", "# headline", `<h1>headline</h1>`],
["headlines 2", "## headline", `<h2>headline</h2>`],
["headlines 3", "### headline", `<h3>headline</h3>`],
["headlines 4", "#### headline", `<h4>headline</h4>`],
["headlines 5", "##### headline", `<h5>headline</h5>`],
["headlines 6", "###### headline", `<h6>headline</h6>`],
[
"blockquotes",
"> blockquote",
"<blockquote><p>blockquote</p></blockquote>",
],
[
"ordered lists",
"1. some\n2. list",
`<ol data-tight="true"><li><p>some</p></li><li><p>list</p></li></ol>`,
],
[
"unordered lists",
"- some\n- list",
`<ul data-tight="true"><li><p>some</p></li><li><p>list</p></li></ul>`,
],
[
"softbreaks",
"test1\ntest2",
// TODO there should be a space inside the softbreak, but the normalizer kills it
'<p>test1<span softbreak=""> </span>test2</p>',
],
];
describe("markdown rendering", () => {
it.each(markdownRenderingTestData)(
"should render %s",
(name, markdown, expectedHtml) => {
const richEditorView = richView(markdown);
expect(editorDom(richEditorView)).toEqual(
normalize(expectedHtml)
);
}
);
it("should render images as node view", () => {
const markdown =
'';
const richEditorView = richView(markdown);
const img = richEditorView.dom.querySelector("img");
expect(img.alt).toEqual("some image");
expect(img.src).toEqual("https://example.com/some.png");
expect(img.title).toEqual("image title here");
});
it("should render code blocks as node view", () => {
const markdown = '```\nconsole.log("hello, world!")\n```';
const richEditorView = richView(markdown);
const preElement = richEditorView.dom.querySelector("pre");
const expectedCodeHtml = `<code class="content-dom">console.<span class="hljs-built_in">log</span>(<span class="hljs-string">"hello, world!"</span>)</code>`;
expect(preElement.innerHTML).toEqual(normalize(expectedCodeHtml));
});
});
    // see https://meta.stackexchange.com/questions/1777/what-html-tags-are-allowed-on-stack-exchange-sites
// [tag, markdown/html input, expected editor DOM] triples for the HTML tags
// the editor supports in markdown mode. Expectations are compared after
// normalize(), and exact strings matter — treat this as fixture data.
const supportedHtmlTagsTestData = [
    [
        "a",
        "link here: <a href='https://example.com'>link</a>",
        `<p>link here: <a href='https://example.com'>link</a></p>`,
    ],
    ["b", "<b>bold</b>", `<p><strong>bold</strong></p>`],
    [
        "blockquote",
        "<blockquote>quote here</blockquote>",
        `<blockquote><p>quote here</p></blockquote>`,
    ],
    ["code", "<code>rm -rf /</code>", `<p><code>rm -rf /</code></p>`],
    ["del", "<del>deleted</del>", `<p><del>deleted</del></p>`],
    ["em", "<em>emphasis</em>", `<p><em>emphasis</em></p>`],
    ["h1", "<h1>text</h1>", `<h1>text</h1>`],
    ["h2", "<h2>text</h2>", `<h2>text</h2>`],
    ["h3", "<h3>text</h3>", `<h3>text</h3>`],
    ["h4", "<h4>text</h4>", `<h4>text</h4>`],
    ["h5", "<h5>text</h5>", `<h5>text</h5>`],
    ["h6", "<h6>text</h6>", `<h6>text</h6>`],
    // note: presentational tags are normalized to their semantic equivalents
    // (i -> em, b -> strong, s/strike -> del)
    ["i", "<i>emphasis</i>", `<p><em>emphasis</em></p>`],
    ["kbd", "<kbd>Enter</kbd>", `<p><kbd>Enter</kbd></p>`],
    [
        "ul, li",
        "<ul><li>item</li></ul>",
        // block-level lists render wrapped in an html_block widget container
        `<div class="html_block ProseMirror-widget">
<ul>
<li>item</li>
</ul>
</div>`,
    ],
    [
        "ol, li",
        "<ol><li>item</li></ol>",
        `<div class="html_block ProseMirror-widget">
<ol>
<li>item</li>
</ol>
</div>`,
    ],
    ["p", "<p>paragraph</p>", `<p>paragraph</p>`],
    ["pre", "<pre>test</pre>", `<pre><p>test</p></pre>`],
    ["s", "<s>deleted</s>", `<p><del>deleted</del></p>`],
    ["sub", "<sub>subscript</sub>", `<p><sub>subscript</sub></p>`],
    ["sup", "<sup>superscript</sup>", `<p><sup>superscript</sup></p>`],
    ["strong", "<strong>strong</strong>", `<p><strong>strong</strong></p>`],
    ["strike", "<strike>text</strike>", `<p><del>text</del></p>`],
    ["br", "<br>", `<p><br><br></p>`],
    ["hr", "<hr>", `<div><hr></div>`],
];
// Exercises raw HTML embedded in markdown: the supported tags from the table
// above, escaping of unsupported tags, split html blocks, and markdown nested
// inside inline vs block HTML.
describe("html in markdown mode", () => {
    it.each(supportedHtmlTagsTestData)(
        "should render %s tag (test #%#)",
        (name, markdown, expectedHtml) => {
            const richEditorView = richView(markdown);
            const expected = normalize(expectedHtml);
            const actual = normalize(editorDom(richEditorView));
            expect(actual).toEqual(expected);
        }
    );
    it("should escape unrecognized/unsupported inline elements as plain text", () => {
        const markdown = `<fake>not real</fake>`;
        const richEditorView = richView(markdown);
        // NOTE(review): the expectation shows literal <fake> tags; presumably
        // editorDom()/normalize() (defined outside this excerpt) compare
        // unescaped text — confirm against those helpers.
        const expectedHtml = normalize(
            `<p><fake>not real</fake></p>`
        );
        expect(editorDom(richEditorView)).toEqual(expectedHtml);
    });
    // TODO html_block
    it.skip("should escape unsupported html_block elements as plain text", () => {
        const markdown = `<div style="font-size: 10em;">huge text</div>`;
        const richEditorView = richView(markdown);
        const expectedHtml = normalize(
            `<div class="html_block"><div style="font-size: 10em;">huge text</div><br></div>`
        );
        expect(editorDom(richEditorView)).toEqual(expectedHtml);
    });
    it("should parse complex/split html block structures", () => {
        // since there's a newline, the html block will be interrupted (splitting it!)
        // it'll instead look like: [html_block, p(inline(em, text, softbreak, html_inline)), html_block]
        // therefore, the `**Hello**` will NOT be parsed, but the `_world_` WILL be
        const markdown = `
<blockquote>
<pre>
**Hello**,
_world_.
</pre>
</blockquote>`;
        const richEditorView = richView(markdown);
        const expectedHtml = normalize(
            `<div class="html_block_container ProseMirror-widget"><blockquote>\n<pre>**Hello**,\n<div class="ProseMirror-contentdom"><p><em>world</em>.<span softbreak=""> </span><span class="html_inline"></pre></span><br></p></div></pre></blockquote></div>`
        );
        expect(normalize(editorDom(richEditorView))).toEqual(expectedHtml);
    });
    it("should allow nested HTML", () => {
        const markdown = "<strong><em><del>wtf?</del></em></strong>";
        const richEditorView = richView(markdown);
        // note: the rendered nesting order differs from the input order
        const expectedHtml = `<p><em><del><strong>wtf?</strong></del></em></p>`;
        expect(editorDom(richEditorView)).toEqual(normalize(expectedHtml));
    });
    it("should render markdown within inline HTML", () => {
        const markdown = "<em>**text**</em>";
        const richEditorView = richView(markdown);
        const expectedHtml = `<p><em><strong>text</strong></em></p>`;
        expect(editorDom(richEditorView)).toEqual(normalize(expectedHtml));
    });
    it("should render markdown within block HTML as plain text", () => {
        const markdown = "<blockquote>**text**</blockquote>";
        const richEditorView = richView(markdown);
        const expectedHtml = `<blockquote><p>**text**</p></blockquote>`;
        expect(editorDom(richEditorView)).toEqual(normalize(expectedHtml));
    });
});
describe("decorations", () => {
    it("should onebox links on a single line", async () => {
        const markdown = "[some link](https://example.com)\n";
        const richEditorView = richView(markdown);
        const oneboxDom = richEditorView.dom.querySelectorAll(
            ".js-placeholder"
        );
        expect(oneboxDom).toHaveLength(1);
        // The onebox content is filled in by an (immediately resolving) promise.
        // Await one macrotask tick so the assertion actually runs within the
        // test; the original wrapped it in a bare setTimeout, which Jest never
        // awaited, so a failure there could never fail the test.
        await new Promise((resolve) => setTimeout(resolve, 0));
        expect(oneboxDom[0].textContent).toBe("https://example.com");
    });
    it("should not onebox links with additional text one the same line", () => {
        const markdown = "here is [some link](https://example.com)\n";
        const richEditorView = richView(markdown);
        const oneboxDom = richEditorView.dom.querySelectorAll(".js-onebox");
        expect(oneboxDom).toHaveLength(0);
    });
});
describe("tables", () => {
    it("should render tables", () => {
        // pipe table exercising left / center / right column alignments
        const markdown = `
| Table | With | Alignments |
| ---------- |:-----------:| ----------:|
| left | center | right |
| also left | also center | also right |
`;
        const richEditorView = richView(markdown);
        const table = richEditorView.dom;
        expect(table.querySelectorAll("tr")).toHaveLength(3);
        expect(table.querySelectorAll("th")).toHaveLength(3);
        // query the cells once instead of re-running the selector per assertion
        const cells = table.querySelectorAll("td");
        expect(cells).toHaveLength(6);
        expect(cells[1].style.textAlign).toEqual("center");
        // use the same exact-match assertion as the "center" check above
        // (the original inconsistently used toContain here)
        expect(cells[2].style.textAlign).toEqual("right");
    });
});
describe("general", () => {
    // round-trips content through the editor's content getter/setter
    it.each(["", "# testing some *stuff*"])(
        "should get and set content",
        (content) => {
            const baseContent = "# Here is _some_\n\n> **base** content";
            const view = richView(baseContent);
            // check the initial value
            expect(view.content).toBe(baseContent);
            // set it
            view.content = content;
            // check that the new value is correct
            expect(view.content).toBe(content);
        }
    );
    it("should recover from catastrophic markdown parse crashes", () => {
        // update the mock of buildMarkdownParser to add in a faulty md plugin
        // (mockedMdp / buildMarkdownParser / RichTextEditor come from module
        // scope outside this excerpt)
        mockedMdp.buildMarkdownParser.mockImplementation((...args) => {
            // go ahead and get the usual parser
            const parser = buildMarkdownParser.call(
                null,
                ...args
            ) as ReturnType<typeof buildMarkdownParser>;
            // add our purposefully busted plugin to force a crash
            parser.tokenizer.use((md) => {
                md.core.ruler.push("crash_me", () => {
                    // deliberately a non-Error throw, to test worst-case recovery
                    throw "sucks to be you";
                });
            });
            return parser;
        });
        const editor = new RichTextEditor(
            document.createElement("div"),
            "*This* is some **test** content"
        );
        // on a catastrophic crash, the raw string content gets
        // added into a code_block with a warning to the user attached
        expect(editor.document).toMatchNodeTree({
            childCount: 2,
            content: [
                {
                    "type.name": "heading",
                    "childCount": 1,
                    "content": [
                        {
                            "type.isText": true,
                            "text":
                                "WARNING! There was an error parsing the document",
                        },
                    ],
                },
                {
                    "type.name": "code_block",
                    "childCount": 1,
                    "content": [
                        {
                            "type.isText": true,
                            "text": "*This* is some **test** content",
                        },
                    ],
                },
            ],
        });
    });
});
}); | the_stack |
import * as AlgebraicType from './algebraic-type';
import * as AlgebraicTypeCreation from './algebraic-type-creation';
import * as AlgebraicTypeParser from './algebraic-type-parser';
import * as CommandLine from './commandline';
import * as Configuration from './configuration';
import * as Either from './either';
import * as Error from './error';
import * as File from './file';
import * as FileFinder from './file-finder';
import * as FileWriter from './file-writer';
import * as List from './list';
import * as Logging from './logging';
import * as LoggingSequenceUtils from './logged-sequence-utils';
import * as Maybe from './maybe';
import * as OutputControl from './output-control';
import * as PathUtils from './path-utils';
import * as PluginInclusionUtils from './plugin-inclusion-utils';
import * as Promise from './promise';
import * as ReadFileUtils from './file-logged-sequence-read-utils';
import * as RequirePlugin from './require-plugin';
import * as WriteFileUtils from './file-logged-sequence-write-utils';
/** Config-derived inputs needed to run the plugins for one generation pass. */
interface AlgebraicTypeCreationContext {
  baseClassName: string;
  baseClassLibraryName: string | null;
  diagnosticIgnores: List.List<string>;
  plugins: List.List<AlgebraicType.Plugin>;
  defaultIncludes: List.List<string>;
}
/** A parsed algebraic type together with the source file it came from. */
interface PathAndTypeInfo {
  path: File.AbsoluteFilePath;
  typeInformation: AlgebraicType.Type;
}
/** Command-line driven output destination and emission flags. */
interface GenerationOptions {
  outputPath: File.AbsoluteFilePath | null;
  outputFlags: OutputControl.OutputFlags;
}
// Includes that are on for every type by default; a type's own `excludes`
// can switch individual ones off (see typeInformationContainingDefaultIncludes).
const BASE_INCLUDES: List.List<string> = List.of(
  'AlgebraicTypeInitialization',
  'RMAssertNullability',
  'RMCopying',
  'RMDescription',
  'RMEquality',
  'RMInitNewUnavailable',
  'VoidMatching',
);
// Plugin names handed to the configuration machinery as the base plugin set
// for every run.
const BASE_PLUGINS: List.List<string> = List.of(
  'algebraic-type-initialization',
  'assert-nullability',
  'assume-nonnull',
  'coding',
  'copying',
  'copying-type-safety',
  'description',
  'equality',
  'init-new-unavailable',
  'subclassing-restricted',
  'use-cpp',
  'algebraic-type-matching-bool',
  'algebraic-type-matching-double',
  'algebraic-type-matching-generic',
  'algebraic-type-matching-integer',
  'algebraic-type-matching-void',
  'algebraic-type-case-matching',
  'algebraic-type-templated-matching',
);
/**
 * Parses the raw file contents of a creation request into an algebraic type,
 * pairing the parsed type with the path of the file it came from. Parse
 * errors flow through on the Left side unchanged.
 */
function evaluateUnparsedAlgebraicTypeCreationRequest(
  request: ReadFileUtils.UnparsedObjectCreationRequest,
): Either.Either<Error.Error[], PathAndTypeInfo> {
  const contents = File.getContents(request.fileContents);
  const parsed = AlgebraicTypeParser.parse(contents);
  return Either.map(
    (typeInformation: AlgebraicType.Type): PathAndTypeInfo => ({
      path: request.path,
      typeInformation: typeInformation,
    }),
    parsed,
  );
}
/**
 * Lifts the result of parsing a single unparsed creation request into the
 * logged-future shape the file-processing pipeline expects.
 */
function parseValues(
  either: Either.Either<
    Error.Error[],
    ReadFileUtils.UnparsedObjectCreationRequest
  >,
): Promise.Future<
  Logging.Context<Either.Either<Error.Error[], PathAndTypeInfo>>
> {
  const parsed = Either.mbind(
    evaluateUnparsedAlgebraicTypeCreationRequest,
    either,
  );
  return Promise.munit(Logging.munit(parsed));
}
/**
 * Returns a copy of `typeInformation` whose `includes` list has been merged
 * with the configured default includes (honoring the type's own excludes).
 * Every other field is carried over unchanged.
 */
function typeInformationContainingDefaultIncludes(
  typeInformation: AlgebraicType.Type,
  defaultIncludes: List.List<string>,
): AlgebraicType.Type {
  const mergedIncludes = PluginInclusionUtils.includesContainingDefaultIncludes(
    typeInformation.includes,
    typeInformation.excludes,
    defaultIncludes,
  );
  return {
    annotations: typeInformation.annotations,
    comments: typeInformation.comments,
    excludes: typeInformation.excludes,
    includes: mergedIncludes,
    libraryName: typeInformation.libraryName,
    name: typeInformation.name,
    typeLookups: typeInformation.typeLookups,
    subtypes: typeInformation.subtypes,
  };
}
/**
 * Builds the file-write request for one parsed algebraic type once the
 * creation context (config + plugins) future resolves. Errors on either side
 * of the two Eithers propagate without being collapsed.
 */
function processAlgebraicTypeCreationRequest(
  options: GenerationOptions,
  future: Promise.Future<
    Either.Either<Error.Error[], AlgebraicTypeCreationContext>
  >,
  either: Either.Either<Error.Error[], PathAndTypeInfo>,
): Promise.Future<
  Logging.Context<Either.Either<Error.Error[], FileWriter.FileWriteRequest>>
> {
  return Promise.map(
    (
      contextEither: Either.Either<
        Error.Error[],
        AlgebraicTypeCreationContext
      >,
    ) =>
      Logging.munit(
        Either.mbind(
          (pathAndTypeInfo: PathAndTypeInfo) =>
            Either.mbind(
              (context: AlgebraicTypeCreationContext) => {
                const request: AlgebraicTypeCreation.Request = {
                  diagnosticIgnores: context.diagnosticIgnores,
                  baseClassLibraryName: context.baseClassLibraryName,
                  baseClassName: context.baseClassName,
                  path: pathAndTypeInfo.path,
                  outputPath: options.outputPath,
                  outputFlags: options.outputFlags,
                  // merge the config's default includes into the type's own
                  typeInformation: typeInformationContainingDefaultIncludes(
                    pathAndTypeInfo.typeInformation,
                    context.defaultIncludes,
                  ),
                };
                return AlgebraicTypeCreation.fileWriteRequest(
                  request,
                  context.plugins,
                );
              },
              contextEither,
            ),
          either,
        ),
      ),
    future,
  );
}
/**
 * Loads every configured plugin module and collects the successfully loaded
 * plugins into a list. A failed require short-circuits to the error side;
 * configs whose module resolves to null are silently skipped.
 */
function pluginsFromPluginConfigs(
  pluginConfigs: List.List<Configuration.PluginConfig>,
): Either.Either<Error.Error[], List.List<AlgebraicType.Plugin>> {
  const emptyResult = Either.Right<
    Error.Error[],
    List.List<AlgebraicType.Plugin>
  >(List.of<AlgebraicType.Plugin>());
  return List.foldr(
    (
      soFar: Either.Either<Error.Error[], List.List<AlgebraicType.Plugin>>,
      config: Configuration.PluginConfig,
    ): Either.Either<Error.Error[], List.List<AlgebraicType.Plugin>> =>
      Either.mbind(
        (loaded: List.List<AlgebraicType.Plugin>) =>
          Either.map(
            (maybePlugin: AlgebraicType.Plugin | null) =>
              Maybe.match(
                (plugin: AlgebraicType.Plugin) => List.cons(plugin, loaded),
                () => loaded,
                maybePlugin,
              ),
            RequirePlugin.requireAlgebraicTypePlugin(config.absolutePath),
          ),
        soFar,
      ),
    emptyResult,
    pluginConfigs,
  );
}
/**
 * Finds and parses the generation config (if any), loads the configured
 * plugins, and folds everything plus the command-line include/exclude flags
 * into an AlgebraicTypeCreationContext. Configuration or plugin-loading
 * errors surface on the Left side of the returned Either.
 */
function getAlgebraicTypeCreationContext(
  findConfigFuture: Promise.Future<File.AbsoluteFilePath | null>,
  parsedArgs: CommandLine.Arguments,
): Promise.Future<Either.Either<Error.Error[], AlgebraicTypeCreationContext>> {
  return Promise.mbind(function (
    maybePath: File.AbsoluteFilePath | null,
  ): Promise.Future<
    Either.Either<Error.Error[], AlgebraicTypeCreationContext>
  > {
    // the base plugins/includes seed the config, so user config adds to them
    const configurationContext: Configuration.ConfigurationContext = {
      basePlugins: BASE_PLUGINS,
      baseIncludes: BASE_INCLUDES,
    };
    const configFuture: Promise.Future<
      Either.Either<Error.Error[], Configuration.GenerationConfig>
    > = Configuration.generateConfig(maybePath, configurationContext);
    return Promise.map(function (
      either: Either.Either<Error.Error[], Configuration.GenerationConfig>,
    ): Either.Either<Error.Error[], AlgebraicTypeCreationContext> {
      return Either.match(
        function (
          error: Error.Error[],
        ): Either.Either<Error.Error[], AlgebraicTypeCreationContext> {
          // propagate configuration errors unchanged
          return Either.Left<Error.Error[], AlgebraicTypeCreationContext>(
            error,
          );
        },
        function (
          configuration: Configuration.GenerationConfig,
        ): Either.Either<Error.Error[], AlgebraicTypeCreationContext> {
          const pluginsEither: Either.Either<
            Error.Error[],
            List.List<AlgebraicType.Plugin>
          > = pluginsFromPluginConfigs(configuration.pluginConfigs);
          return Either.map(function (
            plugins: List.List<AlgebraicType.Plugin>,
          ): AlgebraicTypeCreationContext {
            return {
              baseClassName: configuration.baseClassName,
              baseClassLibraryName: configuration.baseClassLibraryName,
              diagnosticIgnores: configuration.diagnosticIgnores,
              plugins: plugins,
              // command-line --include/--exclude flags are applied on top of
              // the config file's default includes
              defaultIncludes: List.fromArray<string>(
                PluginInclusionUtils.includesContainingDefaultIncludes(
                  parsedArgs.includes,
                  parsedArgs.excludes,
                  configuration.defaultIncludes,
                ),
              ),
            };
          },
          pluginsEither);
        },
        either,
      );
    },
    configFuture);
  },
  findConfigFuture);
}
/**
 * Resolves the output-path argument (possibly relative to the directory the
 * tool was run from) to an absolute path, or null when no output path was
 * given (undefined or empty string).
 */
function outputDirectory(
  directoryRunFrom: string,
  outputPath: string | undefined,
): File.AbsoluteFilePath | null {
  // guard clause instead of if/else: no output path means "use the default"
  if (outputPath === undefined || outputPath === '') {
    return null;
  }
  return PathUtils.getAbsolutePathFromDirectoryAndAbsoluteOrRelativePath(
    File.getAbsoluteFilePath(directoryRunFrom),
    outputPath,
  );
}
/**
 * Entry point: for each path given on the command line, reads the `adtValue`
 * files beneath it, parses them into algebraic types, runs the configured
 * plugins, and evaluates the resulting file-write requests, returning the
 * console output results of every path's run.
 */
export function generate(
  directoryRunFrom: string,
  optionalConfigPath: string | undefined,
  parsedArgs: CommandLine.Arguments,
): Promise.Future<List.List<WriteFileUtils.ConsoleOutputResults>> {
  const promises = parsedArgs.givenPaths.map((givenPath) => {
    const requestedPath: File.AbsoluteFilePath =
      PathUtils.getAbsolutePathFromDirectoryAndAbsoluteOrRelativePath(
        File.getAbsoluteFilePath(directoryRunFrom),
        givenPath,
      );
    const outputPath: File.AbsoluteFilePath | null = outputDirectory(
      directoryRunFrom,
      parsedArgs.outputPath,
    );
    // an explicit config path wins; otherwise search from the input path
    const configPath = optionalConfigPath
      ? FileFinder.findConfigAtPath(
          File.getAbsoluteFilePath(optionalConfigPath),
        )
      : FileFinder.searchForConfig('.algebraicTypeConfig', requestedPath);
    const algebraicTypeCreationContextFuture = getAlgebraicTypeCreationContext(
      configPath,
      parsedArgs,
    );
    // read -> parse -> plugin-process, each step a logged sequence
    const readFileSequence = ReadFileUtils.loggedSequenceThatReadsFiles(
      requestedPath,
      'adtValue',
    );
    const parsedSequence = LoggingSequenceUtils.mapLoggedSequence(
      readFileSequence,
      parseValues,
    );
    const options: GenerationOptions = {
      outputPath: outputPath,
      outputFlags: parsedArgs.outputFlags,
    };
    const pluginProcessedSequence = LoggingSequenceUtils.mapLoggedSequence(
      parsedSequence,
      (either) =>
        processAlgebraicTypeCreationRequest(
          options,
          algebraicTypeCreationContextFuture,
          either,
        ),
    );
    return WriteFileUtils.evaluateObjectFileWriteRequestSequence(
      parsedArgs,
      pluginProcessedSequence,
    );
  });
  return Promise.all(List.fromArray(promises));
}
import { Container, Enums, Utils } from "@packages/core-kernel";
import { ChunkCache } from "@packages/core-p2p/src/chunk-cache";
import { NetworkMonitor } from "@packages/core-p2p/src/network-monitor";
import { NetworkState } from "@packages/core-p2p/src/network-state";
import { Peer } from "@packages/core-p2p/src/peer";
import { PeerVerificationResult } from "@packages/core-p2p/src/peer-verifier";
import { Blocks } from "@packages/crypto";
import delay from "delay";
import { cloneDeep } from "lodash";
import path from "path";
// These tests drive (mocked) discovery loops with real delays; allow a minute.
jest.setTimeout(60000);
describe("NetworkMonitor", () => {
// --- shared test doubles --------------------------------------------------
// Everything the NetworkMonitor resolves from the container is replaced with
// a jest mock so each test can script peer/communicator behavior directly.
let networkMonitor: NetworkMonitor;
const container = new Container.Container();
const logger = { warning: jest.fn(), debug: jest.fn(), error: jest.fn(), info: jest.fn() };
// mutable plugin config; individual tests toggle these flags and restore them
const config = {
    dns: ["1.1.1.1"],
    ntp: ["time.google.com"],
    skipDiscovery: undefined,
    networkStart: undefined,
    disableDiscovery: undefined,
    verifyTimeout: undefined,
    ignoreMinimumNetworkReach: undefined,
    minimumNetworkReach: undefined,
    whitelist: [],
    remoteAccess: [],
};
const pluginConfiguration = { all: () => config };
const emitter = { dispatch: jest.fn() };
const communicator = {
    ping: jest.fn(),
    getPeers: jest.fn(),
    getPeerBlocks: jest.fn(),
    postBlock: jest.fn(),
    pingPorts: jest.fn(),
};
const repository = { getPeers: jest.fn(), forgetPeer: jest.fn() };
const triggerService = { call: jest.fn() }; // validateAndAcceptPeer
const stateStore = { getLastBlock: jest.fn() };
const blockchain = { getBlockPing: jest.fn(), getLastBlock: jest.fn() };
// minimal Application facade: service lookups + tagged config resolution
const appGet = {
    [Container.Identifiers.TriggerService]: triggerService,
    [Container.Identifiers.StateStore]: stateStore,
    [Container.Identifiers.BlockchainService]: blockchain,
    [Container.Identifiers.LogService]: logger,
};
const appConfigPeers = {
    list: [],
    sources: [],
};
const app = {
    get: (key) => appGet[key],
    getTagged: () => ({ getOptional: () => 10 }), // minimumNetworkReach in NetworkState analyze
    config: () => appConfigPeers,
    version: () => "3.0.0",
    terminate: jest.fn(),
    log: logger,
};
beforeAll(() => {
    container.unbindAll();
    container.bind(Container.Identifiers.LogService).toConstantValue(logger);
    container.bind(Container.Identifiers.PeerChunkCache).to(ChunkCache).inSingletonScope();
    container.bind(Container.Identifiers.PeerNetworkMonitor).to(NetworkMonitor);
    container.bind(Container.Identifiers.EventDispatcherService).toConstantValue(emitter);
    container.bind(Container.Identifiers.PeerCommunicator).toConstantValue(communicator);
    container.bind(Container.Identifiers.PeerRepository).toConstantValue(repository);
    container.bind(Container.Identifiers.PluginConfiguration).toConstantValue(pluginConfiguration);
    container.bind(Container.Identifiers.Application).toConstantValue(app);
});
beforeEach(() => {
    // fresh monitor per test; resetAllMocks clears any scripted behavior too
    networkMonitor = container.get<NetworkMonitor>(Container.Identifiers.PeerNetworkMonitor);
    networkMonitor.initialize();
    jest.resetAllMocks();
});
afterEach(() => {
    jest.restoreAllMocks();
});
// boot() is exercised both with working DNS/NTP config and with failing
// hosts (dnsAndNtpFail) to confirm that booting tolerates those failures.
describe.each([[true], [false]])("boot", (dnsAndNtpFail) => {
    beforeEach(() => {
        if (dnsAndNtpFail) {
            // unreachable hosts: boot should survive DNS/NTP check failures
            config.ntp = ["nontp.notworking.com"];
            config.dns = ["nodns.notworking.com"];
        }
    });
    afterEach(() => {
        config.dns = ["1.1.1.1"];
        config.ntp = ["time.google.com"];
    });
    describe("when peer discovery is disabled", () => {
        beforeEach(() => {
            config.skipDiscovery = true;
        });
        afterEach(() => {
            config.skipDiscovery = false;
            appConfigPeers.list = [];
            appConfigPeers.sources = [];
        });
        it("should populate peers from seed peers config by calling validateAndAcceptPeer", async () => {
            appConfigPeers.list = [
                { ip: "187.177.54.44", port: 4000 },
                { ip: "188.177.54.44", port: 4000 },
                { ip: "189.177.54.44", port: 4000 },
            ];
            await networkMonitor.boot();
            expect(triggerService.call).toBeCalledTimes(appConfigPeers.list.length); // validateAndAcceptPeer for each peer
            for (const peer of appConfigPeers.list) {
                expect(triggerService.call).toBeCalledWith("validateAndAcceptPeer", {
                    peer: expect.objectContaining(peer),
                    options: { seed: true, lessVerbose: true },
                });
            }
        });
        it("should populate peers from URL config by calling validateAndAcceptPeer", async () => {
            appConfigPeers.sources = ["http://peers.someurl.com"];
            const peers = [
                { ip: "187.177.54.44", port: 4000 },
                { ip: "188.177.54.44", port: 4000 },
                { ip: "189.177.54.44", port: 4000 },
                { ip: "190.177.54.44", port: 4000 },
                { ip: "191.177.54.44", port: 4000 },
            ];
            jest.spyOn(Utils.http, "get").mockResolvedValueOnce({ data: peers } as Utils.HttpResponse);
            await networkMonitor.boot();
            expect(triggerService.call).toBeCalledTimes(peers.length); // for each peer validateAndAcceptPeer is called
            for (const peer of peers) {
                expect(triggerService.call).toBeCalledWith("validateAndAcceptPeer", {
                    peer: expect.objectContaining(peer),
                    options: { seed: true, lessVerbose: true },
                });
            }
        });
        it("should populate peers from URL config by calling validateAndAcceptPeer, when body is string", async () => {
            appConfigPeers.sources = ["http://peers.someurl.com"];
            const peers = [
                { ip: "187.177.54.44", port: 4000 },
                { ip: "188.177.54.44", port: 4000 },
                { ip: "189.177.54.44", port: 4000 },
                { ip: "190.177.54.44", port: 4000 },
                { ip: "191.177.54.44", port: 4000 },
            ];
            // same as above, but the HTTP body arrives as a JSON string
            jest.spyOn(Utils.http, "get").mockResolvedValueOnce({
                data: JSON.stringify(peers),
            } as Utils.HttpResponse);
            await networkMonitor.boot();
            expect(triggerService.call).toBeCalledTimes(peers.length); // for each peer validateAndAcceptPeer is called
            for (const peer of peers) {
                expect(triggerService.call).toBeCalledWith("validateAndAcceptPeer", {
                    peer: expect.objectContaining(peer),
                    options: { seed: true, lessVerbose: true },
                });
            }
        });
        it("should handle as empty array if appConfigPeers.sources is undefined", async () => {
            // @ts-ignore
            appConfigPeers.sources = undefined;
            await networkMonitor.boot();
            expect(triggerService.call).toBeCalledTimes(0); // for each peer validateAndAcceptPeer is called
        });
        it("should populate peers only once if same peer is in list and sources", async () => {
            appConfigPeers.sources = ["http://peers.someurl.com"];
            const peers = [{ ip: "187.177.54.44", port: 4000 }];
            appConfigPeers.list = [{ ip: "187.177.54.44", port: 4000 }];
            jest.spyOn(Utils.http, "get").mockResolvedValueOnce({ data: peers } as Utils.HttpResponse);
            await networkMonitor.boot();
            expect(triggerService.call).toBeCalledTimes(peers.length); // for each peer validateAndAcceptPeer is called
            for (const peer of peers) {
                expect(triggerService.call).toBeCalledWith("validateAndAcceptPeer", {
                    peer: expect.objectContaining(peer),
                    options: { seed: true, lessVerbose: true },
                });
            }
        });
        it("should populate peers from file by calling validateAndAcceptPeer", async () => {
            appConfigPeers.sources = [path.resolve(__dirname, "fixtures", "peers.json")];
            await networkMonitor.boot();
            const peers = require("./fixtures/peers.json");
            expect(triggerService.call).toBeCalledTimes(peers.length); // validateAndAcceptPeer for each peer in peers.json
            for (const peer of peers) {
                expect(triggerService.call).toBeCalledWith("validateAndAcceptPeer", {
                    peer: expect.objectContaining(peer),
                    options: { seed: true, lessVerbose: true },
                });
            }
        });
    });
    describe("when peer discovery is enabled", () => {
        beforeEach(() => {
            process.env.NODE_ENV = "test";
            config.skipDiscovery = false;
        });
        afterEach(() => {
            delete process.env.NODE_ENV;
        });
        it("should discover peers from seed peers (calling updateNetworkStatus) and log the peers discovered by version", async () => {
            const spyUpdateNetworkStatus = jest.spyOn(networkMonitor, "updateNetworkStatus");
            const peers = [
                { ip: "187.177.54.44", port: 4000, version: "3.0.0" },
                { ip: "188.177.54.44", port: 4000, version: "3.0.0" },
                { ip: "189.177.54.44", port: 4000, version: "3.0.1" },
                { ip: "190.177.54.44", port: 4000, version: "3.0.2" },
                { ip: "191.177.54.44", port: 4000, version: "3.0.2" },
            ];
            repository.getPeers = jest.fn().mockReturnValueOnce(peers);
            await networkMonitor.boot();
            expect(spyUpdateNetworkStatus).toBeCalledTimes(1);
            expect(spyUpdateNetworkStatus).toBeCalledWith(true);
            // peers are grouped and logged per version (singular/plural aware)
            expect(logger.info).toBeCalledWith("Discovered 2 peers with v3.0.0.");
            expect(logger.info).toBeCalledWith("Discovered 1 peer with v3.0.1.");
            expect(logger.info).toBeCalledWith("Discovered 2 peers with v3.0.2.");
        });
    });
});
// updateNetworkStatus() drives discovery/cleansing; these tests toggle env
// and config flags to hit each early-exit and failure path.
describe("updateNetworkStatus", () => {
    describe("when process.env.NODE_ENV === 'test'", () => {
        beforeEach(() => {
            process.env.NODE_ENV = "test";
        });
        afterEach(() => {
            delete process.env.NODE_ENV;
        });
        it("should not do anything", async () => {
            const spyDiscoverPeers = jest.spyOn(networkMonitor, "discoverPeers");
            await networkMonitor.updateNetworkStatus();
            expect(spyDiscoverPeers).toBeCalledTimes(0);
            expect(logger.warning).toBeCalledTimes(0);
        });
    });
    describe("when in 'network start' mode", () => {
        beforeEach(() => {
            delete process.env.NODE_ENV;
            config.networkStart = true;
        });
        afterEach(() => {
            config.networkStart = undefined;
        });
        it("should set coldStart to true and discover peers", async () => {
            const spyDiscoverPeers = jest.spyOn(networkMonitor, "discoverPeers").mockResolvedValue(false);
            // @ts-ignore
            const spyHasMinimumPeers = jest.spyOn(networkMonitor, "hasMinimumPeers").mockReturnValue(true);
            expect(networkMonitor.isColdStart()).toBeFalse();
            await networkMonitor.updateNetworkStatus();
            expect(networkMonitor.isColdStart()).toBeTrue();
            expect(spyDiscoverPeers).toBeCalledTimes(1);
            expect(spyHasMinimumPeers).toBeCalledTimes(1);
        });
    });
    describe("when in 'disable discovery' mode", () => {
        beforeEach(() => {
            config.disableDiscovery = true;
        });
        afterEach(() => {
            config.disableDiscovery = undefined;
        });
        it("should log a warning message and not discover peers", async () => {
            const spyDiscoverPeers = jest.spyOn(networkMonitor, "discoverPeers");
            await networkMonitor.updateNetworkStatus();
            expect(logger.warning).toBeCalledWith(
                "Skipped peer discovery because the relay is in non-discovery mode.",
            );
            expect(spyDiscoverPeers).toBeCalledTimes(0);
        });
    });
    it("should discover new peers from existing", async () => {
        repository.getPeers.mockReturnValue([]);
        const spyDiscoverPeers = jest.spyOn(networkMonitor, "discoverPeers");
        await networkMonitor.updateNetworkStatus();
        expect(spyDiscoverPeers).toBeCalledTimes(1);
    });
    it("should log an error when discovering new peers fails", async () => {
        repository.getPeers.mockReturnValue([]);
        const spyDiscoverPeers = jest.spyOn(networkMonitor, "discoverPeers");
        const errorMessage = "failed discovering peers";
        spyDiscoverPeers.mockRejectedValueOnce(new Error(errorMessage));
        await networkMonitor.updateNetworkStatus();
        expect(spyDiscoverPeers).toBeCalledTimes(1);
        expect(logger.error).toBeCalledTimes(1);
        expect(logger.error).toBeCalledWith(`Network Status: ${errorMessage}`);
    });
    describe("when we are below minimum peers", () => {
        it("should fall back to seed peers when after discovering we are below minimum peers", async () => {
            config.minimumNetworkReach = 5;
            repository.getPeers.mockReturnValue([]);
            await networkMonitor.updateNetworkStatus();
            expect(logger.info).toBeCalledWith("Couldn't find enough peers. Falling back to seed peers.");
        });
        it("should not fall back to seed peers when config.ignoreMinimumNetworkReach", async () => {
            config.minimumNetworkReach = 5;
            config.ignoreMinimumNetworkReach = true;
            repository.getPeers.mockReturnValue([]);
            await networkMonitor.updateNetworkStatus();
            expect(logger.info).not.toBeCalledWith("Couldn't find enough peers. Falling back to seed peers.");
        });
    });
    it("should schedule the next updateNetworkStatus only once", async () => {
        repository.getPeers.mockReturnValue([]);
        let sleeping = true;
        // keep the scheduled follow-up update "asleep" until we flip the flag
        const mockSleep = async () => {
            while (sleeping) {
                await delay(10);
            }
        };
        const spySleep = jest.spyOn(Utils, "sleep").mockImplementationOnce(mockSleep);
        await networkMonitor.updateNetworkStatus();
        expect(spySleep).toBeCalledTimes(1);
        await networkMonitor.updateNetworkStatus();
        expect(spySleep).toBeCalledTimes(1);
        sleeping = false;
        await delay(20); // give time to mockSleep to end and scheduleUpdateNetworkStatus to finish
        await networkMonitor.updateNetworkStatus();
        expect(spySleep).toBeCalledTimes(2); // because no more pending nextUpdateNetworkStatusScheduled
    });
});
// cleansePeers() pings known peers and removes the unresponsive ones.
describe("cleansePeers", () => {
    const peers = [
        new Peer("187.177.54.44", 4000),
        new Peer("188.177.54.44", 4000),
        new Peer("189.177.54.44", 4000),
        new Peer("190.177.54.44", 4000),
        new Peer("191.177.54.44", 4000),
    ];
    beforeEach(() => {
        repository.getPeers = jest.fn().mockReturnValue(peers);
    });
    afterEach(() => {
        jest.clearAllMocks();
        repository.getPeers = jest.fn();
    });
    it("should ping every peer when the peers length is <= <peerCount>", async () => {
        await networkMonitor.cleansePeers({ peerCount: 5 });
        expect(communicator.ping).toBeCalledTimes(peers.length);
        for (const peer of peers) {
            expect(communicator.ping).toBeCalledWith(peer, config.verifyTimeout, expect.anything());
        }
    });
    it("should ping every peer when the peers length is <= <peerCount> - when initializing is false", async () => {
        // @ts-ignore
        networkMonitor.initializing = false;
        await networkMonitor.cleansePeers({ peerCount: 5 });
        expect(communicator.ping).toBeCalledTimes(peers.length);
        for (const peer of peers) {
            expect(communicator.ping).toBeCalledWith(peer, config.verifyTimeout, expect.anything());
        }
    });
    it("should ping a max of <peerCount> peers when the peers length is above <peerCount>", async () => {
        await networkMonitor.cleansePeers({ peerCount: 2 });
        expect(communicator.ping).toBeCalledTimes(2);
    });
    it("should dispatch 'p2p.internal.disconnectPeer', PeerEvent.Removed, and log the error when ping fails for a peer", async () => {
        communicator.ping.mockRejectedValueOnce(new Error("Timeout"));
        await networkMonitor.cleansePeers({ peerCount: 5 });
        expect(communicator.ping).toBeCalledTimes(peers.length);
        expect(emitter.dispatch).toBeCalledTimes(2); // 1 for disconnecting peer + 1 for peer removed event
        expect(emitter.dispatch).toBeCalledWith(Enums.PeerEvent.Disconnect, { peer: expect.toBeOneOf(peers) });
        expect(emitter.dispatch).toBeCalledWith(Enums.PeerEvent.Removed, expect.toBeOneOf(peers));
    });
    it("should log the responsive peers count and the median network height when initializing", async () => {
        communicator.ping.mockRejectedValueOnce(new Error("Timeout"));
        await networkMonitor.cleansePeers({ peerCount: 5 });
        expect(communicator.ping).toBeCalledTimes(peers.length);
        expect(logger.info).toBeCalledWith("4 of 5 peers on the network are responsive");
        expect(logger.info).toBeCalledWith("Median Network Height: 0"); // the peers have no height
    });
});
// discoverPeers() fetches peer lists from a sample of our known peers.
describe("discoverPeers", () => {
    const peers = [
        new Peer("180.177.54.4", 4000),
        new Peer("181.177.54.4", 4000),
        new Peer("182.177.54.4", 4000),
        new Peer("183.177.54.4", 4000),
        new Peer("184.177.54.4", 4000),
        new Peer("185.177.54.4", 4000),
        new Peer("186.177.54.4", 4000),
        new Peer("187.177.54.4", 4000),
        new Peer("188.177.54.4", 4000),
        new Peer("189.177.54.4", 4000),
    ];
    beforeEach(() => {
        repository.getPeers = jest.fn().mockReturnValue(peers);
    });
    afterEach(() => {
        repository.getPeers = jest.fn();
    });
    it("should get peers from 8 of our peers, and add them to our peers", async () => {
        // mocking a timeout for the first peer, should be fine
        communicator.getPeers.mockRejectedValueOnce(new Error("timeout"));
        // mocking different getPeers return for the other peers in storage.
        // for..of over entries() replaces the original comma-expression for
        // loop, whose final update step read peers[peers.length] (undefined).
        for (const [i, peer] of peers.entries()) {
            if (i === 0) {
                continue; // the first peer got the rejected mock above
            }
            communicator.getPeers.mockResolvedValueOnce([
                { ip: `${peer.ip}1${i}`, port: peer.port },
                { ip: `${peer.ip}2${i}`, port: peer.port },
                { ip: `${peer.ip}3${i}`, port: peer.port },
                { ip: `${peer.ip}4${i}`, port: peer.port },
            ]);
        }
        await networkMonitor.discoverPeers();
        expect(communicator.getPeers).toBeCalledTimes(8);
        expect(triggerService.call).toBeCalledTimes(7 * 4); // validateAndAcceptPeer for each peer fetched from the 7 peers
    });
    describe("when not in pingAll mode + we have more than minimum peers + we have more than 75% of the peers fetched", () => {
        it("should not add the peers fetched", async () => {
            // mocking different getPeers return for each peer in storage
            for (const [i, peer] of peers.entries()) {
                communicator.getPeers.mockResolvedValueOnce([{ ip: `${peer.ip}1${i}`, port: peer.port }]);
            }
            config.minimumNetworkReach = 5;
            await networkMonitor.discoverPeers();
            expect(communicator.getPeers).toBeCalledTimes(8);
            expect(triggerService.call).toBeCalledTimes(0);
        });
    });
});
describe("completeColdStart", () => {
    // networkStart must be enabled so updateNetworkStatus() enters cold-start mode.
    beforeEach(() => {
        config.networkStart = true;
    });
    afterEach(() => {
        config.networkStart = undefined;
    });
    it("should set coldStart to false", async () => {
        await networkMonitor.updateNetworkStatus(); // setting cold start to true
        expect(networkMonitor.isColdStart()).toBeTrue();
        // completing the cold start must flip the flag back off
        networkMonitor.completeColdStart();
        expect(networkMonitor.isColdStart()).toBeFalse();
    });
});
describe("getNetworkHeight", () => {
    it.each([
        [6, [5, 6, 6, 6, 7, 8]],
        [79, [34, 78, 79, 79, 79, 90]],
    ])("should return the median height from our peers", (expectedNetworkHeight, peersHeights) => {
        // Build one peer per entry, carrying the given height in its state.
        const peers = [];
        peersHeights.forEach((height, index) => {
            const peer = new Peer(`188.185.1.${index}`, 4000);
            peer.state.height = height;
            peers.push(peer);
        });
        repository.getPeers = jest.fn().mockReturnValue(peers);
        expect(networkMonitor.getNetworkHeight()).toBe(expectedNetworkHeight);
        repository.getPeers = jest.fn();
    });
});
describe("getNetworkState", () => {
    beforeEach(() => {
        process.env.CORE_ENV = "test"; // for NetworkState analyze
        repository.getPeers = jest.fn().mockReturnValue([]);
    });
    afterEach(() => {
        delete process.env.CORE_ENV;
        repository.getPeers = jest.fn();
    });
    // Minimal genesis-like block fixture returned as the chain's last block.
    // NOTE(review): cast silences missing Block members — presumably only
    // `data` is read by NetworkState.analyze; confirm against implementation.
    const block = {
        data: {
            id: "17882607875259085966",
            version: 0,
            timestamp: 46583330,
            height: 2,
            reward: Utils.BigNumber.make("0"),
            previousBlock: "17184958558311101492",
            numberOfTransactions: 0,
            totalAmount: Utils.BigNumber.make("0"),
            totalFee: Utils.BigNumber.make("0"),
            payloadLength: 0,
            payloadHash: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
            generatorPublicKey: "026c598170201caf0357f202ff14f365a3b09322071e347873869f58d776bfc565",
            blockSignature:
                "3045022100e7385c6ea42bd950f7f6ab8c8619cf2f66a41d8f8f185b0bc99af032cb25f30d02200b6210176a6cedfdcbe483167fd91c21d740e0e4011d24d679c601fdd46b0de9",
        },
        transactions: [],
    } as Blocks.Block;
    it("should call cleansePeers with {fast, forcePing} and return network state from NetworkState.analyze", async () => {
        const spyCleansePeers = jest.spyOn(networkMonitor, "cleansePeers");
        blockchain.getLastBlock = jest.fn().mockReturnValueOnce(block);
        const networkState = await networkMonitor.getNetworkState();
        expect(networkState).toBeInstanceOf(NetworkState);
        expect(spyCleansePeers).toBeCalledTimes(1);
        expect(spyCleansePeers).toBeCalledWith({ fast: true, forcePing: true });
    });
});
describe("refreshPeersAfterFork", () => {
    // No stored peers are needed — only the cleansePeers call is asserted.
    beforeEach(() => {
        repository.getPeers = jest.fn().mockReturnValue([]);
    });
    afterEach(() => {
        repository.getPeers = jest.fn();
    });
    it("should call cleansePeers with {forcePing}", async () => {
        const cleanseSpy = jest.spyOn(networkMonitor, "cleansePeers");
        await networkMonitor.refreshPeersAfterFork();
        expect(cleanseSpy).toBeCalledTimes(1);
        expect(cleanseSpy).toBeCalledWith({ forcePing: true });
    });
});
describe("checkNetworkHealth", () => {
    // Helper: builds `count` peers sharing the same IP/port; each test attaches
    // the PeerVerificationResult(myHeight, hisHeight, highestCommonHeight) it needs.
    const makePeers = (count) => Array.from({ length: count }, () => new Peer("180.177.54.4", 4000));
    it("should not rollback when there are no verified peers", async () => {
        const peers = makePeers(9);
        try {
            repository.getPeers.mockReturnValue(peers);
            const networkStatus = await networkMonitor.checkNetworkHealth();
            expect(networkStatus).toEqual({ forked: false });
        } finally {
            repository.getPeers.mockReset();
        }
    });
    // (typo fix: "peers how are" -> "peers who are")
    it("should rollback ignoring peers who are below common height", async () => {
        // 105 (4 peers)
        //    /
        // 90 (3 peers) ... 100 ... 103 (2 peers and us)
        const lastBlock = { data: { height: 103 } };
        const peers = makePeers(9);
        peers[0].verificationResult = new PeerVerificationResult(103, 90, 90);
        peers[1].verificationResult = new PeerVerificationResult(103, 90, 90);
        peers[2].verificationResult = new PeerVerificationResult(103, 90, 90);
        peers[3].verificationResult = new PeerVerificationResult(103, 105, 100);
        peers[4].verificationResult = new PeerVerificationResult(103, 105, 100);
        peers[5].verificationResult = new PeerVerificationResult(103, 105, 100);
        peers[6].verificationResult = new PeerVerificationResult(103, 105, 100);
        peers[7].verificationResult = new PeerVerificationResult(103, 103, 103);
        peers[8].verificationResult = new PeerVerificationResult(103, 103, 103);
        try {
            repository.getPeers.mockReturnValue(peers);
            stateStore.getLastBlock.mockReturnValue(lastBlock);
            const networkStatus = await networkMonitor.checkNetworkHealth();
            expect(networkStatus).toEqual({ forked: true, blocksToRollback: 3 });
        } finally {
            repository.getPeers.mockReset();
            stateStore.getLastBlock.mockReset();
        }
    });
    it("should rollback ignoring peers who are at common height", async () => {
        // 105 (4 peers)
        //    /
        // 100 (3 peers) ... 103 (2 peers and us)
        const lastBlock = { data: { height: 103 } };
        const peers = makePeers(9);
        peers[0].verificationResult = new PeerVerificationResult(103, 100, 100);
        peers[1].verificationResult = new PeerVerificationResult(103, 100, 100);
        peers[2].verificationResult = new PeerVerificationResult(103, 100, 100);
        peers[3].verificationResult = new PeerVerificationResult(103, 105, 100);
        peers[4].verificationResult = new PeerVerificationResult(103, 105, 100);
        peers[5].verificationResult = new PeerVerificationResult(103, 105, 100);
        peers[6].verificationResult = new PeerVerificationResult(103, 105, 100);
        peers[7].verificationResult = new PeerVerificationResult(103, 103, 103);
        peers[8].verificationResult = new PeerVerificationResult(103, 103, 103);
        try {
            repository.getPeers.mockReturnValue(peers);
            stateStore.getLastBlock.mockReturnValue(lastBlock);
            const networkStatus = await networkMonitor.checkNetworkHealth();
            expect(networkStatus).toEqual({ forked: true, blocksToRollback: 3 });
        } finally {
            repository.getPeers.mockReset();
            stateStore.getLastBlock.mockReset();
        }
    });
    it("should not rollback although most peers are forked", async () => {
        // 47 (1 peer) 47 (3 peers) 47 (3 peers)
        //    /            /            /
        // 12 ........... 31 ........... 35 ... 43 (3 peers and us)
        const lastBlock = { data: { height: 103 } };
        const peers = makePeers(10);
        peers[0].verificationResult = new PeerVerificationResult(43, 47, 12);
        peers[1].verificationResult = new PeerVerificationResult(43, 47, 31);
        peers[2].verificationResult = new PeerVerificationResult(43, 47, 31);
        peers[3].verificationResult = new PeerVerificationResult(43, 47, 31);
        peers[4].verificationResult = new PeerVerificationResult(43, 47, 35);
        peers[5].verificationResult = new PeerVerificationResult(43, 47, 35);
        peers[6].verificationResult = new PeerVerificationResult(43, 47, 35);
        peers[7].verificationResult = new PeerVerificationResult(43, 47, 43);
        peers[8].verificationResult = new PeerVerificationResult(43, 47, 43);
        peers[9].verificationResult = new PeerVerificationResult(43, 47, 43);
        try {
            repository.getPeers.mockReturnValue(peers);
            stateStore.getLastBlock.mockReturnValue(lastBlock);
            const networkStatus = await networkMonitor.checkNetworkHealth();
            expect(networkStatus).toEqual({ forked: false });
        } finally {
            repository.getPeers.mockReset();
            stateStore.getLastBlock.mockReset();
        }
    });
});
describe("downloadBlocksFromHeight", () => {
    afterEach(() => {
        communicator.getPeerBlocks = jest.fn();
        repository.getPeers = jest.fn();
    });
    // Mirrors the monitor's internal download constants.
    const downloadChunkSize = 400;
    const maxParallelDownloads = 25;
    const baseHeight = 50000;
    // All blocks a full parallel download starting above `height` could return.
    const expectedBlocksFromHeight = (height) => {
        const blocks = [];
        for (let i = 0; i < maxParallelDownloads * downloadChunkSize; i++) {
            blocks.push({ height: height + 1 + i });
        }
        return blocks;
    };
    // Fake peer download: deliberately fails for the chunk starting at baseHeight,
    // otherwise returns one chunk of consecutive blocks.
    const mockedGetPeerBlocks = (peer, { fromBlockHeight }) => {
        if (fromBlockHeight + 1 === baseHeight) {
            throw new Error(`Cannot download blocks, deliberate error`);
        }
        return expectedBlocksFromHeight(fromBlockHeight).slice(0, downloadChunkSize);
    };
    it("should return empty array and log an error when we have zero peer", async () => {
        repository.getPeers = jest.fn().mockReturnValue([]);
        expect(await networkMonitor.downloadBlocksFromHeight(1)).toEqual([]);
        expect(logger.error).toBeCalledTimes(1);
        expect(logger.error).toBeCalledWith("Could not download blocks: we have 0 peers");
    });
    it("should return empty array and log an error when all our peers are forked", async () => {
        const peer = new Peer("1.1.1.1", 4000);
        peer.state = { height: 4, currentSlot: 4, forgingAllowed: true, header: {} };
        peer.verificationResult = new PeerVerificationResult(3, 4, 2);
        repository.getPeers = jest.fn().mockReturnValue([peer]);
        expect(await networkMonitor.downloadBlocksFromHeight(1, maxParallelDownloads)).toEqual([]);
        expect(logger.error).toBeCalledTimes(1);
        expect(logger.error).toBeCalledWith(
            "Could not download blocks: We have 1 peer(s) but all of them are on a different chain than us",
        );
    });
    it("should download blocks from 1 peer", async () => {
        const mockBlock = { id: "123456" };
        communicator.getPeerBlocks = jest.fn().mockReturnValue([mockBlock]);
        const peer = new Peer("1.1.1.1", 4000);
        peer.state = { height: 2, currentSlot: 2, forgingAllowed: true, header: {} };
        peer.verificationResult = { forked: false, hisHeight: 2, myHeight: 2, highestCommonHeight: 2 };
        repository.getPeers = jest.fn().mockReturnValue([peer]);
        expect(await networkMonitor.downloadBlocksFromHeight(1, maxParallelDownloads)).toEqual([mockBlock]);
    });
    it("should download blocks from 1 peer - peer returns zero blocks", async () => {
        communicator.getPeerBlocks = jest.fn().mockReturnValue([]);
        const peer = new Peer("1.1.1.1", 4000);
        peer.state = { height: 2, currentSlot: 2, forgingAllowed: true, header: {} };
        peer.verificationResult = { forked: false, hisHeight: 2, myHeight: 2, highestCommonHeight: 2 };
        repository.getPeers = jest.fn().mockReturnValue([peer]);
        expect(await networkMonitor.downloadBlocksFromHeight(1, maxParallelDownloads)).toEqual([]);
    });
    it("should download blocks in parallel from N peers max", async () => {
        communicator.getPeerBlocks = jest.fn().mockImplementation(mockedGetPeerBlocks);
        const peers = [];
        for (let i = 0; i < maxParallelDownloads + 5; i++) {
            const peer = new Peer(`1.1.1.${i}`, 4000);
            peer.state = { height: 12500, currentSlot: 2, forgingAllowed: true, header: {} };
            peer.verificationResult = { forked: false, hisHeight: 2, myHeight: 2, highestCommonHeight: 2 };
            peers.push(peer);
        }
        repository.getPeers = jest.fn().mockReturnValue(peers);
        const fromHeight = 1;
        const downloadedBlocks = await networkMonitor.downloadBlocksFromHeight(fromHeight, maxParallelDownloads);
        const expectedBlocks = expectedBlocksFromHeight(fromHeight);
        expect(downloadedBlocks).toEqual(expectedBlocks);
    });
    it("should download blocks in parallel from all peers if less than N peers", async () => {
        communicator.getPeerBlocks = jest.fn().mockImplementation(mockedGetPeerBlocks);
        const numPeers = maxParallelDownloads - 7;
        const peers = [];
        for (let i = 0; i < numPeers; i++) {
            const peer = new Peer(`1.1.1.${i}`, 4000);
            peer.state = { height: 12500, currentSlot: 2, forgingAllowed: true, header: {} };
            peer.verificationResult = { forked: false, hisHeight: 2, myHeight: 2, highestCommonHeight: 2 };
            peers.push(peer);
        }
        repository.getPeers = jest.fn().mockReturnValue(peers);
        const fromHeight = 1;
        const downloadedBlocks = await networkMonitor.downloadBlocksFromHeight(fromHeight, maxParallelDownloads);
        const expectedBlocks = expectedBlocksFromHeight(fromHeight).slice(0, numPeers * downloadChunkSize);
        expect(downloadedBlocks).toEqual(expectedBlocks);
    });
    it("should handle when getPeerBlocks throws", async () => {
        const mockFn = jest.fn().mockImplementation(mockedGetPeerBlocks);
        communicator.getPeerBlocks = mockFn;
        const numPeers = 5;
        const peers = [];
        for (let i = 0; i < numPeers; i++) {
            const peer = new Peer(`1.1.1.${i}`, 4000);
            peer.state = {
                height: baseHeight + numPeers * downloadChunkSize,
                currentSlot: 2,
                forgingAllowed: true,
                header: {},
            };
            peer.verificationResult = { forked: false, hisHeight: 2, myHeight: 2, highestCommonHeight: 2 };
            peers.push(peer);
        }
        repository.getPeers = jest.fn().mockReturnValue(peers);
        const chunksToDownloadBeforeThrow = 2;
        let fromHeight = baseHeight - 1 - chunksToDownloadBeforeThrow * downloadChunkSize;
        let downloadedBlocks = await networkMonitor.downloadBlocksFromHeight(fromHeight, maxParallelDownloads);
        let expectedBlocks = expectedBlocksFromHeight(fromHeight).slice(
            0,
            chunksToDownloadBeforeThrow * downloadChunkSize,
        );
        expect(downloadedBlocks).toEqual(expectedBlocks);
        // when downloading the chunk triggering the throw, it will try to download from all the other peers
        // (so it will try (numPeers - 1) more times)
        expect(mockFn.mock.calls.length).toEqual(numPeers + (numPeers - 1));
        for (let i = 0; i < numPeers; i++) {
            if (i >= chunksToDownloadBeforeThrow && i < chunksToDownloadBeforeThrow + numPeers) {
                expect(mockFn.mock.calls[i][1].fromBlockHeight).toEqual(
                    fromHeight + chunksToDownloadBeforeThrow * downloadChunkSize,
                );
            } else {
                expect(mockFn.mock.calls[i][1].fromBlockHeight).toEqual(fromHeight + i * downloadChunkSize);
            }
        }
        // See that the downloaded higher 2 chunks would be returned from the cache.
        // (use mockClear() to reset recorded calls instead of assigning to mock.calls directly)
        mockFn.mockClear();
        fromHeight = baseHeight - 1 + downloadChunkSize;
        downloadedBlocks = await networkMonitor.downloadBlocksFromHeight(fromHeight, maxParallelDownloads);
        expectedBlocks = expectedBlocksFromHeight(fromHeight).slice(0, numPeers * downloadChunkSize);
        expect(downloadedBlocks).toEqual(expectedBlocks);
        const numFailedChunks = 1;
        const numCachedChunks = numPeers - chunksToDownloadBeforeThrow - numFailedChunks;
        expect(mockFn.mock.calls.length).toEqual(numPeers - numCachedChunks);
        for (let i = 0; i < numPeers - numCachedChunks; i++) {
            expect(mockFn.mock.calls[i][1].fromBlockHeight).toEqual(
                fromHeight + (i + numCachedChunks) * downloadChunkSize,
            );
        }
    });
    it("should handle when getPeerBlocks always throws", async () => {
        communicator.getPeerBlocks = jest.fn().mockRejectedValue("always throwing");
        const numPeers = 5;
        const baseHeight = 10000;
        const peers = [];
        for (let i = 0; i < numPeers; i++) {
            const peer = new Peer(`1.1.1.${i}`, 4000);
            peer.state = {
                height: baseHeight + numPeers * downloadChunkSize,
                currentSlot: 2,
                forgingAllowed: true,
                header: {},
            };
            peer.verificationResult = { forked: false, hisHeight: 2, myHeight: 2, highestCommonHeight: 2 };
            peers.push(peer);
        }
        repository.getPeers = jest.fn().mockReturnValue(peers);
        const chunksToDownload = 2;
        const fromHeight = baseHeight - 1 - chunksToDownload * downloadChunkSize;
        const downloadedBlocks = await networkMonitor.downloadBlocksFromHeight(fromHeight, maxParallelDownloads);
        expect(downloadedBlocks).toEqual([]);
    });
    it("should still download blocks from 1 peer if network height === our height", async () => {
        const mockBlock = { id: "123456" };
        communicator.getPeerBlocks = jest.fn().mockReturnValue([mockBlock]);
        const peer = new Peer("1.1.1.1", 4000);
        peer.state = { height: 20, currentSlot: 2, forgingAllowed: true, header: {} };
        peer.verificationResult = { forked: false, hisHeight: 20, myHeight: 20, highestCommonHeight: 20 };
        repository.getPeers = jest.fn().mockReturnValue([peer]);
        expect(await networkMonitor.downloadBlocksFromHeight(20, maxParallelDownloads)).toEqual([mockBlock]);
    });
    it("should reduce download block chunk size after receiving no block", async () => {
        const chunkCache = container.get<ChunkCache>(Container.Identifiers.PeerChunkCache);
        chunkCache.has = jest.fn().mockReturnValue(false);
        communicator.getPeerBlocks = jest.fn().mockReturnValue([]);
        const numPeers = maxParallelDownloads;
        const peers = [];
        for (let i = 0; i < maxParallelDownloads; i++) {
            const peer = new Peer(`1.1.1.${i}`, 4000);
            // (removed a dead store: state was assigned twice in a row, the first
            // value was immediately overwritten by this one)
            peer.state = {
                height: numPeers * downloadChunkSize,
                currentSlot: 2,
                forgingAllowed: true,
                header: {},
            };
            peer.verificationResult = { forked: false, hisHeight: 1, myHeight: 1, highestCommonHeight: 1 };
            peers.push(peer);
        }
        repository.getPeers = jest.fn().mockReturnValue(peers);
        const fromHeight = 1;
        // first step, peers won't return any block: chunk size should be reduced by factor 10 for next download
        for (const expectedBlockLimit of [400, 40, 4, 1, 1, 1]) {
            // @ts-ignore
            communicator.getPeerBlocks.mockReset();
            const downloadedBlocks = await networkMonitor.downloadBlocksFromHeight(
                fromHeight,
                maxParallelDownloads,
            );
            expect(downloadedBlocks).toEqual([]);
            // getPeerBlocks fails every time for every peer, so it will try for each peer
            // from all the other peers before reducing chunk size
            expect(communicator.getPeerBlocks).toBeCalledTimes(numPeers * maxParallelDownloads);
            expect(communicator.getPeerBlocks).toBeCalledWith(expect.anything(), {
                fromBlockHeight: expect.any(Number),
                blockLimit: expectedBlockLimit,
            });
        }
        // second step, peers return blocks: chunk size should be reset to default value (400) for next download
        const mockGetPeerBlocks1Block = (_, { fromBlockHeight }) => [expectedBlocksFromHeight(fromBlockHeight)[0]];
        for (const expectedBlockLimit of [1, 400]) {
            communicator.getPeerBlocks = jest
                .fn()
                .mockImplementation(expectedBlockLimit === 1 ? mockGetPeerBlocks1Block : mockedGetPeerBlocks);
            const downloadedBlocks = await networkMonitor.downloadBlocksFromHeight(
                fromHeight,
                maxParallelDownloads,
            );
            const expectedBlocks = expectedBlocksFromHeight(fromHeight).slice(0, numPeers * expectedBlockLimit);
            expect(downloadedBlocks).toEqual(expectedBlocks);
            expect(communicator.getPeerBlocks).toBeCalledTimes(maxParallelDownloads);
            expect(communicator.getPeerBlocks).toBeCalledWith(expect.anything(), {
                fromBlockHeight: expect.any(Number),
                blockLimit: expectedBlockLimit,
            });
        }
    });
});
describe("broadcastBlock", () => {
    // Fixture block; cast below silences missing Block members — only `data`
    // is read by these tests.
    // @ts-ignore
    const block = {
        data: {
            id: "17882607875259085966",
            version: 0,
            timestamp: 46583330,
            height: 2,
            reward: Utils.BigNumber.make("0"),
            previousBlock: "17184958558311101492",
            numberOfTransactions: 0,
            totalAmount: Utils.BigNumber.make("0"),
            totalFee: Utils.BigNumber.make("0"),
            payloadLength: 0,
            payloadHash: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
            generatorPublicKey: "026c598170201caf0357f202ff14f365a3b09322071e347873869f58d776bfc565",
            blockSignature:
                "3045022100e7385c6ea42bd950f7f6ab8c8619cf2f66a41d8f8f185b0bc99af032cb25f30d02200b6210176a6cedfdcbe483167fd91c21d740e0e4011d24d679c601fdd46b0de9",
        },
        transactions: [],
    } as Blocks.Block;
    // Five stored peers — the broadcast fan-out fraction is asserted against peers.length.
    const peers = [
        new Peer("180.177.54.4", 4000),
        new Peer("181.177.54.4", 4000),
        new Peer("182.177.54.4", 4000),
        new Peer("183.177.54.4", 4000),
        new Peer("184.177.54.4", 4000),
    ];
    beforeEach(() => {
        repository.getPeers = jest.fn().mockReturnValue(peers);
    });
    afterEach(() => {
        repository.getPeers = jest.fn();
        blockchain.getBlockPing = jest.fn();
    });
    it.each([[4], [5], [10], [50]])("should not broadcast to any peer when blockPing >= 4", async (count) => {
        blockchain.getBlockPing = jest
            .fn()
            .mockReturnValueOnce({ block: block.data, last: 10900, first: 10200, count });
        await networkMonitor.broadcastBlock(block);
        expect(communicator.postBlock).toBeCalledTimes(0);
    });
    it.each([[0], [1], [2], [3]])(
        "should broadcast to (4 - blockPing)/4 of our peers when blockPing < 4",
        async (count) => {
            blockchain.getBlockPing = jest
                .fn()
                .mockReturnValue({ block: block.data, last: 10900, first: 10200, count });
            await networkMonitor.broadcastBlock(block);
            expect(communicator.postBlock).toBeCalledTimes(Math.ceil((peers.length * (4 - count)) / 4));
        },
    );
    it.each([[0], [1], [2], [3]])(
        "should broadcast to all of our peers when block.id doesnt match blockPing.id",
        async (count) => {
            // a ping for a different block id must not throttle the broadcast
            const tmpBlock = cloneDeep(block);
            tmpBlock.data.id = "random_id";
            blockchain.getBlockPing = jest
                .fn()
                .mockReturnValue({ block: tmpBlock.data, last: 10900, first: 10200, count });
            await networkMonitor.broadcastBlock(block);
            expect(communicator.postBlock).toBeCalledTimes(5);
        },
    );
    describe("when blockPing.last - blockPing.first < 500ms", () => {
        it("should not wait if block is from forger", async () => {
            blockchain.getBlockPing = jest
                .fn()
                .mockReturnValue({ block: block.data, last: 10500, first: 10200, count: 2, fromForger: true });
            const spySleep = jest.spyOn(Utils, "sleep");
            await networkMonitor.broadcastBlock(block);
            expect(communicator.postBlock).toBeCalledTimes(peers.length);
            expect(spySleep).toBeCalledTimes(0);
        });
        it("should wait until 500ms have elapsed between blockPing.last and blockPing.first before broadcasting", async () => {
            blockchain.getBlockPing = jest
                .fn()
                .mockReturnValue({ block: block.data, last: 10500, first: 10200, count: 2 });
            const spySleep = jest.spyOn(Utils, "sleep");
            await networkMonitor.broadcastBlock(block);
            expect(communicator.postBlock).toBeCalledTimes(Math.ceil((peers.length * 1) / 2));
            expect(spySleep).toBeCalledTimes(1);
            expect(spySleep).toBeCalledWith(200); // 500 - (last - first)
        });
        it("should not broadcast if during waiting we have received a new block", async () => {
            // second getBlockPing call (after the sleep) returns a different block id/height
            blockchain.getBlockPing = jest
                .fn()
                .mockReturnValueOnce({ block: block.data, last: 10500, first: 10200, count: 2 })
                .mockReturnValueOnce({
                    block: { ...block.data, id: "11111111", height: 3 },
                    last: 10500,
                    first: 10200,
                    count: 2,
                });
            const spySleep = jest.spyOn(Utils, "sleep");
            await networkMonitor.broadcastBlock(block);
            expect(communicator.postBlock).toBeCalledTimes(0);
            expect(spySleep).toBeCalledTimes(1);
        });
    });
});
});
import * as coreClient from "@azure/core-client";
// Mapper for the paged list of operations supported by the Custom Locations RP.
export const CustomLocationOperationsList: coreClient.CompositeMapper = {
  type: {
    name: "Composite",
    className: "CustomLocationOperationsList",
    modelProperties: {
      nextLink: {
        serializedName: "nextLink",
        type: {
          name: "String"
        }
      },
      value: {
        serializedName: "value",
        required: true,
        type: {
          name: "Sequence",
          element: {
            type: {
              name: "Composite",
              className: "CustomLocationOperation"
            }
          }
        }
      }
    }
  }
};
// Mapper for a single operation; the "display.*" serialized names flatten the
// nested display object onto top-level read-only properties.
export const CustomLocationOperation: coreClient.CompositeMapper = {
  type: {
    name: "Composite",
    className: "CustomLocationOperation",
    modelProperties: {
      isDataAction: {
        serializedName: "isDataAction",
        readOnly: true,
        type: {
          name: "Boolean"
        }
      },
      name: {
        serializedName: "name",
        readOnly: true,
        type: {
          name: "String"
        }
      },
      origin: {
        serializedName: "origin",
        readOnly: true,
        type: {
          name: "String"
        }
      },
      description: {
        serializedName: "display.description",
        readOnly: true,
        type: {
          name: "String"
        }
      },
      operation: {
        serializedName: "display.operation",
        readOnly: true,
        type: {
          name: "String"
        }
      },
      provider: {
        serializedName: "display.provider",
        readOnly: true,
        type: {
          name: "String"
        }
      },
      resource: {
        serializedName: "display.resource",
        readOnly: true,
        type: {
          name: "String"
        }
      }
    }
  }
};
// Standard ARM error envelope wrapping an ErrorDetail.
export const ErrorResponse: coreClient.CompositeMapper = {
  type: {
    name: "Composite",
    className: "ErrorResponse",
    modelProperties: {
      error: {
        serializedName: "error",
        type: {
          name: "Composite",
          className: "ErrorDetail"
        }
      }
    }
  }
};
// ARM error detail; `details` recursively nests further ErrorDetail entries.
export const ErrorDetail: coreClient.CompositeMapper = {
  type: {
    name: "Composite",
    className: "ErrorDetail",
    modelProperties: {
      code: {
        serializedName: "code",
        readOnly: true,
        type: {
          name: "String"
        }
      },
      message: {
        serializedName: "message",
        readOnly: true,
        type: {
          name: "String"
        }
      },
      target: {
        serializedName: "target",
        readOnly: true,
        type: {
          name: "String"
        }
      },
      details: {
        serializedName: "details",
        readOnly: true,
        type: {
          name: "Sequence",
          element: {
            type: {
              name: "Composite",
              className: "ErrorDetail"
            }
          }
        }
      },
      additionalInfo: {
        serializedName: "additionalInfo",
        readOnly: true,
        type: {
          name: "Sequence",
          element: {
            type: {
              name: "Composite",
              className: "ErrorAdditionalInfo"
            }
          }
        }
      }
    }
  }
};
// Service-specific extra error info: a type tag plus a free-form dictionary.
export const ErrorAdditionalInfo: coreClient.CompositeMapper = {
  type: {
    name: "Composite",
    className: "ErrorAdditionalInfo",
    modelProperties: {
      type: {
        serializedName: "type",
        readOnly: true,
        type: {
          name: "String"
        }
      },
      info: {
        serializedName: "info",
        readOnly: true,
        type: {
          name: "Dictionary",
          value: { type: { name: "any" } }
        }
      }
    }
  }
};
// Mapper for the paged list of CustomLocation resources.
export const CustomLocationListResult: coreClient.CompositeMapper = {
  type: {
    name: "Composite",
    className: "CustomLocationListResult",
    modelProperties: {
      nextLink: {
        serializedName: "nextLink",
        readOnly: true,
        type: {
          name: "String"
        }
      },
      value: {
        serializedName: "value",
        readOnly: true,
        type: {
          name: "Sequence",
          element: {
            type: {
              name: "Composite",
              className: "CustomLocation"
            }
          }
        }
      }
    }
  }
};
// Base ARM resource: read-only id/name/type shared by derived mappers.
export const Resource: coreClient.CompositeMapper = {
  type: {
    name: "Composite",
    className: "Resource",
    modelProperties: {
      id: {
        serializedName: "id",
        readOnly: true,
        type: {
          name: "String"
        }
      },
      name: {
        serializedName: "name",
        readOnly: true,
        type: {
          name: "String"
        }
      },
      type: {
        serializedName: "type",
        readOnly: true,
        type: {
          name: "String"
        }
      }
    }
  }
};
// Managed identity block: principal/tenant are service-assigned (read-only).
export const Identity: coreClient.CompositeMapper = {
  type: {
    name: "Composite",
    className: "Identity",
    modelProperties: {
      principalId: {
        serializedName: "principalId",
        readOnly: true,
        type: {
          name: "String"
        }
      },
      tenantId: {
        serializedName: "tenantId",
        readOnly: true,
        type: {
          name: "String"
        }
      },
      type: {
        serializedName: "type",
        type: {
          name: "String"
        }
      }
    }
  }
};
// Authentication settings carried in a custom location's properties bag.
export const CustomLocationPropertiesAuthentication: coreClient.CompositeMapper = {
  type: {
    name: "Composite",
    className: "CustomLocationPropertiesAuthentication",
    modelProperties: {
      type: {
        serializedName: "type",
        type: {
          name: "String"
        }
      },
      value: {
        serializedName: "value",
        type: {
          name: "String"
        }
      }
    }
  }
};
// Standard ARM systemData block (creation/modification audit metadata).
export const SystemData: coreClient.CompositeMapper = {
  type: {
    name: "Composite",
    className: "SystemData",
    modelProperties: {
      createdBy: {
        serializedName: "createdBy",
        type: {
          name: "String"
        }
      },
      createdByType: {
        serializedName: "createdByType",
        type: {
          name: "String"
        }
      },
      createdAt: {
        serializedName: "createdAt",
        type: {
          name: "DateTime"
        }
      },
      lastModifiedBy: {
        serializedName: "lastModifiedBy",
        type: {
          name: "String"
        }
      },
      lastModifiedByType: {
        serializedName: "lastModifiedByType",
        type: {
          name: "String"
        }
      },
      lastModifiedAt: {
        serializedName: "lastModifiedAt",
        type: {
          name: "DateTime"
        }
      }
    }
  }
};
// PATCH payload for a custom location; "properties.*" serialized names flatten
// the nested properties bag onto top-level fields.
export const PatchableCustomLocations: coreClient.CompositeMapper = {
  type: {
    name: "Composite",
    className: "PatchableCustomLocations",
    modelProperties: {
      identity: {
        serializedName: "identity",
        type: {
          name: "Composite",
          className: "Identity"
        }
      },
      tags: {
        serializedName: "tags",
        type: {
          name: "Dictionary",
          value: { type: { name: "String" } }
        }
      },
      authentication: {
        serializedName: "properties.authentication",
        type: {
          name: "Composite",
          className: "CustomLocationPropertiesAuthentication"
        }
      },
      clusterExtensionIds: {
        serializedName: "properties.clusterExtensionIds",
        type: {
          name: "Sequence",
          element: {
            type: {
              name: "String"
            }
          }
        }
      },
      displayName: {
        serializedName: "properties.displayName",
        type: {
          name: "String"
        }
      },
      hostResourceId: {
        serializedName: "properties.hostResourceId",
        type: {
          name: "String"
        }
      },
      hostType: {
        serializedName: "properties.hostType",
        type: {
          name: "String"
        }
      },
      namespace: {
        serializedName: "properties.namespace",
        type: {
          name: "String"
        }
      },
      provisioningState: {
        serializedName: "properties.provisioningState",
        type: {
          name: "String"
        }
      }
    }
  }
};
// Mapper for the paged list of EnabledResourceType entries.
export const EnabledResourceTypesListResult: coreClient.CompositeMapper = {
  type: {
    name: "Composite",
    className: "EnabledResourceTypesListResult",
    modelProperties: {
      nextLink: {
        serializedName: "nextLink",
        readOnly: true,
        type: {
          name: "String"
        }
      },
      value: {
        serializedName: "value",
        readOnly: true,
        type: {
          name: "Sequence",
          element: {
            type: {
              name: "Composite",
              className: "EnabledResourceType"
            }
          }
        }
      }
    }
  }
};
// One (namespace, resourceType, apiVersion) entry in an enabled resource type's metadata.
export const EnabledResourceTypePropertiesTypesMetadataItem: coreClient.CompositeMapper = {
  type: {
    name: "Composite",
    className: "EnabledResourceTypePropertiesTypesMetadataItem",
    modelProperties: {
      apiVersion: {
        serializedName: "apiVersion",
        type: {
          name: "String"
        }
      },
      resourceProviderNamespace: {
        serializedName: "resourceProviderNamespace",
        type: {
          name: "String"
        }
      },
      resourceType: {
        serializedName: "resourceType",
        type: {
          name: "String"
        }
      }
    }
  }
};
// Tracked ARM resource: inherits Resource's properties and adds tags + a
// required location.
export const TrackedResource: coreClient.CompositeMapper = {
  type: {
    name: "Composite",
    className: "TrackedResource",
    modelProperties: {
      ...Resource.type.modelProperties,
      tags: {
        serializedName: "tags",
        type: {
          name: "Dictionary",
          value: { type: { name: "String" } }
        }
      },
      location: {
        serializedName: "location",
        required: true,
        type: {
          name: "String"
        }
      }
    }
  }
};
// Proxy ARM resource: same shape as Resource, kept as a distinct class name.
export const ProxyResource: coreClient.CompositeMapper = {
  type: {
    name: "Composite",
    className: "ProxyResource",
    modelProperties: {
      ...Resource.type.modelProperties
    }
  }
};
// Full CustomLocation resource: TrackedResource fields plus identity,
// systemData, and the flattened "properties.*" bag.
export const CustomLocation: coreClient.CompositeMapper = {
  type: {
    name: "Composite",
    className: "CustomLocation",
    modelProperties: {
      ...TrackedResource.type.modelProperties,
      identity: {
        serializedName: "identity",
        type: {
          name: "Composite",
          className: "Identity"
        }
      },
      systemData: {
        serializedName: "systemData",
        type: {
          name: "Composite",
          className: "SystemData"
        }
      },
      authentication: {
        serializedName: "properties.authentication",
        type: {
          name: "Composite",
          className: "CustomLocationPropertiesAuthentication"
        }
      },
      clusterExtensionIds: {
        serializedName: "properties.clusterExtensionIds",
        type: {
          name: "Sequence",
          element: {
            type: {
              name: "String"
            }
          }
        }
      },
      displayName: {
        serializedName: "properties.displayName",
        type: {
          name: "String"
        }
      },
      hostResourceId: {
        serializedName: "properties.hostResourceId",
        type: {
          name: "String"
        }
      },
      hostType: {
        serializedName: "properties.hostType",
        type: {
          name: "String"
        }
      },
      namespace: {
        serializedName: "properties.namespace",
        type: {
          name: "String"
        }
      },
      provisioningState: {
        serializedName: "properties.provisioningState",
        type: {
          name: "String"
        }
      }
    }
  }
};
export const EnabledResourceType: coreClient.CompositeMapper = {
type: {
name: "Composite",
className: "EnabledResourceType",
modelProperties: {
...ProxyResource.type.modelProperties,
systemData: {
serializedName: "systemData",
type: {
name: "Composite",
className: "SystemData"
}
},
clusterExtensionId: {
serializedName: "properties.clusterExtensionId",
type: {
name: "String"
}
},
extensionType: {
serializedName: "properties.extensionType",
type: {
name: "String"
}
},
typesMetadata: {
serializedName: "properties.typesMetadata",
type: {
name: "Sequence",
element: {
type: {
name: "Composite",
className: "EnabledResourceTypePropertiesTypesMetadataItem"
}
}
}
}
}
}
}; | the_stack |
import { useRef, useMemo, useState } from "react";
import { useTheme, createStyles, makeStyles } from "@material-ui/core/styles";
import Editor, { useMonaco } from "@monaco-editor/react";
import { useFiretableContext } from "contexts/FiretableContext";
import { FieldType } from "constants/fields";
// Styles for the code editor wrapper, resize handle, save button, and the
// Monaco surface itself.
const useStyles = makeStyles((theme) => {
  return createStyles({
    editorWrapper: {
      position: "relative",
      minWidth: 800,
      height: "calc(100% - 200px)",
    },
    resizeIcon: {
      position: "absolute",
      bottom: 0,
      right: 0,
      color: theme.palette.text.disabled,
    },
    saveButton: {
      marginTop: theme.spacing(1),
    },
    editor: {
      // Undo the editor's user-select: none, which makes it unfocusable in Safari
      userSelect: "auto",
    },
  });
});
/**
 * Monaco-based JavaScript code editor for table scripts ("sparks").
 *
 * When the Monaco instance or the table's columns change, it registers
 * ambient type definitions with Monaco's JavaScript language service:
 * Firestore/Auth/Storage `.d.ts` files fetched from the public folder, any
 * caller-supplied `extraLibs`, a `utilFns` namespace, a `sparks` namespace
 * derived from the current table's column types, and a `row` class
 * mirroring the columns.
 *
 * Props:
 * - handleChange: called with the editor contents on each edit.
 * - extraLibs: optional string[] of additional ambient definitions.
 * - height: editor height in px (default 400).
 * - script: initial contents (editor is uncontrolled afterwards).
 * - onValideStatusUpdate: receives { isValid } after each validation pass.
 * - diagnosticsOptions: overrides Monaco's default diagnostics settings.
 */
export default function CodeEditor(props: any) {
  const {
    handleChange,
    extraLibs,
    height = 400,
    script,
    onValideStatusUpdate,
    diagnosticsOptions,
  } = props;
  const theme = useTheme();
  const monacoInstance = useMonaco();
  // Capture the initial script exactly once; subsequent edits flow out
  // through handleChange rather than back into `value`.
  const [initialEditorValue] = useState(script ?? "");
  const { tableState } = useFiretableContext();
  const classes = useStyles();
  const editorRef = useRef<any>();
  function handleEditorDidMount(_, editor) {
    editorRef.current = editor;
  }
  // Map the app palette type to a Monaco theme name.
  const themeTransformer = (theme: string) => {
    switch (theme) {
      case "dark":
        return "vs-dark";
      default:
        return theme;
    }
  };
  // NOTE(review): this block performs side effects (registering extra libs
  // with Monaco), and its return value is never used — useEffect would be
  // the idiomatic hook here; useMemo with an async callback memoizes a
  // promise that is immediately discarded. Left as-is in this
  // documentation-only pass.
  useMemo(async () => {
    if (!monacoInstance) {
      // useMonaco returns a monaco instance but initialisation is done asynchronously
      // dont execute the logic until the instance is initialised
      return;
    }
    // Ambient definition files served from the public folder.
    // NOTE(review): these three fetches run sequentially; Promise.all
    // would load them in parallel.
    const firestoreDefsFile = await fetch(
      `${process.env.PUBLIC_URL}/firestore.d.ts`
    );
    const firebaseAuthDefsFile = await fetch(
      `${process.env.PUBLIC_URL}/auth.d.ts`
    );
    const firebaseStorageDefsFile = await fetch(
      `${process.env.PUBLIC_URL}/storage.d.ts`
    );
    const firestoreDefs = await firestoreDefsFile.text();
    const firebaseStorageDefs = await firebaseStorageDefsFile.text();
    // Rewrite the auth definitions so they load as ambient declarations
    // under the `adminauth` namespace (matches the `auth` const below).
    // NOTE(review): String.replace only rewrites the FIRST occurrence of
    // each pattern — presumably intentional for these files; confirm.
    const firebaseAuthDefs = (await firebaseAuthDefsFile.text())
      ?.replace("export", "declare")
      ?.replace("admin.auth", "adminauth");
    try {
      monacoInstance.languages.typescript.javascriptDefaults.addExtraLib(
        firestoreDefs
      );
      monacoInstance.languages.typescript.javascriptDefaults.addExtraLib(
        firebaseAuthDefs
      );
      monacoInstance.languages.typescript.javascriptDefaults.addExtraLib(
        firebaseStorageDefs
      );
      monacoInstance.languages.typescript.javascriptDefaults.setDiagnosticsOptions(
        diagnosticsOptions ?? {
          noSemanticValidation: true,
          noSyntaxValidation: false,
        }
      );
      // compiler options
      monacoInstance.languages.typescript.javascriptDefaults.setCompilerOptions(
        {
          target: monacoInstance.languages.typescript.ScriptTarget.ES2020,
          allowNonTsExtensions: true,
        }
      );
      if (extraLibs) {
        monacoInstance.languages.typescript.javascriptDefaults.addExtraLib(
          extraLibs.join("\n"),
          "ts:filename/extraLibs.d.ts"
        );
      }
      // Ambient declarations for the `utilFns` helpers available to
      // spark scripts.
      monacoInstance.languages.typescript.javascriptDefaults.addExtraLib(
        [
          " /**",
          " * utility functions",
          " */",
          `
      declare namespace utilFns {
        /**
         * Sends out an email through sendGrid
         */
        function sendEmail(msg: {
          from: string;
          templateId: string;
          personalizations: { to: string; dynamic_template_data: any }[];
        }): void {}
        /**
         * Gets the secret defined in Google Cloud Secret
         */
        async function getSecret(name: string, v?: string): any {}
        /**
         * Async version of forEach
         */
        async function asyncForEach(array: any[], callback: Function): void {}
        /**
         * Generate random ID from numbers and English charactors inlcuding lowercase and uppercase
         */
        function generateId(): string {}
        /**
         * Add an item to an array field
         */
        function arrayUnion(val: string): void {}
        /**
         * Remove an item to an array field
         */
        function arrayRemove(val: string): void {}
        /**
         * Increment a number field
         */
        function increment(val: number): void {}
        function hasRequiredFields(requiredFields: string[], data: any): boolean {}
        function hasAnyRole(
          authorizedRoles: string[],
          context: functions.https.CallableContext
        ): boolean {}
      }
      `,
        ].join("\n"),
        "ts:filename/utils.d.ts"
      );
      // Build a `Row` type whose fields mirror the current table columns,
      // mapping each Firetable field type to a TS type.
      const rowDefinition = [
        ...Object.keys(tableState?.columns!).map((columnKey: string) => {
          const column = tableState?.columns[columnKey];
          switch (column.type) {
            case FieldType.shortText:
            case FieldType.longText:
            case FieldType.email:
            case FieldType.phone:
            case FieldType.code:
              return `${columnKey}:string`;
            case FieldType.singleSelect:
              // Union of the configured option literals.
              const typeString = [
                ...(column.config?.options?.map((opt) => `"${opt}"`) ?? []),
              ].join(" | ");
              return `${columnKey}:${typeString}`;
            case FieldType.multiSelect:
              return `${columnKey}:string[]`;
            case FieldType.checkbox:
              return `${columnKey}:boolean`;
            default:
              return `${columnKey}:any`;
          }
        }),
      ].join(";\n");
      // Union of quoted column names, used for the `Field` type.
      const availableFields = Object.keys(tableState?.columns!)
        .map((columnKey: string) => `"${columnKey}"`)
        .join("|\n");
      // Ambient declarations for the `sparks` configuration namespace.
      const sparksDefinition = `declare namespace sparks {
        // basic types that are used in all places
        type Row = {${rowDefinition}};
        type Field = ${availableFields} | string | object;
        type Fields = Field[];
        type Trigger = "create" | "update" | "delete";
        type Triggers = Trigger[];
        // the argument that the spark body takes in
        type SparkContext = {
          row: Row;
          ref:FirebaseFirestore.DocumentReference;
          storage:firebasestorage.Storage;
          db:FirebaseFirestore.Firestore;
          auth:adminauth.BaseAuth;
          change: any;
          triggerType: Triggers;
          sparkConfig: any;
          utilFns: any;
        }
        // function types that defines spark body and shuold run
        type ShouldRun = boolean | ((data: SparkContext) => boolean | Promise<any>);
        type ContextToString = ((data: SparkContext) => string | Promise<any>);
        type ContextToStringList = ((data: SparkContext) => string[] | Promise<any>);
        type ContextToObject = ((data: SparkContext) => object | Promise<any>);
        type ContextToObjectList = ((data: SparkContext) => object[] | Promise<any>);
        type ContextToRow = ((data: SparkContext) => Row | Promise<any>);
        type ContextToAny = ((data: SparkContext) => any | Promise<any>);
        // different types of bodies that slack message can use
        type slackEmailBody = {
          channels?: ContextToStringList;
          text?: ContextToString;
          emails: ContextToStringList;
          blocks?: ContextToObjectList;
          attachments?: ContextToAny;
        }
        type slackChannelBody = {
          channels: ContextToStringList;
          text?: ContextToString;
          emails?: ContextToStringList;
          blocks?: ContextToObjectList;
          attachments?: ContextToAny;
        }
        // different types of sparks
        type docSync = {
          label?:string;
          type: "docSync";
          triggers: Triggers;
          shouldRun: ShouldRun;
          requiredFields?: Fields;
          sparkBody: {
            fieldsToSync: Fields;
            row: ContextToRow;
            targetPath: ContextToString;
          }
        };
        type historySnapshot = {
          label?:string;
          type: "historySnapshot";
          triggers: Triggers;
          shouldRun: ShouldRun;
          sparkBody: {
            trackedFields: Fields;
          }
        }
        type algoliaIndex = {
          label?:string;
          type: "algoliaIndex";
          triggers: Triggers;
          shouldRun: ShouldRun;
          requiredFields?: Fields;
          sparkBody: {
            fieldsToSync: Fields;
            index: string;
            row: ContextToRow;
            objectID: ContextToString;
          }
        }
        type meiliIndex = {
          type: "meiliIndex";
          triggers: Triggers;
          shouldRun: ShouldRun;
          requiredFields?: Fields;
          sparkBody: {
            fieldsToSync: Fields;
            index: string;
            row: ContextToRow;
            objectID: ContextToString;
          }
        }
        type bigqueryIndex = {
          type: "bigqueryIndex";
          triggers: Triggers;
          shouldRun: ShouldRun;
          requiredFields?: Fields;
          sparkBody: {
            fieldsToSync: Fields;
            index: string;
            row: ContextToRow;
            objectID: ContextToString;
          }
        }
        type slackMessage = {
          label?:string;
          type: "slackMessage";
          triggers: Triggers;
          shouldRun: ShouldRun;
          requiredFields?: Fields;
          sparkBody: slackEmailBody | slackChannelBody;
        }
        type sendgridEmail = {
          label?:string;
          type: "sendgridEmail";
          triggers: Triggers;
          shouldRun: ShouldRun;
          requiredFields?: Fields;
          sparkBody: {
            msg: ContextToAny;
          }
        }
        type apiCall = {
          label?:string;
          type: "apiCall";
          triggers: Triggers;
          shouldRun: ShouldRun;
          requiredFields?: Fields;
          sparkBody: {
            body: ContextToString;
            url: ContextToString;
            method: ContextToString;
            callback: ContextToAny;
          }
        }
        type twilioMessage = {
          label?:string;
          type: "twilioMessage";
          triggers: Triggers;
          shouldRun: ShouldRun;
          requiredFields?: Fields;
          sparkBody: {
            body: ContextToAny;
            from: ContextToAny;
            to: ContextToAny;
          }
        }
        type task = {
          label?:string;
          type: "task";
          triggers: Triggers;
          shouldRun: ShouldRun;
          requiredFields?: Fields;
          sparkBody: {
            promises: ContextToAny;
          }
        }
        type mailchimp = {
          label?:string;
          type: "mailchimp";
          triggers: Triggers;
          shouldRun: ShouldRun;
          requiredFields?: Fields;
          sparkBody: {
            method: any;
            path: any;
            body: any;
          }
        }
        // an individual spark
        type Spark =
          | docSync
          | historySnapshot
          | algoliaIndex
          | meiliIndex
          | bigqueryIndex
          | slackMessage
          | sendgridEmail
          | apiCall
          | twilioMessage
          | mailchimp
          | task;
        type Sparks = Spark[]
        // use spark.config(sparks) in the code editor for static type check
        function config(sparks: Sparks): void;
      }`;
      monacoInstance.languages.typescript.javascriptDefaults.addExtraLib(
        [
          " /**",
          " * sparks type configuration",
          " */",
          sparksDefinition,
        ].join("\n"),
        "ts:filename/sparks.d.ts"
      );
      // Ambient globals available inside a script, plus a `row` class
      // whose static members mirror the table columns.
      monacoInstance.languages.typescript.javascriptDefaults.addExtraLib(
        [
          " declare var require: any;",
          " declare var Buffer: any;",
          " const ref:FirebaseFirestore.DocumentReference",
          " const storage:firebasestorage.Storage",
          " const db:FirebaseFirestore.Firestore;",
          " const auth:adminauth.BaseAuth;",
          "declare class row {",
          " /**",
          " * Returns the row fields",
          " */",
          ...Object.keys(tableState?.columns!).map((columnKey: string) => {
            const column = tableState?.columns[columnKey];
            switch (column.type) {
              case FieldType.shortText:
              case FieldType.longText:
              case FieldType.email:
              case FieldType.phone:
              case FieldType.code:
                return `static ${columnKey}:string`;
              case FieldType.singleSelect:
                const typeString = [
                  ...(column.config?.options?.map((opt) => `"${opt}"`) ?? []),
                  // "string",
                ].join(" | ");
                return `static ${columnKey}:${typeString}`;
              case FieldType.multiSelect:
                return `static ${columnKey}:string[]`;
              case FieldType.checkbox:
                return `static ${columnKey}:boolean`;
              default:
                return `static ${columnKey}:any`;
            }
          }),
          "}",
        ].join("\n"),
        "ts:filename/rowFields.d.ts"
      );
    } catch (error) {
      console.error(
        "An error occurred during initialization of Monaco: ",
        error
      );
    }
  }, [tableState?.columns, monacoInstance]);
  // Monaco reports its current diagnostic markers after each validation
  // pass; no markers means the script is considered valid.
  // NOTE(review): `length <= 0` is just `length === 0` — an array length
  // is never negative.
  function handleEditorValidation(markers) {
    if (onValideStatusUpdate) {
      onValideStatusUpdate({
        isValid: markers.length <= 0,
      });
    }
  }
  return (
    <>
      <div className={classes.editorWrapper}>
        <Editor
          theme={themeTransformer(theme.palette.type)}
          onMount={handleEditorDidMount}
          language="javascript"
          height={height}
          value={initialEditorValue}
          onChange={handleChange}
          onValidate={handleEditorValidation}
          className={classes.editor}
        />
      </div>
    </>
  );
}
import isEqual from 'lodash/isEqual';
import Vue from 'vue';
/**
 * A single item in the grid layout. All coordinates and sizes are in grid
 * units, not pixels.
 */
export interface LayoutItem {
  w: number;               // width in grid columns
  h: number;               // height in grid rows
  x: number;               // column position
  y: number;               // row position
  i: string;               // unique id
  minW?: number;
  minH?: number;
  maxW?: number;
  maxH?: number;
  moved?: boolean;         // transient flag used during collision resolution
  immobile?: boolean;      // item never moves (this module's "static" flag)
  isDraggable?: boolean;
  isResizable?: boolean;
}
export type Layout = LayoutItem[];
/** Pixel-space rectangle. */
export interface Position {
  left: number;
  top: number;
  width: number;
  height: number;
}
/** Data passed by the drag handler. */
export interface DraggableCallbackData {
  node: HTMLElement;
  x: number;
  y: number;
  deltaX: number;
  deltaY: number;
  lastX: number;
  lastY: number;
}
export interface PartialPosition {
  left: number;
  top: number;
}
export interface Size {
  width: number;
  height: number;
}
export interface GridDragEvent {
  e: Event;
  node: HTMLElement;
  newPosition: PartialPosition;
}
export interface GridResizeEvent {
  e: Event;
  node: HTMLElement;
  size: Size;
}
export type VueChildren = Vue[];
// All callbacks are of the signature (layout, oldItem, newItem, placeholder, e).
// NOTE(review): the 5th member below is a parameter *named* `Event` with an
// implicit `any` type, and the 6th is a parameter named `HTMLElement` —
// probably meant to be `e: Event` and `node: HTMLElement | null`; confirm
// against the callback call sites before changing.
export type EventCallback = (
  layout: Layout,
  oldItem: LayoutItem | null,
  newItem: LayoutItem | null,
  placeholder: LayoutItem | null,
  Event,
  HTMLElement: HTMLElement | null,
) => void;
export type CompactType = 'horizontal' | 'vertical';
export interface QueueJob {
  job: string;
  params?: any;
}
const isProduction = process.env.NODE_ENV === 'production';
// Gate for the log() helper at the bottom of this module.
const DEBUG = false;
/**
 * Return the bottom coordinate of the layout.
 *
 * @param {Array} layout Layout array.
 * @return {Number} Bottom coordinate (0 for an empty layout).
 */
export function bottom(layout: Layout): number {
  let max = 0;
  for (const item of layout) {
    const itemBottom = item.y + item.h;
    if (itemBottom > max) {
      max = itemBottom;
    }
  }
  return max;
}
/**
 * Copy a layout: a fresh array whose items are each cloned via
 * cloneLayoutItem (the items themselves are new objects).
 */
export function cloneLayout(layout: Layout): Layout {
  return layout.map((item) => cloneLayoutItem(item));
}
// Fast path to cloning, since this is monomorphic.
// Copies every field explicitly, coercing the transient flags to booleans.
export function cloneLayoutItem(source: LayoutItem): LayoutItem {
  const copy: LayoutItem = {
    w: source.w,
    h: source.h,
    x: source.x,
    y: source.y,
    i: source.i,
    minW: source.minW,
    maxW: source.maxW,
    minH: source.minH,
    maxH: source.maxH,
    moved: !!source.moved,
    immobile: !!source.immobile,
    // These can be null
    isDraggable: source.isDraggable,
    isResizable: source.isResizable,
  };
  return copy;
}
/**
 * Deep-compare two Vue component instances via lodash `isEqual`.
 *
 * Used to detect whether the layout's children changed between renders.
 * NOTE(review): a deep equality walk over whole Vue instances may be
 * expensive; confirm callers only pass small prop subsets if this shows
 * up in profiles.
 */
export function childrenEqual(a: Vue, b: Vue): boolean {
  return isEqual(a, b);
}
/**
 * Given two layout items, check if they collide (overlap).
 *
 * An item never collides with itself (same id). Two distinct boxes
 * overlap exactly when they are NOT separated on either axis.
 */
export function collides(l1: LayoutItem, l2: LayoutItem): boolean {
  if (l1.i === l2.i) {
    return false; // same element
  }
  const separated =
    l1.x + l1.w <= l2.x || // l1 is left of l2
    l1.x >= l2.x + l2.w || // l1 is right of l2
    l1.y + l1.h <= l2.y || // l1 is above l2
    l1.y >= l2.y + l2.h;   // l1 is below l2
  return !separated;
}
/**
 * Given a layout, compact it. This involves going down each y coordinate and removing gaps
 * between items.
 *
 * @param {Array} layout Layout.
 * @param {CompactType} compactType Whether or not to compact the layout
 *   vertically.
 * @param {number} cols number of columns
 * @return {Array} Compacted Layout (same item order as the input).
 */
export function compact(
  layout: Layout,
  compactType: CompactType,
  cols: number,
): Layout {
  // Statics go in the compareWith array right away so items flow around them.
  const compareWith = getStatics(layout);
  // We go through the items by row and column.
  const sorted = sortLayoutItems(layout, compactType);
  // Holding for new items.
  const out = Array(layout.length);
  for (let i = 0, len = sorted.length; i < len; i++) {
    // Work on a clone so the caller's items are not mutated directly.
    let l = cloneLayoutItem(sorted[i]);
    // Don't move immobile elements
    if (!l.immobile) {
      l = compactItem(compareWith, l, compactType, cols, sorted);
      // Add to comparison array. We only collide with items before this one.
      // Statics are already in this array.
      compareWith.push(l);
    }
    // Add to output array to make sure they still come out in the right order.
    // (indexOf maps the sorted position back to the original position.)
    out[layout.indexOf(sorted[i])] = l;
    // Clear moved flag, if it exists.
    l.moved = false;
  }
  return out;
}
// Maps a position axis to its corresponding size property.
const heightWidth = { x: 'w', y: 'h' };
/**
 * Before moving item down, it will check if the movement will cause collisions and move those items down before.
 *
 * Recursively pushes colliding items along `axis` so that `item` can be
 * placed at `moveToCoord`. Mutates the items in `layout` in place.
 */
function resolveCompactionCollision(
  layout: Layout,
  item: LayoutItem,
  moveToCoord: number,
  axis: 'x' | 'y',
) {
  const sizeProp = heightWidth[axis];
  // Temporarily bump the item one unit along the axis so the collision
  // scan below sees it at (roughly) its destination; the real coordinate
  // is assigned at the end of this function.
  item[axis] += 1;
  const itemIndex = layout
    .map((layoutItem) => {
      return layoutItem.i;
    })
    .indexOf(item.i);
  // Go through each item we collide with.
  for (let i = itemIndex + 1; i < layout.length; i++) {
    const otherItem = layout[i];
    // Ignore immobile items
    if (otherItem.immobile) {
      continue;
    }
    // Optimization: we can break early if we know we're past this el
    // We can do this b/c it's a sorted layout
    if (otherItem.y > (item.y + item.h)) {
      break;
    }
    if (collides(item, otherItem)) {
      // Cascade: push the collided item past our destination edge.
      resolveCompactionCollision(
        layout,
        otherItem,
        moveToCoord + item[sizeProp],
        axis,
      );
    }
  }
  // Now place the item at its real destination.
  item[axis] = moveToCoord;
}
/**
 * Compact an item in the layout: pull it up (vertical) or left
 * (horizontal) as far as it can go without colliding, then push it
 * forward past any remaining collisions.
 *
 * Mutates and returns `l`.
 *
 * @param compareWith Items already placed (statics plus previously
 *   compacted items).
 * @param l           Item to compact (mutated in place).
 * @param compactType 'vertical' | 'horizontal'.
 * @param cols        Column count, used to clamp horizontal overflow.
 * @param fullLayout  The complete sorted layout, needed to cascade moves.
 */
export function compactItem(
  compareWith: Layout,
  l: LayoutItem,
  compactType: CompactType,
  cols: number,
  fullLayout: Layout,
): LayoutItem {
  const compactV = compactType === 'vertical';
  const compactH = compactType === 'horizontal';
  if (compactV) {
    // Bottom 'y' possible is the bottom of the layout.
    // This allows you to do nice stuff like specify {y: Infinity}
    // This is here because the layout must be sorted in order to get the correct bottom `y`.
    l.y = Math.min(bottom(compareWith), l.y);
    // Move the element up as far as it can go without colliding.
    while (l.y > 0 && !getFirstCollision(compareWith, l)) {
      l.y--;
    }
  } else if (compactH) {
    l.y = Math.min(bottom(compareWith), l.y);
    // Move the element left as far as it can go without colliding.
    while (l.x > 0 && !getFirstCollision(compareWith, l)) {
      l.x--;
    }
  }
  // Move it down, and keep moving it down if it's colliding.
  // Look the collision up once per iteration (the original re-ran the
  // scan three times per iteration for the same, unchanged result).
  let collision: LayoutItem | undefined = getFirstCollision(compareWith, l);
  while (collision) {
    if (compactH) {
      resolveCompactionCollision(fullLayout, l, collision.x + collision.w, 'x');
    } else {
      resolveCompactionCollision(fullLayout, l, collision.y + collision.h, 'y');
    }
    // Since we can't grow without bounds horizontally, if we've overflown, let's move it down and try again.
    if (compactH && l.x + l.w > cols) {
      l.x = cols - l.w;
      l.y++;
    }
    collision = getFirstCollision(compareWith, l);
  }
  return l;
}
/**
 * Given a layout, make sure all elements fit within its bounds.
 * Mutates the items in place and returns the same array.
 *
 * @param {Array} layout Layout array.
 * @param {Number} bounds Number of columns.
 */
export function correctBounds(
  layout: Layout,
  bounds: { cols: number },
): Layout {
  const collidesWith = getStatics(layout);
  for (const l of layout) {
    // Overflows right: pull back inside the grid.
    if (l.x + l.w > bounds.cols) {
      l.x = bounds.cols - l.w;
    }
    // Overflows left: snap to column 0 and stretch to full width.
    if (l.x < 0) {
      l.x = 0;
      l.w = bounds.cols;
    }
    if (!l.immobile) {
      collidesWith.push(l);
    } else {
      // If this is immobile and collides with other immobiles, we must move it down.
      // We have to do something nicer than just letting them overlap.
      while (getFirstCollision(collidesWith, l)) {
        l.y++;
      }
    }
  }
  return layout;
}
/**
 * Get a layout item by ID. Used so we can override later on if necessary.
 *
 * @param {Array<Layout>} layout Layout array.
 * @param {String} id ID
 * @return {LayoutItem} Item with that ID, or `undefined` if not found.
 */
export function getLayoutItem(layout: Layout, id: string): LayoutItem | undefined {
  // The previous declared return type was `LayoutItem | null`, but the
  // function fell off the end and returned `undefined` implicitly when no
  // item matched. The type now reflects the actual runtime behaviour,
  // which truthiness-checking callers already rely on.
  return layout.find((item) => item.i === id);
}
/**
 * Returns the first item this layout collides with.
 * It doesn't appear to matter which order we approach this from, although
 * perhaps that is the wrong thing to do.
 *
 * @param {Layout} layout
 * @param {LayoutItem} layoutItem Layout item.
 * @return {Object|null} A colliding layout item, or undefined.
 */
export function getFirstCollision(
  layout: Layout,
  layoutItem: LayoutItem,
): LayoutItem | undefined {
  return layout.find((candidate) => collides(candidate, layoutItem));
}
/** Return every item in `layout` that collides with `layoutItem`. */
export function getAllCollisions(
  layout: Layout,
  layoutItem: LayoutItem,
): LayoutItem[] {
  const result: LayoutItem[] = [];
  for (const candidate of layout) {
    if (collides(candidate, layoutItem)) {
      result.push(candidate);
    }
  }
  return result;
}
/**
 * Get all immobile elements.
 * @param {Array} layout Array of layout objects.
 * @return {Array} Array of immobile layout items.
 */
export function getStatics(layout: Layout): LayoutItem[] {
  const statics: LayoutItem[] = [];
  for (const item of layout) {
    if (item.immobile) {
      statics.push(item);
    }
  }
  return statics;
}
/**
 * Move an element. Responsible for doing cascading movements of other elements.
 *
 * Mutates `l` (and possibly other items) in place and returns `layout`.
 *
 * @param {Layout} layout Full layout to modify.
 * @param {VueChildren} children Children of layout
 * @param {LayoutItem} l element to move.
 * @param {Number} [x] X position in grid units.
 * @param {Number} [y] Y position in grid units.
 * @param isUserAction
 * @param preventCollision
 * @param compactType
 * @param cols
 */
export function moveElement(
  layout: Layout,
  children: VueChildren,
  l: LayoutItem,
  x: number,
  y: number,
  isUserAction: boolean,
  preventCollision: boolean,
  compactType: CompactType,
  cols: number,
): Layout {
  // An `immobile` flag on the matching Vue component wins over layout data.
  if (children) {
    const index = children.findIndex( (item) => item.$props.i === l.i);
    if (index >= 0) {
      if (children[index].$props.immobile === true) {
        return layout;
      }
    }
  }
  if (l.immobile) {
    return layout;
  }
  // Short-circuit if nothing to do.
  if (l.y === y && l.x === x) {
    return layout;
  }
  log(`Moving element ${l.i} to [${String(x)},${String(y)}] from [${l.x},${l.y}]`);
  const oldX = l.x;
  const oldY = l.y;
  // This is quite a bit faster than extending the object
  if (typeof x === 'number') {
    l.x = x;
  }
  if (typeof y === 'number') {
    l.y = y;
  }
  l.moved = true;
  // If this collides with anything, move it.
  // When doing this comparison, we have to sort the items we compare with
  // to ensure, in the case of multiple collisions, that we're getting the
  // nearest collision.
  let sorted = sortLayoutItems(layout, compactType);
  const movingUp =
    compactType === 'vertical' && typeof y === 'number' ? oldY >= y
    : compactType === 'horizontal' && typeof x === 'number' ? oldX >= x
    : false;
  // When moving towards the origin, scan collisions in reverse so the
  // nearest one is found first.
  if (movingUp) {
    sorted = sorted.reverse();
  }
  const collisions = getAllCollisions(sorted, l);
  // There was a collision; abort
  if (preventCollision && collisions.length) {
    log(`Collision prevented on ${l.i}, reverting.`);
    l.x = oldX;
    l.y = oldY;
    l.moved = false;
    return layout;
  }
  // Move each item that collides away from this element.
  for (let i = 0, len = collisions.length; i < len; i++) {
    const collision = collisions[i];
    log(
      `Resolving collision between ${l.i} at [${l.x},${l.y}] and ${
        collision.i
      } at [${collision.x},${collision.y}]`,
    );
    // Short circuit so we can't infinite loop
    if (collision.moved) {
      continue;
    }
    // Don't move immobile items - we have to move *this* element away
    if (collision.immobile) {
      layout = moveElementAwayFromCollision(
        layout,
        children,
        collision,
        l,
        isUserAction,
        compactType,
        cols,
      );
    } else {
      layout = moveElementAwayFromCollision(
        layout,
        children,
        l,
        collision,
        isUserAction,
        compactType,
        cols,
      );
    }
  }
  return layout;
}
/**
 * This is where the magic needs to happen - given a collision, move an element away from the collision.
 * We attempt to move it up if there's room, otherwise it goes below.
 *
 * Mutually recursive with moveElement.
 *
 * @param {Array} layout Full layout to modify.
 * @param {VueChildren} children Children of layout
 * @param {LayoutItem} collidesWith Layout item we're colliding with.
 * @param {LayoutItem} itemToMove Layout item we're moving.
 * @param isUserAction
 * @param compactType
 * @param cols
 * @return {Layout} Layout
 */
export function moveElementAwayFromCollision(
  layout: Layout,
  children: VueChildren,
  collidesWith: LayoutItem,
  itemToMove: LayoutItem,
  isUserAction: boolean,
  compactType: CompactType,
  cols: number,
): Layout {
  const compactH = compactType === 'horizontal';
  // Compact vertically if not set to horizontal
  const compactV = compactType !== 'horizontal';
  const preventCollision = false; // we're already colliding
  // If there is enough space above the collision to put this element, move it there.
  // We only do this on the main collision as this can get funky in cascades and cause
  // unwanted swapping behavior.
  if (isUserAction) {
    // Reset isUserAction flag because we're not in the main collision anymore.
    isUserAction = false;
    // Make a mock item so we don't modify the item here, only modify in moveElement.
    const fakeItem: LayoutItem = {
      x: compactH ? Math.max(collidesWith.x - itemToMove.w, 0) : itemToMove.x,
      y: compactV ? Math.max(collidesWith.y - itemToMove.h, 0) : itemToMove.y,
      w: itemToMove.w,
      h: itemToMove.h,
      i: '-1',
    };
    // No collision? If so, we can go up there; otherwise, we'll end up moving down as normal
    if (!getFirstCollision(layout, fakeItem)) {
      log(
        `Doing reverse collision on ${itemToMove.i} up to [${fakeItem.x},${
          fakeItem.y
        }].`,
      );
      return moveElement(
        layout,
        children,
        itemToMove,
        compactH ? fakeItem.x : undefined,
        compactV ? fakeItem.y : undefined,
        isUserAction,
        preventCollision,
        compactType,
        cols,
      );
    }
  }
  // Fallback: nudge the item one unit along the compaction axis.
  return moveElement(
    layout,
    children,
    itemToMove,
    compactH ? itemToMove.x + 1 : undefined,
    compactV ? itemToMove.y + 1 : undefined,
    isUserAction,
    preventCollision,
    compactType,
    cols,
  );
}
/**
 * Helper to convert a number to a percentage string.
 *
 * @param {Number} num Any number
 * @return {String} That number as a percentage.
 */
export function perc(num: number): string {
  return `${num * 100}%`;
}
/**
 * Build an inline-style object that positions an element via CSS
 * translate (with vendor prefixes) and pixel width/height.
 */
export function setTransform(pos: Position): object {
  // Replace unitless items with px
  const translate = 'translate(' + pos.left + 'px,' + pos.top + 'px)';
  return {
    transform: translate,
    WebkitTransform: translate,
    MozTransform: translate,
    msTransform: translate,
    OTransform: translate,
    width: pos.width + 'px',
    height: pos.height + 'px',
    position: 'absolute',
  };
}
/**
 * Build an inline-style object that positions an element with absolute
 * top/left instead of a CSS transform.
 */
export function setTopLeft(pos: Position): object {
  return {
    top: pos.top + 'px',
    left: pos.left + 'px',
    width: pos.width + 'px',
    height: pos.height + 'px',
    position: 'absolute',
  };
}
/**
 * Get layout items sorted from top left to right and down.
 * Horizontal compaction sorts column-major; everything else row-major.
 *
 * @return {Array} Array of layout objects, sorted.
 */
export function sortLayoutItems(
  layout: Layout,
  compactType: CompactType,
): Layout {
  return compactType === 'horizontal'
    ? sortLayoutItemsByColRow(layout)
    : sortLayoutItemsByRowCol(layout);
}
/** Sort a copy of the layout row-major: by y, then by x within a row. */
export function sortLayoutItemsByRowCol(layout: Layout): Layout {
  const copy: any[] = ([] as any[]).concat(layout);
  return copy.sort((a, b) => {
    if (a.y === b.y && a.x === b.x) {
      // Without this, we can get different sort results in IE vs. Chrome/FF
      return 0;
    }
    if (a.y > b.y || (a.y === b.y && a.x > b.x)) {
      return 1;
    }
    return -1;
  });
}
/** Sort a copy of the layout column-major: by x, then by y within a column. */
export function sortLayoutItemsByColRow(layout: Layout): Layout {
  const copy: any[] = ([] as any[]).concat(layout);
  return copy.sort((a, b) =>
    a.x > b.x || (a.x === b.x && a.y > b.y) ? 1 : -1,
  );
}
/**
 * Generate a layout using the initialLayout and children as a template.
 * Missing entries will be added, extraneous ones will be truncated.
 *
 * @param {Array} initialLayout Layout passed in through props.
 * @param {VueChildren} children Children of parent layout
 * @param {number} cols Current columns number.
 * @param {CompactType} compactType Compaction option.
 * @return {Array} Working layout.
 */
export function synchronizeLayoutWithChildren(
  initialLayout: Layout,
  children: VueChildren,
  cols: number,
  compactType: CompactType,
): Layout {
  initialLayout = initialLayout || [];
  // Generate one layout item per child.
  let layout: Layout = [];
  if (children.length > 0) {
    children.map( (child: Vue) => {
      const props = child.$props;
      const key = props.i;
      // Prefer the existing layout entry for this child, if any.
      const exists = getLayoutItem(initialLayout, String(key));
      if (exists) {
        layout.push(cloneLayoutItem(exists));
      } else {
        const {x, y, w, h, immobile} = props;
        // Child supplies its own geometry via props...
        if (x !== undefined && y !== undefined && w !== undefined && h !== undefined && key !== undefined ) {
          layout.push(cloneLayoutItem({ x, y, w, h, immobile: immobile ? immobile : false, i: String(key)}));
        } else {
          // ...otherwise give it a 1x1 slot at the bottom of the layout.
          layout.push(cloneLayoutItem({
            w: 1,
            h: 1,
            x: 0,
            y: bottom(layout),
            i: String(key),
          }));
        }
      }
    });
  } else {
    layout = initialLayout;
  }
  // Correct the layout.
  layout = correctBounds(layout, { cols });
  layout = compact(layout, compactType, cols);
  return layout;
}
/**
 * Validate a layout. Throws errors.
 *
 * @param {Array} layout Array of layout items.
 * @param {String} [contextName] Context name for errors.
 * @throw {Error} Validation error.
 */
export function validateLayout(
  layout: Layout,
  contextName: string = 'Layout',
): boolean {
  const subProps = ['x', 'y', 'w', 'h'];
  if (!Array.isArray(layout)) {
    throw new Error(contextName + ' must be an array!');
  }
  // Use the item index in error messages. The previous code interpolated
  // the item object itself, which stringifies to "[object Object]" and
  // made the errors useless for locating the offending entry.
  for (let i = 0; i < layout.length; i++) {
    const item = layout[i];
    for (const subProp of subProps) {
      if (typeof item[subProp] !== 'number') {
        throw new Error(
          'GridLayout: ' +
            contextName +
            '[' +
            i +
            '].' +
            subProp +
            ' must be a number!',
        );
      }
    }
    if (item.i && typeof item.i !== 'string') {
      throw new Error(
        'GridLayout: ' + contextName + '[' + i + '].i must be a string!',
      );
    }
    if (item.immobile !== undefined && typeof item.immobile !== 'boolean') {
      throw new Error(
        'GridLayout: ' +
          contextName +
          '[' +
          i +
          '].immobile must be a boolean!',
      );
    }
  }
  return true;
}
// Flow can't really figure this out, so we just use Object.
// Rebinds each named method on `el` so it can be detached safely.
export function autoBindHandlers(el: object, fns: string[]): void {
  for (const key of fns) {
    el[key] = el[key].bind(el);
  }
}
function log(...args) {
if (!DEBUG) {
return;
}
// eslint-disable-next-line no-console
console.log(...args);
} | the_stack |
import {
DescriptorProto,
EnumDescriptorProto,
EnumOptions,
EnumValueDescriptorProto,
EnumValueOptions,
FieldDescriptorProto,
FieldOptions,
FileDescriptorProto,
FileOptions,
MessageOptions,
MethodDescriptorProto,
MethodOptions,
OneofDescriptorProto,
OneofOptions,
ServiceDescriptorProto,
ServiceOptions
} from "./google/protobuf/descriptor";
import {AnyDescriptorProto, AnyOptions, AnyTypeDescriptorProto, isAnyTypeDescriptorProto} from "./descriptor-info";
import {assert, assertNever} from "@protobuf-ts/runtime";
/**
 * Return the logical parent of the given descriptor.
 *
 * If there is no parent, return `undefined`. This should
 * only be the case for `FileDescriptorProto`.
 */
export type DescriptorParentFn = (descriptor: AnyDescriptorProto) => AnyDescriptorProto | undefined;
/**
 * Can lookup the ancestry of a descriptor.
 */
export interface IDescriptorTree {
    /**
     * Lists known files.
     */
    allFiles(): readonly FileDescriptorProto[];
    /**
     * Return the immediate parent of the given descriptor.
     *
     * Returns `undefined` for a file descriptor.
     *
     * Returns the parent descriptor for an option.
     */
    parentOf(descriptor: FieldDescriptorProto): DescriptorProto;
    parentOf(descriptor: AnyDescriptorProto): AnyDescriptorProto | undefined;
    parentOf(options: FileOptions): FileDescriptorProto;
    parentOf(options: MessageOptions): DescriptorProto;
    // NOTE(review): the FieldOptions and EnumOptions overloads below break
    // the pattern of the surrounding ones (each XOptions maps to the
    // XDescriptorProto holding it). Expected: FieldOptions ->
    // FieldDescriptorProto and EnumOptions -> EnumDescriptorProto; these
    // look like copy-paste slips. The class below mirrors these
    // signatures, so both must be changed together — confirm before fixing.
    parentOf(options: FieldOptions): FileDescriptorProto;
    parentOf(options: OneofOptions): OneofDescriptorProto;
    parentOf(options: EnumOptions): FieldDescriptorProto;
    parentOf(options: EnumValueOptions): EnumValueDescriptorProto;
    parentOf(options: ServiceOptions): ServiceDescriptorProto;
    parentOf(options: MethodOptions): MethodDescriptorProto;
    /**
     * Return the file where the descriptor was declared.
     *
     * If a file descriptor is passed, returns the
     * file descriptor itself.
     */
    fileOf(descriptor: AnyDescriptorProto): FileDescriptorProto;
    /**
     * Returns all ancestors of the given descriptor, up to
     * the file descriptor where the descriptor was declared.
     */
    ancestorsOf(descriptor: AnyDescriptorProto): AnyDescriptorProto[];
    /**
     * Visit all known descriptors and all their descendants.
     */
    visit(visitor: (descriptor: AnyDescriptorProto) => void): void;
    /**
     * Visit all descendants of the given descriptor.
     */
    visit(startingFrom: AnyDescriptorProto, visitor: (descriptor: AnyDescriptorProto) => void): void;
    /**
     * Visit all known type descriptors and their
     * descendant types.
     */
    visitTypes(visitor: (descriptor: AnyTypeDescriptorProto) => void): void;
    /**
     * Visit the type children of the descriptor
     * and their descendant types.
     */
    visitTypes(startingFrom: AnyDescriptorProto, visitor: (descriptor: AnyTypeDescriptorProto) => void): void;
}
export class DescriptorTree implements IDescriptorTree {
private readonly _files: ReadonlyArray<FileDescriptorProto>;
private readonly _descriptors: ReadonlyMap<AnyDescriptorProto, Entry>;
private readonly _options: ReadonlyMap<AnyOptions, AnyDescriptorProto>;
/**
* Create the tree from a list of root files.
*/
static from(...files: FileDescriptorProto[]): DescriptorTree {
const descriptors: Array<[AnyDescriptorProto, Entry]> = [];
const options: Array<[AnyOptions, AnyDescriptorProto]> = [];
for (const file of files) {
visitDescriptorTree(file, (descriptor, ancestors) => {
descriptors.push([descriptor, {ancestors, file, parent: ancestors[ancestors.length - 1]}]);
if (descriptor.options) {
options.push([descriptor.options, descriptor]);
}
});
}
return new DescriptorTree(descriptors, options);
}
private constructor(descriptors: ReadonlyArray<[AnyDescriptorProto, Entry]>, options: ReadonlyArray<[AnyOptions, AnyDescriptorProto]>) {
const descriptorMap = new Map<AnyDescriptorProto, Entry>();
const optionMap = new Map<AnyOptions, AnyDescriptorProto>();
const files = [];
for (const [descriptor, info] of descriptors) {
// infos
assert(!descriptorMap.has(descriptor));
descriptorMap.set(descriptor, info);
// files
if (FileDescriptorProto.is(descriptor)) {
files.push(descriptor);
}
}
for (const [option, descriptor] of options) {
optionMap.set(option, descriptor);
}
this._files = files;
this._descriptors = descriptorMap;
this._options = optionMap;
}
ancestorsOf(descriptor: AnyDescriptorProto): AnyDescriptorProto[] {
const v = this._descriptors.get(descriptor);
assert(v !== undefined);
return v.ancestors.concat();
}
fileOf(descriptor: AnyDescriptorProto): FileDescriptorProto {
const v = this._descriptors.get(descriptor);
assert(v !== undefined);
return v.file;
}
allFiles(): readonly FileDescriptorProto[] {
return this._files;
}
/**
 * Looks up the logical parent of a descriptor, or the descriptor that
 * owns an options message.
 *
 * For descriptors: returns the enclosing descriptor, or `undefined` for
 * a FileDescriptorProto (files have no parent).
 * For options: returns the descriptor the options are attached to.
 *
 * Fixed overload return types: FieldOptions are owned by a
 * FieldDescriptorProto and EnumOptions by an EnumDescriptorProto
 * (see google/protobuf/descriptor.proto option ownership); the previous
 * signatures declared FileDescriptorProto / FieldDescriptorProto.
 */
parentOf(descriptor: FieldDescriptorProto): DescriptorProto;
parentOf(descriptor: AnyDescriptorProto): AnyDescriptorProto | undefined;
parentOf(options: FileOptions): FileDescriptorProto;
parentOf(options: MessageOptions): DescriptorProto;
parentOf(options: FieldOptions): FieldDescriptorProto;
parentOf(options: OneofOptions): OneofDescriptorProto;
parentOf(options: EnumOptions): EnumDescriptorProto;
parentOf(options: EnumValueOptions): EnumValueDescriptorProto;
parentOf(options: ServiceOptions): ServiceDescriptorProto;
parentOf(options: MethodOptions): MethodDescriptorProto;
parentOf(descriptorOrOptions: AnyDescriptorProto | AnyOptions): AnyDescriptorProto | undefined {
    // options lookup first: option objects are never registered as descriptors
    const optionParent = this._options.get(descriptorOrOptions as any);
    if (optionParent) {
        return optionParent;
    }
    const descriptorEntry = this._descriptors.get(descriptorOrOptions as any);
    if (descriptorEntry) {
        return descriptorEntry.parent;
    }
    // only files legitimately have no entry parent; anything else is a bug
    assert(FileDescriptorProto.is(descriptorOrOptions));
    return undefined;
}
/**
 * Visits descriptors depth-first. With one argument, walks every file in
 * the tree (files included). With two arguments, walks the descendants of
 * `startingFrom`, excluding the starting element itself.
 */
visit(visitor: (descriptor: AnyDescriptorProto) => void): void;
visit(startingFrom: AnyDescriptorProto, visitor: (descriptor: AnyDescriptorProto) => void): void;
visit(a: any, b?: any) {
    if (b === undefined) {
        // one-argument form: `a` is the visitor
        for (const file of this._files) {
            visitDescriptorTree(file, a);
        }
        return;
    }
    // two-argument form: `a` is the root, `b` the visitor
    visitDescriptorTree(a as AnyDescriptorProto, descriptor => {
        // visitDescriptorTree also invokes the callback on the starting
        // element; this overload promises descendants only, so skip it
        if (descriptor !== a) {
            b(descriptor);
        }
    });
}
/**
 * Like visit(), but only invokes the visitor for type descriptors
 * (messages and enums). With one argument, walks all files; with two,
 * walks the descendants of `startingFrom` (start element excluded).
 */
visitTypes(visitor: (descriptor: AnyTypeDescriptorProto) => void): void;
visitTypes(startingFrom: AnyDescriptorProto, visitor: (descriptor: AnyTypeDescriptorProto) => void): void;
visitTypes(a: any, b?: any): void {
    if (b === undefined) {
        // one-argument form: `a` is the visitor
        for (const file of this._files) {
            visitDescriptorTree(file, descriptor => {
                if (isAnyTypeDescriptorProto(descriptor)) {
                    a(descriptor);
                }
            });
        }
        return;
    }
    // two-argument form: `a` is the root, `b` the visitor
    visitDescriptorTree(a, descriptor => {
        // the walk includes the starting element itself; skip it
        if (descriptor !== a && isAnyTypeDescriptorProto(descriptor)) {
            b(descriptor);
        }
    });
}
}
// Per-descriptor bookkeeping stored by DescriptorTree: the full ancestor
// chain (outermost first), the direct parent (undefined for files), and
// the file the descriptor was declared in.
type Entry = {
readonly ancestors: readonly AnyDescriptorProto[],
readonly parent: AnyDescriptorProto | undefined,
readonly file: FileDescriptorProto,
};
// Callback invoked for each visited descriptor, together with the chain
// of its ancestors (outermost first; empty for the root element).
type VisitorFn = (descriptor: AnyDescriptorProto, carry: readonly AnyDescriptorProto[]) => void;
/**
 * Visit all logical children of the given descriptor proto.
 *
 * The "visitor" function is called for each element,
 * including the input. It receives two arguments:
 * 1) the current descriptor proto
 * 2) the ancestors of the current descriptor proto (an array of
 *    descriptors, outermost first; empty for the input itself)
 */
export function visitDescriptorTree(input: AnyDescriptorProto, visitor: VisitorFn): void {
visitWithCarry(input, [], visitor);
}
function visitWithCarry(input: AnyDescriptorProto, carry: readonly AnyDescriptorProto[], visitor: VisitorFn): void {
visitor(input, carry);
carry = carry.concat(input);
// noinspection SuspiciousTypeOfGuard
if (EnumDescriptorProto.is(input)) {
for (const val of input.value) {
visitWithCarry(val, carry, visitor);
}
} else if (DescriptorProto.is(input)) {
for (const oneof of input.oneofDecl) {
visitWithCarry(oneof, carry, visitor);
}
for (const field of input.field) {
visitWithCarry(field, carry, visitor);
}
for (const message of input.nestedType) {
visitWithCarry(message, carry, visitor);
}
for (const enu of input.enumType) {
visitWithCarry(enu, carry, visitor);
}
for (const extensionField of input.extension) {
visitWithCarry(extensionField, carry, visitor);
}
} else if (FileDescriptorProto.is(input)) {
for (const message of input.messageType) {
visitWithCarry(message, carry, visitor);
}
for (const enu of input.enumType) {
visitWithCarry(enu, carry, visitor);
}
for (const service of input.service) {
visitWithCarry(service, carry, visitor);
}
for (const extensionField of input.extension) {
visitWithCarry(extensionField, carry, visitor);
}
} else if (ServiceDescriptorProto.is(input)) {
for (const method of input.method) {
visitWithCarry(method, carry, visitor);
}
} else if (EnumValueDescriptorProto.is(input)) {
//
} else if (FieldDescriptorProto.is(input)) {
//
} else if (MethodDescriptorProto.is(input)) {
//
} else if (OneofDescriptorProto.is(input)) {
//
} else {
assertNever(input);
}
} | the_stack |
import { btoa } from './base/stringutil';
import asyncutil = require('./base/asyncutil');
import agile_keychain_crypto = require('./agile_keychain_crypto');
import crypto = require('./base/crypto');
import item_builder = require('./item_builder');
import item_store = require('./item_store');
import key_agent = require('./key_agent');
import mem_key_value_store = require('./base/mem_key_value_store');
import local_store = require('./local_store');
import testLib = require('./test');
// Shared fixture bundle handed to every test in this file.
interface Env {
masterPass: string;
masterKey: key_agent.Key;
keyAgent: key_agent.SimpleKeyAgent;
database: mem_key_value_store.Database;
databaseName: string;
}
/**
 * Generates a random master key and wraps it in the AgileKeychain key
 * format: the key material is encrypted with a key derived from
 * `password` using `iterations` rounds, and stored base64-encoded with
 * the OpenSSL-style 'Salted__' prefix.
 */
async function generateKey(
    password: string,
    iterations: number
): Promise<key_agent.Key> {
    const masterKey = crypto.randomBytes(1024);
    const salt = crypto.randomBytes(8);
    const derivedKey = await key_agent.keyFromPassword(password, salt, iterations);
    const encryptedKey = await key_agent.encryptKey(derivedKey, masterKey);
    return {
        format: key_agent.KeyFormat.AgileKeychainKey,
        identifier: agile_keychain_crypto.newUUID(),
        data: btoa('Salted__' + salt + encryptedKey.key),
        iterations: iterations,
        validation: btoa(encryptedKey.validation),
    };
}
// Password and master key shared by all tests. `masterKey` is generated
// asynchronously at the bottom of this file, before testLib.start() runs.
let masterPass = 'testpass';
let masterKey: key_agent.Key;
function setupEnv(): Env {
let keyAgent = new key_agent.SimpleKeyAgent();
let database = new mem_key_value_store.Database();
let masterPass = 'testpass';
return {
masterPass: masterPass,
masterKey: masterKey,
keyAgent: keyAgent,
database: database,
databaseName: 'test',
};
}
/**
 * Convenience helper: creates a store on a fresh environment, saves the
 * master key (with a 'hint') and unlocks it. Resolves with the unlocked
 * store.
 */
async function setupStoreAndUnlock() {
    const env = setupEnv();
    const store = new local_store.Store(
        env.database,
        env.databaseName,
        env.keyAgent
    );
    await store.saveKeys([env.masterKey], 'hint');
    await store.unlock(env.masterPass);
    return store;
}
// Verifies that keys and the password hint round-trip through the store,
// that the key agent can encrypt/decrypt after unlock, and that a fresh
// Store backed by the same database can be unlocked again after the key
// agent forgets all keys.
testLib.addTest('save and load keys and hint', assert => {
var env = setupEnv();
var store = new local_store.Store(
env.database,
env.databaseName,
env.keyAgent
);
var params: key_agent.CryptoParams = {
algo: key_agent.CryptoAlgorithm.AES128_OpenSSLKey,
};
var passHint = 'password hint';
return store
.saveKeys([env.masterKey], passHint)
.then(() => {
// keys and hint must be readable back without unlocking
return asyncutil.all2([store.listKeys(), store.passwordHint()]);
})
.then(keysAndHint => {
var keys = <key_agent.Key[]>keysAndHint[0];
var hint = <string>keysAndHint[1];
assert.equal(keys.length, 1);
assert.equal(hint, passHint);
return store.unlock(env.masterPass);
})
.then(() => {
// after unlock the agent holds the decrypted key and can encrypt
return env.keyAgent.encrypt(
env.masterKey.identifier,
'testcontent',
params
);
})
.then(encrypted => {
return env.keyAgent.decrypt(
env.masterKey.identifier,
encrypted,
params
);
})
.then(plainText => {
assert.equal(plainText, 'testcontent');
// reset the store and try unlocking again
store = new local_store.Store(
env.database,
env.databaseName,
env.keyAgent
);
return env.keyAgent.forgetKeys();
})
.then(() => {
// the persisted keys must be sufficient to unlock a brand-new Store
return store.unlock(env.masterPass);
})
.then(() => {
return env.keyAgent.encrypt(
env.masterKey.identifier,
'testcontent2',
params
);
})
.then(encrypted => {
return env.keyAgent.decrypt(
env.masterKey.identifier,
encrypted,
params
);
})
.then(plainText => {
assert.equal(plainText, 'testcontent2');
});
});
/**
 * Builds the login item used as a fixture throughout these tests:
 * one login, one password and two website URLs.
 */
function makeItem() {
    const builder = new item_builder.Builder(item_store.ItemTypes.LOGIN);
    builder.setTitle('test item');
    builder.addLogin('foo.bar@gmail.com');
    builder.addPassword('pass3');
    builder.addUrl('acme.org');
    builder.addUrl('foo.acme.org');
    return builder.item();
}
// Verifies that a saved item round-trips through listItems(), including
// its decrypted content (both website URLs in insertion order).
testLib.addTest('save and load items', assert => {
var env = setupEnv();
var store = new local_store.Store(
env.database,
env.databaseName,
env.keyAgent
);
var item = makeItem();
return store
.saveKeys([env.masterKey], '')
.then(() => {
return store.unlock(env.masterPass);
})
.then(() => {
return item.saveTo(store);
})
.then(() => {
return store.listItems();
})
.then(items => {
// FIXME testLib.assertEqual() uses xdiff.diff() which
// has a bug that incorrectly reports a property
// deletion if two objects have a key with
// value undefined
item.parentRevision = item.parentRevision || null;
items[0].parentRevision = items[0].parentRevision || null;
assert.equal(items.length, 1);
testLib.assertEqual(assert, items[0], item, null, [
'store',
'content',
'openContents',
]);
return items[0].getContent();
})
.then(content => {
// content is stored encrypted; getContent() decrypts via the key agent
assert.deepEqual(content.urls, [
{
label: 'website',
url: 'acme.org',
},
{
label: 'website',
url: 'foo.acme.org',
},
]);
});
});
// Verifies that each save creates a new revision chained to the previous
// one via parentRevision, and that specific past revisions can be loaded
// by (uuid, revision) while listItems() reports only the newest one.
testLib.addTest('save and load item revisions', assert => {
var env = setupEnv();
var store = new local_store.Store(
env.database,
env.databaseName,
env.keyAgent
);
var item = makeItem();
item.title = 'Initial Title';
var firstRevision = '';
var secondRevision = '';
var thirdRevision = '';
// unsaved items have no revision yet
assert.equal(item.revision, null);
return store
.saveKeys([env.masterKey], '')
.then(() => {
return store.unlock(env.masterPass);
})
.then(() => {
return item.saveTo(store);
})
.then(() => {
firstRevision = item.revision;
assert.notEqual(item.revision, null);
item.title = 'Updated Title';
return item.saveTo(store);
})
.then(() => {
// second save must mint a different revision id
assert.notEqual(item.revision, firstRevision);
return store.loadItem(item.uuid, firstRevision);
})
.then(firstItemRevision => {
// the first revision has no parent and keeps the original title
assert.equal(firstItemRevision.item.parentRevision, null);
assert.equal(firstItemRevision.item.revision, firstRevision);
assert.equal(firstItemRevision.item.title, 'Initial Title');
return store.loadItem(item.uuid, item.revision);
})
.then(secondItemRevision => {
secondRevision = secondItemRevision.item.revision;
assert.equal(secondItemRevision.item.parentRevision, firstRevision);
assert.equal(secondItemRevision.item.revision, item.revision);
assert.equal(secondItemRevision.item.title, 'Updated Title');
// a third save (trashing the item) extends the chain again
item.trashed = true;
return item.saveTo(store);
})
.then(() => {
thirdRevision = item.revision;
assert.equal(item.parentRevision, secondRevision);
assert.notEqual(item.revision, secondRevision);
return store.listItems();
})
.then(items => {
// listItems() reports only the latest revision of each item
assert.equal(items.length, 1);
assert.equal(items[0].revision, thirdRevision);
assert.equal(items[0].parentRevision, secondRevision);
});
});
// Clearing the store must remove all previously saved items.
testLib.addTest('clear store', async assert => {
    const env = setupEnv();
    const store = new local_store.Store(
        env.database,
        env.databaseName,
        env.keyAgent
    );
    const item = makeItem();
    await store.saveKeys([env.masterKey], '');
    await store.unlock(env.masterPass);
    await item.saveTo(store);
    let items = await store.listItems();
    assert.equal(items.length, 1);
    await store.clear();
    items = await store.listItems();
    assert.equal(items.length, 0);
});
// Unlocking a store before any keys were saved must fail with an Error.
testLib.addTest('unlock store with no keys', async assert => {
    const env = setupEnv();
    const store = new local_store.Store(
        env.database,
        env.databaseName,
        env.keyAgent
    );
    // `result` stays false if unlock unexpectedly succeeds, which makes
    // the instanceof assertion below fail as intended.
    let result: any = false;
    try {
        await store.unlock(env.masterPass);
    } catch (err) {
        result = err;
    }
    assert.ok(result instanceof Error);
});
// Verifies the per-sync-store revision bookkeeping: starts empty, a saved
// item has no synced revision until one is set, and both the single-item
// getter and the bulk lastSyncRevisions() map reflect the stored pair.
testLib.addTest('get/set last sync data', assert => {
let store: local_store.Store;
let item = makeItem();
const CLOUD_STORE_ID = 'cloud';
return setupStoreAndUnlock()
.then(store_ => {
store = store_;
})
.then(() => {
return store.lastSyncRevisions(CLOUD_STORE_ID);
})
.then(revisions => {
// no sync data recorded yet
assert.equal(revisions.size, 0);
return item.saveTo(store);
})
.then(() => {
assert.notEqual(item.revision, null);
return store.getLastSyncedRevision(item.uuid, CLOUD_STORE_ID);
})
.then(revision => {
// saving alone does not create sync metadata
assert.equal(revision, null);
return store.setLastSyncedRevision(item, CLOUD_STORE_ID, {
local: item.revision,
external: '1',
});
})
.then(() => {
return store.getLastSyncedRevision(item.uuid, CLOUD_STORE_ID);
})
.then(revision => {
assert.equal(revision.local, item.revision);
assert.equal(revision.external, '1');
return store.lastSyncRevisions(CLOUD_STORE_ID);
})
.then(revisions => {
assert.equal(revisions.size, 1);
assert.equal(revisions.get(item.uuid).local, item.revision);
});
});
// Verifies the revision chain invariant over three consecutive saves.
testLib.addTest('item revision updates on save', assert => {
var env = setupEnv();
var store = new local_store.Store(
env.database,
env.databaseName,
env.keyAgent
);
var item = makeItem();
var revisions: string[] = [];
// the item revision should change on each save and the parent
// revision should be equal to the previous revision
assert.equal(item.revision, null);
return store
.saveKeys([env.masterKey], '')
.then(() => {
return store.unlock(env.masterPass);
})
.then(() => {
return item.saveTo(store);
})
.then(() => {
revisions.push(item.revision);
assert.notEqual(item.revision, null);
return item.saveTo(store);
})
.then(() => {
revisions.push(item.revision);
assert.notEqual(item.revision, revisions[0]);
assert.equal(item.parentRevision, revisions[0]);
return item.saveTo(store);
})
.then(() => {
revisions.push(item.revision);
assert.notEqual(item.revision, revisions[1]);
assert.equal(item.parentRevision, revisions[1]);
});
});
// Verifies that saveKeys() replaces the stored key set rather than
// appending to it: saving a key with a new identifier still leaves
// exactly one key in the store.
testLib.addTest('updating keys replaces existing keys', assert => {
var env = setupEnv();
var store = new local_store.Store(
env.database,
env.databaseName,
env.keyAgent
);
env.masterKey.identifier = 'KEY1';
return store
.saveKeys([env.masterKey], '')
.then(() => {
return store.listKeys();
})
.then(keys => {
assert.equal(keys.length, 1);
// save again under a different identifier — must replace, not add
env.masterKey.identifier = 'KEY2';
return store.saveKeys([env.masterKey], '');
})
.then(() => {
return store.listKeys();
})
.then(keys => {
assert.equal(keys.length, 1);
});
});
// Result bundle returned by createStoreWithItem().
interface StoreWithItem {
store: item_store.Store;
item: item_store.Item;
}
/**
 * Creates an unlocked store pre-populated with a single saved fixture
 * item, and resolves with both.
 */
async function createStoreWithItem() {
    const env = setupEnv();
    const store = new local_store.Store(
        env.database,
        env.databaseName,
        env.keyAgent
    );
    const item = makeItem();
    await store.saveKeys([env.masterKey], '');
    await store.unlock(env.masterPass);
    await item.saveTo(store);
    return { store, item };
}
// listItemStates() must report uuid/revision/deleted for every stored item.
testLib.addTest('list item states', async assert => {
    const storeAndItem: StoreWithItem = await createStoreWithItem();
    const states = await storeAndItem.store.listItemStates();
    assert.deepEqual(states, [
        {
            uuid: storeAndItem.item.uuid,
            revision: storeAndItem.item.revision,
            deleted: false,
        },
    ]);
});
// Key derivation is comparatively slow, so generate the shared master key
// once up front and only start the test run after it is ready.
testLib.cancelAutoStart();
generateKey(masterPass, 100).then(key => {
masterKey = key;
testLib.start();
});
import * as path from 'path';
import { Parser, ComponentDoc } from 'react-docgen-typescript';
import ts, { SymbolFlags, TypeFlags, SyntaxKind } from 'typescript';
import { isEmpty, isEqual } from 'lodash';
import { existsSync, readFileSync } from 'fs-extra';
import findConfig from 'find-config';
import { debug } from '../../core';
import { Json } from '../../types';
import { transformItem } from '../transform';
import generateDTS from './generateDTS';
import { IParseArgs } from '../index';
// Namespaced debug logger for this parser module.
const log = debug.extend('parse:ts');
// ts.Type widened with internal compiler fields this module reads
// (the private `id` and the resolved `typeArguments`).
type ExtendedType = ts.Type & {
id: string;
typeArguments: any[];
};
/**
 * Extends the ancestor-id chain with the symbol id of `type`, or returns
 * the chain unchanged when the type has no identifiable symbol. The chain
 * is used to break infinite recursion on self-referential types.
 */
function getNextParentIds(parentIds: number[], type: ts.Type) {
    // @ts-ignore `id` is an internal field on the symbol
    const symbolId = type?.symbol?.id;
    return symbolId ? [...parentIds, symbolId] : parentIds;
}
/**
 * Builds a dotted, human-readable name for a symbol by walking its parent
 * chain (e.g. `Namespace.Type`). To keep names short, a parent prefix is
 * only prepended while it is at most 20 characters long.
 */
function getSymbolName(symbol: ts.Symbol) {
    const ownName = symbol.getName();
    // @ts-ignore `parent` is an internal symbol property
    const parentSymbol = symbol?.parent;
    const prefix: string = parentSymbol && getSymbolName(parentSymbol);
    return prefix && prefix.length <= 20 ? `${prefix}.${ownName}` : ownName;
}
/**
 * Maps TypeScript parameter nodes of a function type to docgen
 * `{ name, propType }` entries, resolving each parameter's type through
 * getDocgenTypeHelper().
 */
function getFunctionParams(parameters: any[] = [], checker, parentIds, type) {
    return parameters.map((param) => {
        const paramType = checker.getTypeOfSymbolAtLocation(
            param.symbol,
            param.symbol.valueDeclaration,
        );
        return {
            name: param.symbol.escapedName,
            propType: getDocgenTypeHelper(checker, paramType, false, getNextParentIds(parentIds, type)),
        };
    });
}
/**
 * Builds the docgen description of a function's return type. Returns an
 * empty object when no return-type node is available.
 */
function getFunctionReturns(node: any, checker, parentIds, type) {
    if (!node) {
        return {};
    }
    return {
        propType: getDocgenTypeHelper(
            checker,
            node.type,
            false,
            getNextParentIds(parentIds, type),
        ),
    };
}
// Export/property names that are never React components and are skipped
// while scanning module exports for sub-components.
const blacklistNames = [
'prototype',
'getDerivedStateFromProps',
'propTypes',
'defaultProps',
'contextTypes',
'displayName',
'contextType',
'Provider',
'Consumer',
];
// Type names treated as opaque "complex" types that are never expanded
// into their members (DOM types, React internals, JS builtins).
const blacklistPatterns = [
/^HTML/,
/^React\./,
/^Object$/,
/^Date$/,
/^Promise$/,
/^XML/,
/^Function$/,
];
// function hasTooManyTypes(type) {
// return type?.types?.length >= 20;
// }
/**
 * Returns true when `type` is a named type that should not be expanded
 * into its members (matches one of `blacklistPatterns`, e.g. DOM or
 * React built-in types).
 *
 * Fix: removed dead code — the original copied `type.aliasSymbol` into
 * `symbol` when `type.symbol` was missing, but then unconditionally
 * returned false for that alias path, so alias-only types were never
 * complex anyway. Behavior is unchanged.
 */
function isComplexType(type) {
    const symbol = type?.symbol;
    if (!symbol) {
        // alias-only or anonymous type: always expandable
        return false;
    }
    const name = getSymbolName(symbol);
    return blacklistPatterns.some((patt) => patt.test(name));
}
/**
 * Recursively converts a ts.Type into a react-docgen-style type
 * description ({ name, value/elements/params..., raw, required }).
 *
 * @param checker - active type checker of the docgen program
 * @param type - the type to describe
 * @param skipRequired - when true, omit the `required` flag from results
 * @param parentIds - internal symbol ids of ancestor types, used to stop
 *                    recursion on self-referential types
 * @param isRequired - required-ness supplied by the caller
 */
function getDocgenTypeHelper(
checker: ts.TypeChecker,
type: ts.Type,
skipRequired = false,
parentIds: number[] = [],
isRequired = false,
): any {
// tuples are "array-like" to the checker but not plain arrays
function isTuple(_type: ts.Type) {
// @ts-ignore use internal methods
return checker.isArrayLikeType(_type) && !checker.isArrayType(_type);
}
let required: boolean;
// NOTE(review): with the default isRequired=false this condition is
// always true unless a caller passes `undefined` explicitly, so the
// Optional-flag fallback below rarely runs — confirm intent.
if (isRequired !== undefined) {
required = isRequired;
} else {
required = !(type.flags & SymbolFlags.Optional) || isRequired;
}
// wraps branch-specific info with the common `raw`/`required` fields
function makeResult(typeInfo: Json) {
if (skipRequired) {
return {
raw: checker.typeToString(type),
...typeInfo,
};
} else {
return {
required,
raw: checker.typeToString(type),
...typeInfo,
};
}
}
// converts a list of property symbols into key/value shape entries,
// appending string/number index signatures when present
function getShapeFromArray(symbolArr: ts.Symbol[], _type: ts.Type) {
const shape: Array<{
key:
| {
name: string;
}
| string;
value: any;
}> = symbolArr.map((prop) => {
const propType = checker.getTypeOfSymbolAtLocation(
prop,
// @ts-ignore
prop.valueDeclaration || (prop.declarations && prop.declarations[0]) || {},
);
return {
key: prop.getName(),
value: getDocgenTypeHelper(
checker,
propType,
false,
// @ts-ignore
getNextParentIds(parentIds, _type),
// @ts-ignore
!prop?.valueDeclaration?.questionToken,
),
};
});
// @ts-ignore use internal methods
if (checker.isArrayLikeType(_type)) {
return shape;
}
if (_type.getStringIndexType()) {
// @ts-ignore use internal methods
if (!_type.stringIndexInfo) {
return shape;
}
shape.push({
key: {
name: 'string',
},
value: getDocgenTypeHelper(
checker,
// @ts-ignore use internal methods
_type.stringIndexInfo.type,
false,
getNextParentIds(parentIds, _type),
),
});
} else if (_type.getNumberIndexType()) {
// @ts-ignore use internal methods
if (!_type.numberIndexInfo) {
return shape;
}
shape.push({
key: {
name: 'number',
},
value: getDocgenTypeHelper(
checker,
// @ts-ignore use internal methods
_type.numberIndexInfo.type,
false,
getNextParentIds(parentIds, _type),
),
});
}
return shape;
}
// resolves the member shape of an object/tuple type
function getShape(_type: ts.Type) {
const { symbol } = _type;
if (symbol && symbol.members) {
// @ts-ignore
const props: ts.Symbol[] = Array.from(symbol.members.values());
// if (props.length >= 20) {
// throw new Error('too many props');
// }
return getShapeFromArray(
props.filter((prop) => prop.getName() !== '__index'),
_type,
);
} else {
// anonymous type: fall back to resolved type arguments
// @ts-ignore
const args = _type.resolvedTypeArguments || [];
const props = checker.getPropertiesOfType(_type);
// if (props.length >= 20) {
// throw new Error('too many props');
// }
const shape = getShapeFromArray(props.slice(0, args.length), _type);
return shape;
}
}
// @ts-ignore
if (type?.kind === SyntaxKind.VoidExpression) {
return makeResult({
name: 'void',
raw: 'void',
});
}
const pattern = /^__global\.(.+)$/;
// recursion guard: already visited this symbol on the current path
// @ts-ignore
if (parentIds.includes(type?.symbol?.id)) {
return makeResult({
name: 'object', // checker.typeToString(type),
});
}
// globals (e.g. __global.Window) are reported by their bare name
if (type.symbol) {
const symbolName = getSymbolName(type.symbol);
if (symbolName) {
const matches = pattern.exec(symbolName);
if (matches) {
return makeResult({
name: matches[1],
});
}
}
}
// branch order matters: more specific flag checks come first
if (type.flags & TypeFlags.Number) {
return makeResult({
name: 'number',
});
} else if (type.flags & TypeFlags.String) {
return makeResult({
name: 'string',
});
} else if (type.flags & TypeFlags.NumberLiteral) {
return makeResult({
name: 'literal',
// @ts-ignore
value: type.value,
});
} else if (type.flags & TypeFlags.Literal) {
return makeResult({
name: 'literal',
value: checker.typeToString(type),
});
} else if (type.symbol?.flags & SymbolFlags.Enum) {
// enums are reported as a union of their member values
return makeResult({
name: 'union',
// @ts-ignore
value: type.types.map((t) => t.value),
});
// @ts-ignore
} else if (type.flags & TypeFlags.DisjointDomains) {
return makeResult({
name: checker.typeToString(type),
});
} else if (type.flags & TypeFlags.Any) {
return makeResult({
name: 'any',
});
} else if (type.flags & TypeFlags.Union && !isComplexType(type)) {
return makeResult({
name: 'union',
// @ts-ignore
value: type.types.map((t) =>
getDocgenTypeHelper(checker, t, true, getNextParentIds(parentIds, type))),
});
} else if (isComplexType(type)) {
// blacklisted named types stay opaque
return makeResult({
name: getSymbolName(type?.symbol || type?.aliasSymbol),
});
} else if (type.flags & (TypeFlags.Object | TypeFlags.Intersection)) {
if (isTuple(type)) {
try {
const props = getShape(type);
return makeResult({
name: 'tuple',
value: props.map((p) => p.value),
});
} catch (e) {
// shape resolution failed: degrade to a plain object
return makeResult({
name: 'object',
});
}
// @ts-ignore
} else if (checker.isArrayType(type)) {
return makeResult({
name: 'Array',
// @ts-ignore
elements: [
getDocgenTypeHelper(
checker,
(type as ExtendedType).typeArguments[0],
false,
getNextParentIds(parentIds, type),
),
],
});
// @ts-ignore
} else if (type?.symbol?.valueDeclaration?.parameters?.length) {
// function declared with parameters: describe params and return
return makeResult({
name: 'func',
params: getFunctionParams(
// @ts-ignore
type?.symbol?.valueDeclaration?.parameters,
checker,
parentIds,
type,
),
returns: getFunctionReturns(
checker.typeToTypeNode(type, type?.symbol?.valueDeclaration),
checker,
parentIds,
type,
),
});
} else if (
// callable object type (interface with a call signature)
// @ts-ignore
type?.members?.get('__call')?.declarations[0]?.symbol?.declarations[0]?.parameters?.length
) {
return makeResult({
name: 'func',
params: getFunctionParams(
// @ts-ignore
type?.members?.get('__call')?.declarations[0]?.symbol?.declarations[0]?.parameters,
checker,
parentIds,
type,
),
});
} else {
// plain object/intersection: describe each property
try {
const props = getShape(type);
return makeResult({
name: 'signature',
type: {
signature: {
properties: props,
},
},
});
} catch (e) {
return makeResult({
name: 'object',
});
}
}
} else {
// anything unrecognized degrades to a plain object
return makeResult({
name: 'object',
});
}
}
// react-docgen-typescript Parser with two overrides: a custom recursive
// type resolver (getDocgenTypeHelper) and a stricter stateless-component
// check to avoid false positives.
class MyParser extends Parser {
// Entry point used by the base parser for each prop type. Seeds the
// recursion guard with the prop's parent symbol id when available.
getDocgenType(propType: ts.Type): any {
const parentIds = [];
// @ts-ignore
const parentId = propType?.symbol?.parent?.id;
if (parentId) {
parentIds.push(parentId);
}
// @ts-ignore
const result = getDocgenTypeHelper(this.checker, propType, true, parentIds);
return result;
}
// override the builtin method, to avoid the false positive
public extractPropsFromTypeIfStatelessComponent(type: ts.Type): ts.Symbol | null {
const callSignatures = type.getCallSignatures();
if (callSignatures.length) {
// Could be a stateless component. Is a function, so the props object we're interested
// in is the (only) parameter.
for (const sig of callSignatures) {
const params = sig.getParameters();
if (params.length === 0) {
continue;
}
// only accept signatures whose return type looks like a JSX element
// @ts-ignore
const returnSymbol = this.checker.getReturnTypeOfSignature(sig);
if (!returnSymbol) continue;
const symbol = returnSymbol?.symbol;
if (!symbol) continue;
// @ts-ignore
const typeString = this.checker.symbolToString(symbol);
if (
typeString.startsWith('ReactElement') ||
typeString.startsWith('Element') ||
typeString.startsWith('RaxElement')
) {
const propsParam = params[0];
if (propsParam) {
return propsParam;
}
}
}
}
return null;
}
}
/**
 * Compiler options for the docgen ts.Program.
 *
 * NOTE: `reactTypePath` / `originalReactTypePath` are currently unused;
 * an earlier revision used them to pin a specific `react` type package
 * via `paths`/`exclude`/`types` mappings.
 */
const getCompilerOptions = (reactTypePath, originalReactTypePath): any => ({
    jsx: ts.JsxEmit.React,
    module: ts.ModuleKind.CommonJS,
    target: ts.ScriptTarget.Latest,
    allowSyntheticDefaultImports: true,
});
// ts.Symbol extended with export metadata attached while walking module
// exports: `exportName` is the top-level export name, `subName` the
// property name for sub-components (e.g. Table.Column).
interface SymbolWithMeta extends ts.Symbol {
meta?: {
exportName: string;
subName?: string;
};
}
/**
 * Chooses the public component name: prefer `displayName` when it looks
 * like a component name (starts with an uppercase ASCII letter, char
 * codes 65–90), otherwise fall back to the export name.
 *
 * Fix: the old `return displayName || exportName` was dead fallback —
 * `displayName` is already known truthy inside that branch.
 *
 * @param exportName - name the symbol is exported under
 * @param displayName - optional `displayName` found on the component
 */
function getComponentName(exportName, displayName) {
    if (displayName) {
        const firstCharCode = displayName.charCodeAt(0);
        // 65..90 === 'A'..'Z'
        if (firstCharCode >= 65 && firstCharCode <= 90) {
            return displayName;
        }
    }
    return exportName;
}
// Fallback tsconfig shipped next to this module, used when the project
// has no usable tsconfig.json.
const defaultTsConfigPath = path.resolve(__dirname, './tsconfig.json');
/**
 * Parses a TypeScript component file and returns react-docgen component
 * docs for every export (and sub-components attached as properties).
 *
 * Resolution order for compiler options: nearest tsconfig.json above the
 * module/work dir, otherwise the bundled default config.
 */
export default function parseTS(filePath: string, args: IParseArgs): ComponentDoc[] {
if (!filePath) return [];
let basePath = args.moduleDir || args.workDir || path.dirname(filePath);
let tsConfigPath = findConfig('tsconfig.json', { cwd: basePath }); // path.resolve(basePath, 'tsconfig.json')
// NOTE(review): the `tsConfigPath === 'tsconfig.json'` comparison only
// matches a bare relative result from findConfig — confirm intent.
if (
!tsConfigPath ||
!existsSync(tsConfigPath) ||
(args.accesser === 'online' && tsConfigPath === 'tsconfig.json')
) {
tsConfigPath = defaultTsConfigPath;
} else {
basePath = path.dirname(tsConfigPath);
}
log('ts config path is', tsConfigPath);
const { config, error } = ts.readConfigFile(tsConfigPath, (filename) =>
readFileSync(filename, 'utf8'));
if (error !== undefined) {
const errorText = `Cannot load custom tsconfig.json from provided path: ${tsConfigPath}, with error code: ${error.code}, message: ${error.messageText}`;
throw new Error(errorText);
}
const { options, errors } = ts.parseJsonConfigFileContent(
config,
ts.sys,
basePath,
{},
tsConfigPath,
);
if (errors && errors.length) {
throw errors[0];
}
log('ts config is', options);
// const filePaths = Array.isArray(filePathOrPaths) ? filePathOrPaths : [filePathOrPaths];
generateDTS(args);
const program = ts.createProgram([filePath], options);
const parser = new MyParser(program, {});
const checker = program.getTypeChecker();
// Pass 1: collect raw component info for each export. The export list is
// extended in-place while iterating, so sub-components (properties of an
// export) are processed by the same loop.
const result = [filePath]
.map((fPath) => program.getSourceFile(fPath))
.filter((sourceFile) => typeof sourceFile !== 'undefined')
.reduce((docs: any[], sourceFile) => {
const moduleSymbol = checker.getSymbolAtLocation(sourceFile as ts.Node);
if (!moduleSymbol) {
return docs;
}
const exportSymbols = checker.getExportsOfModule(moduleSymbol);
for (let index = 0; index < exportSymbols.length; index++) {
const sym: SymbolWithMeta = exportSymbols[index];
const name = sym.getName();
if (blacklistNames.includes(name)) {
continue;
}
// polyfill valueDeclaration
sym.valueDeclaration =
sym.valueDeclaration || (Array.isArray(sym.declarations) && sym.declarations[0]);
if (!sym.valueDeclaration) {
continue;
}
const info = parser.getComponentInfo(sym, sourceFile);
if (info === null) {
continue;
}
const exportName = sym.meta && sym.meta.exportName;
const meta = {
subName: exportName ? name : '',
exportName: exportName || name,
};
// skip duplicates (same export/sub-name pair already documented)
if (docs.find((x) => isEqual(x.meta, meta))) {
continue;
}
docs.push({
...info,
meta,
});
// find sub components
if (!!sym.declarations && sym.declarations.length === 0) {
continue;
}
const type = checker.getTypeOfSymbolAtLocation(
sym,
sym.valueDeclaration || sym.declarations[0],
);
Array.prototype.push.apply(
exportSymbols,
type.getProperties().map((x: SymbolWithMeta) => {
x.meta = { exportName: name };
return x;
}),
);
}
return docs;
}, []);
// Pass 2: transform raw prop info into the final component/prop schema.
const coms = result.reduce((res: any[], info: any) => {
if (!info || !info.props || isEmpty(info.props)) return res;
const props = Object.keys(info.props).reduce((acc: any[], name) => {
// omit aria related properties temporarily
if (name.startsWith('aria-')) {
return acc;
}
try {
const item: any = transformItem(name, info.props[name]);
acc.push(item);
} catch (e) {
log(e);
}
return acc;
}, []);
const exportName = info?.meta?.exportName;
res.push({
componentName: getComponentName(exportName, info.displayName),
props,
meta: info.meta || {},
});
return res;
}, []);
return coms;
}
import {ClusterRoleBinding, useRbac} from '../hooks/useRbac'
import {useUsers} from '../hooks/useUsers'
import React, {useCallback, useEffect, useState} from 'react'
import uuid from 'uuid'
import ClusterAccessRadio from './ClusterAccessRadio'
import {templateClusterResourceRolePrefix} from '../constants'
import Templates from './Templates'
import {FullScreenLoader} from './Loader'
import Summary from './Summary'
import {useHistory} from 'react-router-dom'
import {AggregatedRoleBinding, extractUsersRoles} from "../services/role";
import {User} from "../types";
import {ClusterAccess} from "./types";
import {httpRequests} from "../services/httpRequests";
import {Dialog} from "@reach/dialog";
import CreateKubeconfigButton from "./CreateKubeconfigButton";
// Props for the EditUser page component.
interface EditUserParameters {
readonly user: User;
}
/**
 * Derives the initial cluster-access radio value from an existing
 * ClusterRoleBinding, based on the bound role's name suffix.
 *
 * @todo these bootstrap cases are based off an enum; to implement dynamic
 *       cluster roles this needs to be refactored.
 * @returns 'write' for roles ending in 'admin', 'read' for roles ending
 *          in 'read-only', or null when no state change is needed.
 */
function getClusterBindindingAccessValue(clusterRoleBinding: ClusterRoleBinding): ClusterAccess | null {
    const roleName = clusterRoleBinding.roleRef.name;
    if (roleName.endsWith('admin')) {
        return 'write';
    }
    return roleName.endsWith('read-only') ? 'read' : null;
}
export default function EditUser({user}: EditUserParameters) {
const [showLoader, setShowLoader] = useState<boolean>(false)
const username = user.name
const {clusterRoleBindings, roleBindings, refreshRbacData} = useRbac()
const history = useHistory()
const {refreshUsers} = useUsers()
useEffect(() => {
refreshRbacData()
}, [refreshRbacData])
const {rbs, crbs, extractedPairItems} = extractUsersRoles(roleBindings, clusterRoleBindings, username);
const [clusterAccess, setClusterAccess] = useState<ClusterAccess>('none')
const [initialClusterAccess, setInitialClusterAccess] = useState<ClusterAccess>(null)
const [aggregatedRoleBindings, setAggregatedRoleBindings] = useState<AggregatedRoleBinding[]>([])
const [canCheckLegacyUser, setCanCheckLegacyUser] = useState(false);
const [showLegacyMigrationModal, setShowLegacyMigrationModal] = useState(false);
useEffect(() => {
// means that aggragatedRoleBindings is already bootstrapped
if (aggregatedRoleBindings.length !== 0) {
return;
}
// we proceed to bootstrap aggragatedRoleBindings
setAggregatedRoleBindings(extractedPairItems)
setCanCheckLegacyUser(true);
// we bootstrap clusterRoleBinding value.
const clusterRoleBinding = crbs.find(crb => crb.metadata.name.includes(templateClusterResourceRolePrefix))
if (!clusterRoleBinding) {
return;
}
const clusterBindingAccessValue = getClusterBindindingAccessValue(clusterRoleBinding)
// if null we don't set any state.
if (!clusterBindingAccessValue) {
return;
}
// we bootstrap initialClusterAccess if its value is null
if (initialClusterAccess === null) {
setInitialClusterAccess(clusterBindingAccessValue)
}
setClusterAccess(clusterBindingAccessValue)
}, [crbs, initialClusterAccess, aggregatedRoleBindings.length, extractedPairItems])
useEffect(() => {
async function checkLegacyUser(): Promise<boolean> {
setShowLoader(true)
const namespaces = aggregatedRoleBindings.reduce((acc, aggregatedRoleBinding) => {
if (aggregatedRoleBinding.namespaces === "ALL_NAMESPACES") {
return acc
}
return [...new Set([...acc, ...aggregatedRoleBinding.namespaces])]
}, [] as string[])
const legacyUserReq = await httpRequests.checkLegacyUser(username, namespaces)
return legacyUserReq.data.legacyUserDetected;
}
if (aggregatedRoleBindings.length !== 0 && canCheckLegacyUser) {
checkLegacyUser().then((legacyUserDetected) => {
setShowLoader(false)
setCanCheckLegacyUser(false)
if (legacyUserDetected) {
setShowLegacyMigrationModal(true)
}
})
}
}, [aggregatedRoleBindings, username, canCheckLegacyUser])
async function handleUserDeletion() {
setShowLoader(true)
await deleteUserResources()
await httpRequests.userRequests.delete(username)
}
/**
* delete all the user-resources currently in the k8s cluster
*/
async function deleteUserResources() {
await httpRequests.rolebindingRequests.delete.rolebinding(rbs);
await httpRequests.rolebindingRequests.delete.clusterRolebinding(crbs);
}
async function handleSubmit(e, reloadAfterSubmit = true) {
//we delete all the user resources.
await deleteUserResources()
//we create the resources choosen in the UI
await httpRequests.rolebindingRequests.create.fromAggregatedRolebindings(
aggregatedRoleBindings,
username,
clusterAccess
);
if (reloadAfterSubmit) {
window.location.reload()
}
}
const savePair: (p: AggregatedRoleBinding) => void = useCallback(p => {
setAggregatedRoleBindings(state => {
if (state.find(x => x.id === p.id)) {
return state.map(x => x.id === p.id ? p : x)
}
return [...state, p]
})
}, [])
const addEmptyPair = useCallback(() => {
setAggregatedRoleBindings(state => [...state, {id: uuid.v4(), namespaces: [], template: ''}])
}, [])
  // Saving is disabled until every pair has at least one namespace selected.
  const saveButtonDisabled = aggregatedRoleBindings.length === 0 || aggregatedRoleBindings.some(p => p.namespaces.length === 0)
  // NOTE(review): this treats "both binding lists present but empty" as still
  // loading — presumably crbs/rbs start as [] before data arrives; verify a
  // user with zero bindings cannot get stuck on this screen.
  if (crbs && crbs.length === 0 && rbs && rbs.length === 0) {
    return <div>...loading</div>
  }
  // Header with delete button, the rolebinding editor form, and a read-only
  // summary of the configured pairs.
  return (
    <div>
      {showLoader && <FullScreenLoader/>}
      {showLegacyMigrationModal && <LegacyUserModal user={user} upgradeUser={handleSubmit}
                                                    close={() => setShowLegacyMigrationModal(false)}
                                                    username={username}></LegacyUserModal>
      }
      <div className="flex content-between items-center mb-4">
        <h2 className="text-3xl text-gray-800">
          User: <span data-testid="username-heading">{username}</span>
        </h2>
        <div>
          <button
            tabIndex={-1}
            type="button"
            className="bg-transparent hover:bg-red-600 text-gray-700 hover:text-gray-100 py-1 px-2 rounded hover:shadow ml-2 text-xs"
            onClick={() => {
              const confirmed = window.confirm(
                `Confirm deletion of User ${username}`
              )
              if (confirmed) {
                handleUserDeletion().then(async () => {
                  await refreshUsers()
                  history.push('/')
                })
              }
            }}
          >
            delete
          </button>
        </div>
      </div>
      <form
        onSubmit={e => {
          e.preventDefault()
          setShowLoader(true)
          handleSubmit(e)
        }}
      >
        <div className="mb-6">
          <Templates
            pairItems={aggregatedRoleBindings}
            savePair={savePair}
            setPairItems={setAggregatedRoleBindings}
            addEmptyPair={addEmptyPair}
          />
        </div>
        <ClusterAccessRadio
          clusterAccess={clusterAccess}
          setClusterAccess={setClusterAccess}
        />
        <hr className="my-6"/>
        <button
          className={`bg-blue-500 hover:bg-blue-700 text-white font-bold py-2 px-4 rounded shadow ${
            saveButtonDisabled ? ' opacity-50 cursor-not-allowed' : ''
          }`}
          disabled={saveButtonDisabled}
          type="submit"
        >
          save
        </button>
      </form>
      {aggregatedRoleBindings.length > 0 && aggregatedRoleBindings.some(p => p.namespaces.length > 0) ? (
        <>
          <div className="mt-12 mb-4"/>
          <Summary pairItems={aggregatedRoleBindings}/>
        </>
      ) : null}
    </div>
  )
}
/** Props for the LegacyUserModal dialog. */
interface LegacyUserModalProps {
  /** Closes the modal. */
  close(): void;
  /**
   * Re-creates the user's resources (the parent's submit handler).
   * @param event form event — the modal passes null
   * @param reload when false, skip the page reload after the upgrade
   */
  upgradeUser(event: unknown, reload?: boolean): Promise<void>;
  /** Name of the user being upgraded (display only). */
  username: string;
  /** Full user object, forwarded to the kubeconfig button. */
  user: User;
}
/**
 * Modal shown when a pre-1.6.0 ("legacy") user is detected. It immediately
 * re-runs `upgradeUser` on mount (without reloading the page) and offers a
 * fresh kubeconfig once the upgrade completes.
 */
function LegacyUserModal({close, user, username, upgradeUser}: LegacyUserModalProps) {
  const [upgradingUser, setUpgradingUser] = useState(true);
  useEffect(() => {
    async function handleUpgradeUser() {
      await upgradeUser(null, false)
    }
    // NOTE(review): this effect re-runs whenever `upgradeUser` changes
    // identity; the parent passes a non-memoized handler, so a parent
    // re-render could trigger the upgrade again — confirm this is intended.
    setUpgradingUser(true);
    handleUpgradeUser().then(() => {
      setUpgradingUser(false);
    })
  }, [upgradeUser])
  return (
    <Dialog
      className="max-w-4xl mx-auto bg-white shadow-md rounded px-8 pt-4 pb-8 mb-4"
      isOpen={true}
    >
      <div>
        <div>
          <div className="flex justify-between">
            <h2 className="text-3xl mb-4 text-gray-800">
              Legacy user detected
            </h2>
          </div>
          <div>
            <p>Users from versions older than 1.6.0 have to be upgraded in order to work with the new kubeconfig.</p>
            <p>Upgrading user <strong>{username}</strong>...</p>
            {
              !upgradingUser &&
              <p>Done!</p>
            }
          </div>
          <div className="flex mt-4">
            <div>
              <button
                className={`bg-blue-500 hover:bg-blue-700 text-white font-bold py-2 px-4 rounded shadow ${
                  upgradingUser ? ' opacity-50 cursor-not-allowed' : ''
                }`}
                disabled={upgradingUser}
                onClick={() => {
                  close();
                }}>
                Ok
              </button>
            </div>
            <div className="ml-4">
              <CreateKubeconfigButton user={user}/>
            </div>
          </div>
        </div>
        <div className="flex w-full mt-4 flex-col" style={{backgroundColor: "#fff9e8", padding: 8, borderRadius: 4, border: "1px solid #8a6a0a"}}>
          <h3 className="mb-1" style={{color: "#8a6a0a"}}><strong>Notice</strong></h3>
          <p style={{color: "#343741"}}>The old kubeconfig file won't work anymore</p>
        </div>
      </div>
    </Dialog>
  )
}
import { sandbox } from './helpers'
import { createNext } from 'e2e-utils'
import { NextInstance } from 'test/lib/next-modes/base'
// Ports of Metro's HMR require() tests to Next.js' webpack-based Fast
// Refresh; each linked URL points at the upstream test being mirrored.
describe('ReactRefreshRequire', () => {
  // Shared Next.js instance; each test opens its own sandbox session on it.
  let next: NextInstance
  beforeAll(async () => {
    next = await createNext({
      files: {},
      skipStart: true,
    })
  })
  afterAll(() => next.destroy())
  // https://github.com/facebook/metro/blob/b651e535cd0fc5df6c0803b9aa647d664cb9a6c3/packages/metro/src/lib/polyfills/__tests__/require-test.js#L989-L1048
  test('re-runs accepted modules', async () => {
    const { session, cleanup } = await sandbox(next)
    await session.patch(
      'index.js',
      `export default function Noop() { return null; };`
    )
    await session.write(
      './foo.js',
      `window.log.push('init FooV1'); require('./bar');`
    )
    await session.write(
      './bar.js',
      `window.log.push('init BarV1'); export default function Bar() { return null; };`
    )
    // Bootstrap: importing foo (which requires bar) runs both module factories.
    await session.evaluate(() => ((window as any).log = []))
    await session.patch(
      'index.js',
      `require('./foo'); export default function Noop() { return null; };`
    )
    expect(await session.evaluate(() => (window as any).log)).toEqual([
      'init FooV1',
      'init BarV1',
    ])
    // We only edited Bar, and it accepted.
    // So we expect it to re-run alone.
    await session.evaluate(() => ((window as any).log = []))
    await session.patch(
      './bar.js',
      `window.log.push('init BarV2'); export default function Bar() { return null; };`
    )
    expect(await session.evaluate(() => (window as any).log)).toEqual([
      'init BarV2',
    ])
    // We only edited Bar, and it accepted.
    // So we expect it to re-run alone.
    await session.evaluate(() => ((window as any).log = []))
    await session.patch(
      './bar.js',
      `window.log.push('init BarV3'); export default function Bar() { return null; };`
    )
    expect(await session.evaluate(() => (window as any).log)).toEqual([
      'init BarV3',
    ])
    // TODO:
    // expect(Refresh.performReactRefresh).toHaveBeenCalled();
    // expect(Refresh.performFullRefresh).not.toHaveBeenCalled();
    await cleanup()
  })
  // https://github.com/facebook/metro/blob/b651e535cd0fc5df6c0803b9aa647d664cb9a6c3/packages/metro/src/lib/polyfills/__tests__/require-test.js#L1050-L1137
  test('propagates a hot update to closest accepted module', async () => {
    const { session, cleanup } = await sandbox(next)
    await session.patch(
      'index.js',
      `export default function Noop() { return null; };`
    )
    await session.write(
      './foo.js',
      `
      window.log.push('init FooV1');
      require('./bar');

      // Exporting a component marks it as auto-accepting.
      export default function Foo() {};
      `
    )
    await session.write('./bar.js', `window.log.push('init BarV1');`)
    await session.evaluate(() => ((window as any).log = []))
    await session.patch(
      'index.js',
      `require('./foo'); export default function Noop() { return null; };`
    )
    expect(await session.evaluate(() => (window as any).log)).toEqual([
      'init FooV1',
      'init BarV1',
    ])
    // We edited Bar, but it doesn't accept.
    // So we expect it to re-run together with Foo which does.
    await session.evaluate(() => ((window as any).log = []))
    await session.patch('./bar.js', `window.log.push('init BarV2');`)
    expect(await session.evaluate(() => (window as any).log)).toEqual([
      // // FIXME: Metro order:
      // 'init BarV2',
      // 'init FooV1',
      'init FooV1',
      'init BarV2',
      // Webpack runs in this order because it evaluates modules parent down, not
      // child up. Parents will re-run child modules in the order that they're
      // imported from the parent.
    ])
    // We edited Bar, but it doesn't accept.
    // So we expect it to re-run together with Foo which does.
    await session.evaluate(() => ((window as any).log = []))
    await session.patch('./bar.js', `window.log.push('init BarV3');`)
    expect(await session.evaluate(() => (window as any).log)).toEqual([
      // // FIXME: Metro order:
      // 'init BarV3',
      // 'init FooV1',
      'init FooV1',
      'init BarV3',
      // Webpack runs in this order because it evaluates modules parent down, not
      // child up. Parents will re-run child modules in the order that they're
      // imported from the parent.
    ])
    // We edited Bar so that it accepts itself.
    // We still re-run Foo because the exports of Bar changed.
    await session.evaluate(() => ((window as any).log = []))
    await session.patch(
      './bar.js',
      `
      window.log.push('init BarV3');
      // Exporting a component marks it as auto-accepting.
      export default function Bar() {};
      `
    )
    expect(await session.evaluate(() => (window as any).log)).toEqual([
      // // FIXME: Metro order:
      // 'init BarV3',
      // 'init FooV1',
      'init FooV1',
      'init BarV3',
      // Webpack runs in this order because it evaluates modules parent down, not
      // child up. Parents will re-run child modules in the order that they're
      // imported from the parent.
    ])
    // Further edits to Bar don't re-run Foo.
    await session.evaluate(() => ((window as any).log = []))
    await session.patch(
      './bar.js',
      `
      window.log.push('init BarV4');
      export default function Bar() {};
      `
    )
    expect(await session.evaluate(() => (window as any).log)).toEqual([
      'init BarV4',
    ])
    // TODO:
    // expect(Refresh.performReactRefresh).toHaveBeenCalled();
    // expect(Refresh.performFullRefresh).not.toHaveBeenCalled();
    await cleanup()
  })
  // https://github.com/facebook/metro/blob/b651e535cd0fc5df6c0803b9aa647d664cb9a6c3/packages/metro/src/lib/polyfills/__tests__/require-test.js#L1139-L1307
  test('propagates hot update to all inverse dependencies', async () => {
    const { session, cleanup } = await sandbox(next)
    await session.patch(
      'index.js',
      `export default function Noop() { return null; };`
    )
    // This is the module graph:
    //        MiddleA*
    //       /        \
    // Root* - MiddleB* - Leaf
    //       \
    //        MiddleC
    //
    // * - accepts update
    //
    // We expect that editing Leaf will propagate to
    // MiddleA and MiddleB both of which can handle updates.
    await session.write(
      'root.js',
      `
      window.log.push('init RootV1');

      import './middleA';
      import './middleB';
      import './middleC';

      export default function Root() {};
      `
    )
    await session.write(
      'middleA.js',
      `
      log.push('init MiddleAV1');

      import './leaf';

      export default function MiddleA() {};
      `
    )
    await session.write(
      'middleB.js',
      `
      log.push('init MiddleBV1');

      import './leaf';

      export default function MiddleB() {};
      `
    )
    // This one doesn't import leaf and also doesn't export a component (so it
    // doesn't accept updates).
    await session.write(
      'middleC.js',
      `log.push('init MiddleCV1'); export default {};`
    )
    // Doesn't accept its own updates; they will propagate.
    await session.write(
      'leaf.js',
      `log.push('init LeafV1'); export default {};`
    )
    // Bootstrap:
    await session.evaluate(() => ((window as any).log = []))
    await session.patch(
      'index.js',
      `require('./root'); export default function Noop() { return null; };`
    )
    expect(await session.evaluate(() => (window as any).log)).toEqual([
      'init LeafV1',
      'init MiddleAV1',
      'init MiddleBV1',
      'init MiddleCV1',
      'init RootV1',
    ])
    // We edited Leaf, but it doesn't accept.
    // So we expect it to re-run together with MiddleA and MiddleB which do.
    await session.evaluate(() => ((window as any).log = []))
    await session.patch(
      'leaf.js',
      `log.push('init LeafV2'); export default {};`
    )
    expect(await session.evaluate(() => (window as any).log)).toEqual([
      'init LeafV2',
      'init MiddleAV1',
      'init MiddleBV1',
    ])
    // Let's try the same one more time.
    await session.evaluate(() => ((window as any).log = []))
    await session.patch(
      'leaf.js',
      `log.push('init LeafV3'); export default {};`
    )
    expect(await session.evaluate(() => (window as any).log)).toEqual([
      'init LeafV3',
      'init MiddleAV1',
      'init MiddleBV1',
    ])
    // Now edit MiddleB. It should accept and re-run alone.
    await session.evaluate(() => ((window as any).log = []))
    await session.patch(
      'middleB.js',
      `
      log.push('init MiddleBV2');

      import './leaf';

      export default function MiddleB() {};
      `
    )
    expect(await session.evaluate(() => (window as any).log)).toEqual([
      'init MiddleBV2',
    ])
    // Finally, edit MiddleC. It didn't accept so it should bubble to Root.
    await session.evaluate(() => ((window as any).log = []))
    await session.patch(
      'middleC.js',
      `log.push('init MiddleCV2'); export default {};`
    )
    expect(await session.evaluate(() => (window as any).log)).toEqual([
      'init MiddleCV2',
      'init RootV1',
    ])
    // TODO:
    // expect(Refresh.performReactRefresh).toHaveBeenCalled()
    // expect(Refresh.performFullRefresh).not.toHaveBeenCalled()
    await cleanup()
  })
  // https://github.com/facebook/metro/blob/b651e535cd0fc5df6c0803b9aa647d664cb9a6c3/packages/metro/src/lib/polyfills/__tests__/require-test.js#L1309-L1406
  test('runs dependencies before dependents', async () => {
    // TODO:
  })
  // https://github.com/facebook/metro/blob/b651e535cd0fc5df6c0803b9aa647d664cb9a6c3/packages/metro/src/lib/polyfills/__tests__/require-test.js#L1408-L1498
  test('provides fresh value for module.exports in parents', async () => {
    // TODO:
  })
  // https://github.com/facebook/metro/blob/b651e535cd0fc5df6c0803b9aa647d664cb9a6c3/packages/metro/src/lib/polyfills/__tests__/require-test.js#L1500-L1590
  test('provides fresh value for exports.* in parents', async () => {
    // TODO:
  })
  // https://github.com/facebook/metro/blob/b651e535cd0fc5df6c0803b9aa647d664cb9a6c3/packages/metro/src/lib/polyfills/__tests__/require-test.js#L1592-L1688
  test('provides fresh value for ES6 named import in parents', async () => {
    // TODO:
  })
  // https://github.com/facebook/metro/blob/b651e535cd0fc5df6c0803b9aa647d664cb9a6c3/packages/metro/src/lib/polyfills/__tests__/require-test.js#L1690-L1786
  test('provides fresh value for ES6 default import in parents', async () => {
    // TODO:
  })
  // https://github.com/facebook/metro/blob/b651e535cd0fc5df6c0803b9aa647d664cb9a6c3/packages/metro/src/lib/polyfills/__tests__/require-test.js#L1788-L1899
  test('stops update propagation after module-level errors', async () => {
    // TODO:
  })
  // https://github.com/facebook/metro/blob/b651e535cd0fc5df6c0803b9aa647d664cb9a6c3/packages/metro/src/lib/polyfills/__tests__/require-test.js#L1901-L2010
  test('can continue hot updates after module-level errors with module.exports', async () => {
    // TODO:
  })
  // https://github.com/facebook/metro/blob/b651e535cd0fc5df6c0803b9aa647d664cb9a6c3/packages/metro/src/lib/polyfills/__tests__/require-test.js#L2012-L2123
  test('can continue hot updates after module-level errors with ES6 exports', async () => {
    // TODO:
  })
  // https://github.com/facebook/metro/blob/b651e535cd0fc5df6c0803b9aa647d664cb9a6c3/packages/metro/src/lib/polyfills/__tests__/require-test.js#L2125-L2233
  test('does not accumulate stale exports over time', async () => {
    // TODO:
  })
  // https://github.com/facebook/metro/blob/b651e535cd0fc5df6c0803b9aa647d664cb9a6c3/packages/metro/src/lib/polyfills/__tests__/require-test.js#L2235-L2279
  test('bails out if update bubbles to the root via the only path', async () => {
    // TODO:
  })
  // https://github.com/facebook/metro/blob/b651e535cd0fc5df6c0803b9aa647d664cb9a6c3/packages/metro/src/lib/polyfills/__tests__/require-test.js#L2281-L2371
  test('bails out if the update bubbles to the root via one of the paths', async () => {
    // TODO:
  })
  // https://github.com/facebook/metro/blob/b651e535cd0fc5df6c0803b9aa647d664cb9a6c3/packages/metro/src/lib/polyfills/__tests__/require-test.js#L2373-L2472
  test('propagates a module that stops accepting in next version', async () => {
    const { session, cleanup } = await sandbox(next)
    // Accept in parent
    await session.write(
      './foo.js',
      `;(typeof global !== 'undefined' ? global : window).log.push('init FooV1'); import './bar'; export default function Foo() {};`
    )
    // Accept in child
    await session.write(
      './bar.js',
      `;(typeof global !== 'undefined' ? global : window).log.push('init BarV1'); export default function Bar() {};`
    )
    // Bootstrap:
    await session.patch(
      'index.js',
      `;(typeof global !== 'undefined' ? global : window).log = []; require('./foo'); export default () => null;`
    )
    expect(await session.evaluate(() => (window as any).log)).toEqual([
      'init BarV1',
      'init FooV1',
    ])
    // NOTE: session.patch is assumed to resolve truthy when Fast Refresh
    // applied the patch without a full page reload — TODO confirm against
    // the sandbox helper.
    let didFullRefresh = false
    // Verify the child can accept itself:
    await session.evaluate(() => ((window as any).log = []))
    didFullRefresh =
      didFullRefresh ||
      !(await session.patch(
        './bar.js',
        `window.log.push('init BarV1.1'); export default function Bar() {};`
      ))
    expect(await session.evaluate(() => (window as any).log)).toEqual([
      'init BarV1.1',
    ])
    // Now let's change the child to *not* accept itself.
    // We'll expect that now the parent will handle the evaluation.
    await session.evaluate(() => ((window as any).log = []))
    didFullRefresh =
      didFullRefresh ||
      !(await session.patch(
        './bar.js',
        // It's important we still export _something_, otherwise webpack will
        // also emit an extra update to the parent module. This happens because
        // webpack converts the module from ESM to CJS, which means the parent
        // module must update how it "imports" the module (drops interop code).
        // TODO: propose that webpack interrupts the current update phase when
        // `module.hot.invalidate()` is called.
        `window.log.push('init BarV2'); export {};`
      ))
    // We re-run Bar and expect to stop there. However,
    // it didn't export a component, so we go higher.
    // We stop at Foo which currently _does_ export a component.
    expect(await session.evaluate(() => (window as any).log)).toEqual([
      // Bar evaluates twice:
      // 1. To invalidate itself once it realizes it's no longer acceptable.
      // 2. As a child of Foo re-evaluating.
      'init BarV2',
      'init BarV2',
      'init FooV1',
    ])
    // Change it back so that the child accepts itself.
    await session.evaluate(() => ((window as any).log = []))
    didFullRefresh =
      didFullRefresh ||
      !(await session.patch(
        './bar.js',
        `window.log.push('init BarV2'); export default function Bar() {};`
      ))
    // Since the export list changed, we have to re-run both the parent
    // and the child.
    expect(await session.evaluate(() => (window as any).log)).toEqual([
      'init BarV2',
      'init FooV1',
    ])
    // TODO:
    // expect(Refresh.performReactRefresh).toHaveBeenCalled();
    // expect(Refresh.performFullRefresh).not.toHaveBeenCalled();
    expect(didFullRefresh).toBe(false)
    // But editing the child alone now doesn't reevaluate the parent.
    await session.evaluate(() => ((window as any).log = []))
    didFullRefresh =
      didFullRefresh ||
      !(await session.patch(
        './bar.js',
        `window.log.push('init BarV3'); export default function Bar() {};`
      ))
    expect(await session.evaluate(() => (window as any).log)).toEqual([
      'init BarV3',
    ])
    // Finally, edit the parent in a way that changes the export.
    // It would still be accepted on its own -- but it's incompatible
    // with the past version which didn't have two exports.
    // localStorage (unlike window.log) survives the expected full refresh.
    await session.evaluate(() => window.localStorage.setItem('init', ''))
    didFullRefresh =
      didFullRefresh ||
      !(await session.patch(
        './foo.js',
        `
        if (typeof window !== 'undefined' && window.localStorage) {
          window.localStorage.setItem('init', 'init FooV2')
        }
        export function Foo() {};
        export function FooFoo() {};`
      ))
    // Check that we attempted to evaluate, but had to fall back to full refresh.
    expect(
      await session.evaluate(() => window.localStorage.getItem('init'))
    ).toEqual('init FooV2')
    // expect(Refresh.performFullRefresh).toHaveBeenCalled();
    expect(didFullRefresh).toBe(true)
    await cleanup()
  })
  // https://github.com/facebook/metro/blob/b651e535cd0fc5df6c0803b9aa647d664cb9a6c3/packages/metro/src/lib/polyfills/__tests__/require-test.js#L2474-L2521
  test('can replace a module before it is loaded', async () => {
    // TODO:
  })
})
import { injectable } from "inversify";
import { Point, centerOfLine } from '../../utils/geometry';
import { SChildElement } from '../../base/model/smodel';
import { VNode } from "snabbdom/vnode";
import { SModelElement, SModelIndex, SModelRoot } from "../../base/model/smodel";
import { findParentByFeature } from "../../base/model/smodel-utils";
import { Action } from "../../base/actions/action";
import { ICommand, CommandExecutionContext, MergeableCommand } from "../../base/commands/command";
import { Animation } from "../../base/animations/animation";
import { MouseListener } from "../../base/views/mouse-tool";
import { setAttr } from "../../base/views/vnode-utils";
import { IVNodeDecorator } from "../../base/views/vnode-decorators";
import { isViewport } from "../viewport/model";
import { isSelectable } from "../select/model";
import { isAlignable } from "../bounds/model";
import { Routable, isRoutable, SRoutingHandle } from '../edit/model';
import { MoveRoutingHandleAction, HandleMove, SwitchEditModeAction } from "../edit/edit-routing";
import { isMoveable, Locateable, isLocateable } from './model';
import { RoutedPoint } from "../../graph/routing";
/**
 * Requests that a set of elements be moved to new positions, optionally
 * animating the transition. Handled by MoveCommand.
 */
export class MoveAction implements Action {
    kind = MoveCommand.KIND;

    readonly moves: ElementMove[];
    readonly animate: boolean;

    /**
     * @param moves the element moves to perform
     * @param animate whether the move should be animated (default: true)
     */
    constructor(moves: ElementMove[], animate: boolean = true) {
        this.moves = moves;
        this.animate = animate;
    }
}
/** A single requested move: where an element should go (and optionally where it started). */
export interface ElementMove {
    elementId: string
    // When omitted, the command captures the element's current position at execution time.
    fromPosition?: Point
    toPosition: Point
}
/** An ElementMove whose element has been resolved against the model index. */
export interface ResolvedElementMove {
    elementId: string
    element: SModelElement & Locateable
    fromPosition: Point
    toPosition: Point
}
/** Re-routing of an edge whose endpoints are both being moved. */
export interface ResolvedElementRoute {
    elementId: string
    element: SModelElement & Routable
    fromRoute: Point[]
    toRoute: Point[]
}
/**
 * Executes (and can undo/redo) element moves, optionally animated. Also
 * translates the routing points of edges whose both endpoints moved, so the
 * edge follows its nodes. Consecutive non-animated moves (e.g. during a
 * drag) are merged into a single undoable command.
 */
export class MoveCommand extends MergeableCommand {
    static readonly KIND = 'move';

    // Moves/routes resolved during execute(); reused for undo/redo/merge.
    resolvedMoves: Map<string, ResolvedElementMove> = new Map;
    resolvedRoutes: Map<string, ResolvedElementRoute> = new Map;

    constructor(protected action: MoveAction) {
        super();
    }

    execute(context: CommandExecutionContext) {
        const model = context.root;
        const attachedElements: Set<SModelElement> = new Set;
        // Resolve each requested move and collect the elements attached to
        // the moved ones (typically edges) so their routes can be updated.
        this.action.moves.forEach(move => {
            const resolvedMove = this.resolve(move, model.index);
            if (resolvedMove !== undefined) {
                this.resolvedMoves.set(resolvedMove.elementId, resolvedMove);
                model.index.getAttachedElements(resolvedMove.element).forEach(e => attachedElements.add(e));
            }
        });
        attachedElements.forEach(element => this.handleAttachedElement(element));
        if (this.action.animate) {
            return new MoveAnimation(model, this.resolvedMoves, this.resolvedRoutes, context).start();
        } else {
            return this.doMove(context);
        }
    }

    // Looks up the move's element; returns undefined for unknown or
    // non-locateable elements. fromPosition defaults to the current position.
    protected resolve(move: ElementMove, index: SModelIndex<SModelElement>): ResolvedElementMove | undefined {
        const element = index.getById(move.elementId);
        if (element !== undefined && isLocateable(element)) {
            const fromPosition = move.fromPosition || { x: element.position.x, y: element.position.y };
            return {
                elementId: move.elementId,
                element: element,
                fromPosition: fromPosition,
                toPosition: move.toPosition
            };
        }
        return undefined;
    }

    // Shifts an edge's routing points when BOTH its endpoints are moved.
    // NOTE(review): the delta is taken from the target's move only — this
    // assumes source and target move by the same amount (true for
    // multi-select drags); confirm for other callers.
    protected handleAttachedElement(element: SModelElement): void {
        if (isRoutable(element)) {
            const source = element.source;
            const sourceMove = source ? this.resolvedMoves.get(source.id) : undefined;
            const target = element.target;
            const targetMove = target ? this.resolvedMoves.get(target.id) : undefined;
            if (sourceMove !== undefined && targetMove !== undefined) {
                const deltaX = targetMove.toPosition.x - targetMove.fromPosition.x;
                const deltaY = targetMove.toPosition.y - targetMove.fromPosition.y;
                this.resolvedRoutes.set(element.id, {
                    elementId: element.id,
                    element,
                    fromRoute: element.routingPoints,
                    toRoute: element.routingPoints.map(rp => ({
                        x: rp.x + deltaX,
                        y: rp.y + deltaY
                    }))
                });
            }
        }
    }

    // Applies (or, with reverse=true, reverts) all resolved moves and routes.
    protected doMove(context: CommandExecutionContext, reverse?: boolean): SModelRoot {
        this.resolvedMoves.forEach(res => {
            if (reverse)
                res.element.position = res.fromPosition;
            else
                res.element.position = res.toPosition;
        });
        this.resolvedRoutes.forEach(res => {
            if (reverse)
                res.element.routingPoints = res.fromRoute;
            else
                res.element.routingPoints = res.toRoute;
        });
        return context.root;
    }

    undo(context: CommandExecutionContext) {
        // Undo always animates back to the previous positions.
        return new MoveAnimation(context.root, this.resolvedMoves, this.resolvedRoutes, context, true).start();
    }

    redo(context: CommandExecutionContext) {
        return new MoveAnimation(context.root, this.resolvedMoves, this.resolvedRoutes, context, false).start();
    }

    // Folds a subsequent non-animated MoveCommand into this one: existing
    // moves get their target position updated, new ones are resolved fresh.
    merge(command: ICommand, context: CommandExecutionContext) {
        if (!this.action.animate && command instanceof MoveCommand) {
            command.action.moves.forEach(
                otherMove => {
                    const existingMove = this.resolvedMoves.get(otherMove.elementId);
                    if (existingMove) {
                        existingMove.toPosition = otherMove.toPosition;
                    } else {
                        const resolvedMove = this.resolve(otherMove, context.root.index);
                        if (resolvedMove)
                            this.resolvedMoves.set(resolvedMove.elementId, resolvedMove);
                    }
                }
            );
            return true;
        }
        return false;
    }
}
/**
 * Animates element positions (and edge routing points) by linearly
 * interpolating between their from- and to-states. With `reverse` set,
 * the interpolation runs backwards (used for undo).
 */
export class MoveAnimation extends Animation {

    constructor(protected model: SModelRoot,
                public elementMoves: Map<string, ResolvedElementMove>,
                public elementRoutes: Map<string, ResolvedElementRoute>,
                context: CommandExecutionContext,
                protected reverse: boolean = false) {
        super(context);
    }

    /** Applies the interpolated state for animation progress t in [0, 1]. */
    tween(t: number) {
        // Linear interpolation between two points at the current progress.
        const interpolate = (start: Point, end: Point): Point => ({
            x: (1 - t) * start.x + t * end.x,
            y: (1 - t) * start.y + t * end.y
        });
        this.elementMoves.forEach(move => {
            move.element.position = this.reverse
                ? interpolate(move.toPosition, move.fromPosition)
                : interpolate(move.fromPosition, move.toPosition);
        });
        this.elementRoutes.forEach(elementRoute => {
            // Interpolate pairwise up to the shorter of the two routes.
            const pointCount = Math.min(elementRoute.fromRoute.length, elementRoute.toRoute.length);
            const route: Point[] = [];
            for (let i = 0; i < pointCount; i++) {
                const from = elementRoute.fromRoute[i];
                const to = elementRoute.toRoute[i];
                route.push(this.reverse ? interpolate(to, from) : interpolate(from, to));
            }
            elementRoute.element.routingPoints = route;
        });
        return this.model;
    }
}
/**
 * Mouse listener implementing drag-to-move for selected moveable elements
 * and edge routing handles. Emits non-animated MoveAction /
 * MoveRoutingHandleAction on every mouse-move so moves can be merged.
 */
export class MoveMouseListener extends MouseListener {

    hasDragged = false;
    // Last pointer position in page coordinates; undefined when no drag is active.
    lastDragPosition: Point | undefined;

    mouseDown(target: SModelElement, event: MouseEvent): Action[] {
        const result: Action[] = [];
        // Left button only.
        if (event.button === 0) {
            const moveable = findParentByFeature(target, isMoveable);
            const isRoutingHandle = target instanceof SRoutingHandle;
            if (moveable !== undefined || isRoutingHandle) {
                this.lastDragPosition = { x: event.pageX, y: event.pageY };
            } else {
                this.lastDragPosition = undefined;
            }
            this.hasDragged = false;
            // Clicking a routing handle switches it into edit mode.
            if (isRoutingHandle) {
                result.push(new SwitchEditModeAction([target.id], []));
            }
        }
        return result;
    }

    mouseMove(target: SModelElement, event: MouseEvent): Action[] {
        const result: Action[] = [];
        if (event.buttons === 0)
            // Button released outside the view: treat as a missed mouseUp.
            // NOTE(review): the actions mouseUp returns are discarded here —
            // confirm that dropping them is intended.
            this.mouseUp(target, event);
        else if (this.lastDragPosition) {
            const viewport = findParentByFeature(target, isViewport);
            this.hasDragged = true;
            // Convert the page-space pointer delta into model space.
            const zoom = viewport ? viewport.zoom : 1;
            const dx = (event.pageX - this.lastDragPosition.x) / zoom;
            const dy = (event.pageY - this.lastDragPosition.y) / zoom;
            const nodeMoves: ElementMove[] = [];
            const handleMoves: HandleMove[] = [];
            // Move every selected element; routing handles move via their
            // own action type.
            target.root.index.all()
                .filter(element => isSelectable(element) && element.selected)
                .forEach(element => {
                    if (isMoveable(element)) {
                        nodeMoves.push({
                            elementId: element.id,
                            fromPosition: {
                                x: element.position.x,
                                y: element.position.y
                            },
                            toPosition: {
                                x: element.position.x + dx,
                                y: element.position.y + dy
                            }
                        });
                    } else if (element instanceof SRoutingHandle) {
                        const point = this.getHandlePosition(element);
                        if (point !== undefined) {
                            handleMoves.push({
                                elementId: element.id,
                                fromPosition: point,
                                toPosition: {
                                    x: point.x + dx,
                                    y: point.y + dy
                                }
                            });
                        }
                    }
                });
            this.lastDragPosition = { x: event.pageX, y: event.pageY };
            if (nodeMoves.length > 0)
                result.push(new MoveAction(nodeMoves, false));
            if (handleMoves.length > 0)
                result.push(new MoveRoutingHandleAction(handleMoves, false));
        }
        return result;
    }

    // Current position of a routing handle: 'line' handles sit at the
    // midpoint of the routed segment bracketing handle.pointIndex; junction
    // handles map directly onto a routing point.
    protected getHandlePosition(handle: SRoutingHandle): Point | undefined {
        const parent = handle.parent;
        if (!isRoutable(parent)) {
            return undefined;
        }
        if (handle.kind === 'line') {
            // Maps a routed point to a comparable index along the route
            // (source = -1, target = routingPoints.length).
            const getIndex = (rp: RoutedPoint) => {
                if (rp.pointIndex !== undefined)
                    return rp.pointIndex;
                else if (rp.kind === 'target')
                    return parent.routingPoints.length;
                else
                    return -1;
            };
            const route = parent.route();
            // Find the closest routed points on either side of the handle.
            let rp1, rp2: RoutedPoint | undefined;
            for (const rp of route) {
                const i = getIndex(rp);
                if (i <= handle.pointIndex && (rp1 === undefined || i > getIndex(rp1)))
                    rp1 = rp;
                if (i > handle.pointIndex && (rp2 === undefined || i < getIndex(rp2)))
                    rp2 = rp;
            }
            if (rp1 !== undefined && rp2 !== undefined) {
                return centerOfLine(rp1, rp2);
            }
        } else if (handle.pointIndex >= 0) {
            return parent.routingPoints[handle.pointIndex];
        }
        return undefined;
    }

    mouseEnter(target: SModelElement, event: MouseEvent): Action[] {
        // Pointer re-entered the diagram with no button pressed: the
        // mouseUp happened outside; end the drag (its actions are dropped).
        if (target instanceof SModelRoot && event.buttons === 0)
            this.mouseUp(target, event);
        return [];
    }

    mouseUp(target: SModelElement, event: MouseEvent): Action[] {
        const result: Action[] = [];
        if (this.lastDragPosition) {
            // Leave edit mode on all routing handles that were being edited.
            target.root.index.all()
                .forEach(element => {
                    if (element instanceof SRoutingHandle && element.editMode)
                        result.push(new SwitchEditModeAction([], [element.id]));
                });
        }
        this.hasDragged = false;
        this.lastDragPosition = undefined;
        return result;
    }

    decorate(vnode: VNode, element: SModelElement): VNode {
        // No visual decoration; required by the listener interface.
        return vnode;
    }
}
@injectable()
export class LocationDecorator implements IVNodeDecorator {
decorate(vnode: VNode, element: SModelElement): VNode {
let translate: string = '';
if (isLocateable(element) && element instanceof SChildElement && element.parent !== undefined) {
translate = 'translate(' + element.position.x + ', ' + element.position.y + ')';
}
if (isAlignable(element)) {
if (translate.length > 0)
translate += ' ';
translate += 'translate(' + element.alignment.x + ', ' + element.alignment.y + ')';
}
if (translate.length > 0)
setAttr(vnode, 'transform', translate);
return vnode;
}
postUpdate(): void {
}
} | the_stack |
import EventEmitter from '~/core/EventEmitter';
import {
AnyObjectType,
AppDataElementsTypes,
ArgumentsArray,
ArgumentsBoolean,
ArgumentsItem,
ArgumentsNumber,
ArgumentsString,
} from '~/types/appData';
import { Modules } from '~/types/modules';
import config from './Lottery.config';
import Wrapper from '../Wrapper';
import useLifeCycle from '~/hooks/useLifeCycle';
import useStyles from './Lottery.useStyles';
import Game, { GameRecords, GameModal } from '~/components/Game';
import { GameHandle, GameMap } from '~/components/Game/useGame';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import * as mock from './mockData';
import { getArgumentsItem } from '~/core/getArgumentsTypeDataFromDataSource';
import message from '~/components/Message';
import requester from '~/core/fetch';
import { Prize } from '@byhealth/lottery/dist/types/core';
import { useSelector } from 'react-redux';
import { RootState } from '~/redux/store';
import { getPrizeById } from './helper';
import { debounce } from 'lodash';
import { gametypes } from '~/components/Game/Game';
import s from './Lottery.module.less';
import { saveAddressParames } from '~/components/Game/GameType';
import classNames from 'classnames';
/** A prize entry enriched with the winner's delivery details. */
export interface RecordsType extends Prize {
    /** Winning-record id */
    id?: number | string;
    /** Delivery address */
    receiverAddress?: string;
    /** Receiver name */
    receiverName?: string;
    /** Receiver phone number */
    receiverPhone?: string;
    /** Time of winning */
    winTime?: string;
    [keys: string]: any;
}
/** Props of the Lottery module (standard app-data element plus its id and event bus). */
export interface LotteryProps extends AppDataElementsTypes {
    id: string;
    eventEmitter: EventEmitter;
}
/**
 * Lottery module component: renders a configurable lottery game (red
 * envelope, wheel, ...) driven by page-configured APIs and data sources.
 */
const Lottery: Modules<LotteryProps> = (props) => {
    const { moduleId, style } = props;
    const { currentEditorStylePath } = useSelector(
        (state: RootState) => state.controller
    );
    const [prizes, setPrizes] = useState<Prize[]>([]);
    const [phoneAndRCardId, setPhoneAndRCardId] = useState<AnyObjectType>();
    const [receiverInfo, setReceiverInfo] = useState<AnyObjectType>(
        mock.receiverInfo
    );
    const [successmodalParams, setSuccessmodalParams] = useState<AnyObjectType>(
        {}
    );
    const [type, setType] = useState<keyof GameMap>('redenvelope');
    const [displayRecord, setDisplayRecord] = useState<boolean>(false);
    const [displayRule, setDisplayRule] = useState<boolean>(false);
    const [ruleText, setRuleText] = useState<any[]>([]);
    // Latest winning prize (set after a draw).
    const winInfo = useRef<Prize>();
    // Draw gate: when blocked, `message` explains why.
    const checked = useRef<{
        /** Message shown when the draw is blocked */
        message?: string;
        /** Whether drawing is allowed — NOTE(review): the original comment
         * read "whether disabled"; confirm the intended polarity. */
        enabled: boolean;
    }>({ enabled: true });
    // Whether real prize data is ready; mock data is used until it is.
    const prizesIsReadyRef = useRef<boolean>();
    // Stores the lifecycle event dispatchers exposed by useLifeCycle.
    const dispatchEventRef = useRef<{
        mount: () => void;
        unmount: () => void;
        onStart: () => void;
        onEnd: () => void;
        onCancel: () => void;
        onEnsure: () => void;
        onShowSuccess: () => void;
        onShowFailed: () => void;
        onShowAddress: () => void;
    }>();
    const gameHandle = useRef<GameHandle<typeof Game>>(null);
    // Page-level API definitions.
    const { api } = props;
    // Skin styles, keyed per module instance and game type.
    const MId = `gametarget${moduleId}_${type}`;
    useStyles(MId, style, type);
    /** Opens the address modal. NOTE(review): the captured address is only
     * logged, never persisted via apiSaveAddress — looks unfinished; confirm. */
    const handleSaveAddress = useCallback(() => {
        const handlers = gameHandle.current;
        handlers?.game.current?.core.AddressModal.showModal((address) => {
            console.log(address);
        });
    }, [gameHandle]);
// ===========================================组件Api============================================
//#region
/**
* 抽奖前置Api, 用于检查是否满足抽奖条件
* */
const apiBeforeStart = useCallback(async () => {
const apiArguments = api?.find((item) => item.apiId === 'beforeStart');
// 获取抽奖结果数据, 将结果数据中转到全局数据中
if (apiArguments && apiArguments.url && apiArguments.method) {
try {
return requester(apiArguments || {});
} catch (error) {
throw error;
}
}
}, [api]);
/**
* 抽奖Api, 用于抽奖
*/
const apiStart = useCallback(async () => {
const apiArguments = api?.find((item) => item.apiId === 'lottery');
// 获取抽奖结果数据, 将结果数据中转到全局数据中
if (apiArguments && apiArguments.url && apiArguments.method) {
return requester(apiArguments || {});
}
}, [api]);
/**
* 保存地址Api, 用于实物奖品保存地址信息
*/
const apiSaveAddress = useCallback(
async (data: saveAddressParames) => {
const oprationData = {
winId: 'winInfo.current.id',
...data,
};
// 这里不需要api设置参数
const apiArguments = api?.find(
(item) => item.apiId === 'saveAddress'
);
// 获取抽奖结果数据, 将结果数据中转到全局数据中
if (apiArguments) {
apiArguments.body = [
{
type: 'object',
fieldName: 'addressData',
data: oprationData,
},
];
return requester(apiArguments || {});
}
// 处理收货地址
message.warning('没有设置保存地址Api, 当前不可保存!');
},
[api]
);
/**
* 检查手机验证码
* */
const checkVerificationCode = useCallback(
async (data) => {
// 这里不需要api设置参数
const apiArguments = api?.find(
(item) => item.apiId === 'getVerificationCode'
);
// 获取抽奖结果数据, 将结果数据中转到全局数据中
if (apiArguments) {
apiArguments.body = [
{ type: 'object', fieldName: 'addressData', data },
];
return requester(apiArguments || {});
}
// 处理收货地址
message.warning('没有设置获取验证码Api!');
},
[api]
);
const apiGetRecord = useCallback(async () => {
const apiArguments = api?.find((item) => item.apiId === 'getRecord');
// 获取抽奖结果数据, 将结果数据中转到全局数据中
if (apiArguments && apiArguments.url && apiArguments.method) {
return requester(apiArguments || {});
}
}, [api]);
//#endregion
// ===========================================组件方法============================================ //
//#region
/**
* 修改抽奖类型
*/
const setGameType = useCallback((type: ArgumentsString) => {
const argOptType = getArgumentsItem(type) as keyof GameMap;
if (gametypes.includes(argOptType)) {
setType(argOptType);
}
}, []);
// 设置文本
const setRules = useCallback((rules: ArgumentsItem) => {
const rulesTexts = getArgumentsItem(rules) as any[];
setRuleText(rulesTexts);
}, []);
/**
* 设置奖品数据, 无数据时使用mock
*/
const setRunningPrizes = useCallback((prizes) => {
const prizesArg = getArgumentsItem(prizes) as any[];
if (Array.isArray(prizesArg) && prizesArg.length) {
setPrizes(prizesArg);
prizesIsReadyRef.current = true;
}
// 没有准备过数据会使用mock数据
if (!prizesIsReadyRef.current) {
setPrizes(mock.prizes);
}
}, []);
/**
* 设置中奖记录
*/
const [records, setRecords] = useState<RecordsType[]>(mock.records);
const [disablePullDown, setDisablePullDown] = useState(false);
const [disablePullUp, setDisablePullUp] = useState(false);
const onSaveAddress = useCallback(
(item) => () => {
console.log(item);
return handleSaveAddress();
},
[handleSaveAddress]
);
/**
* 设置运行时中奖记录
*/
const setRunningRecords = useCallback(
(
records: ArgumentsArray,
disablePullDown: ArgumentsString,
disablePullUp: ArgumentsString
) => {
// 中奖记录
const recordArg = getArgumentsItem(records) as any[];
const disablePullDownArg = getArgumentsItem(
disablePullDown
) as string;
const disablePullUpArg = getArgumentsItem(disablePullUp) as string;
if (Array.isArray(recordArg) && recordArg.length) {
setRecords(recordArg);
}
setDisablePullDown(disablePullDownArg === '0');
setDisablePullUp(disablePullUpArg === '0');
},
[]
);
/**
* 渲染中奖记录
*/
const renderRecords = useCallback(
() =>
records?.length ? (
<ul className={classNames(s.recordwrap, `${MId}_records_list`)}>
{records.map((item, index) => {
return (
<li key={index} className={ classNames(s.recorditem, `${MId}_records_list_item`)}>
<div className={classNames(s.prizeimg, `${MId}_records_list_item_prizeimg_wrap`)}>
<img
className={`${MId}_records_list_item_prizeimg`}
src={item.prizeImg}
alt={item.prizeName}
/>
</div>
<div className={classNames(s.recordstr, `${MId}_records_list_item_text`)}>
<div className={classNames(s.prizename, `${MId}_records_list_item_prizename`)}>
{item.prizeName}
</div>
<div className={s.timeandbutton}>
<div className={classNames(s.wintime, `${MId}_records_list_item_wintime`)}>
{item.winTime}
</div>
{item.receiveType === 2 &&
!item.receiverAddress ? (
<button
onClick={onSaveAddress(item)}
className={`${MId}_records_list_item_saveaddress`}
>
填写地址
</button>
) : null}
</div>
{item.receiverAddress ? (
<div className={classNames(s.receiveraddress, `${MId}_records_list_item_address`)}>
收货地址:{item.receiverAddress}
</div>
) : null}
</div>
</li>
);
})}
</ul>
) : (
<div>暂无中奖记录</div>
),
[MId, onSaveAddress, records]
);
/**
* 设置玩家基本信息
* @param phone 设置玩家手机号码
* @param cardIdRequest 设置领取奖品时是否需要填写身份证1 隐藏,2 验证,3 为空时不验证有填写时验证,4 不验证
*/
const useConfig = useCallback(
(phone: ArgumentsString, cardIdRequest: ArgumentsNumber) => {
const argOptPhone = getArgumentsItem(phone);
const argCardIdRequest = getArgumentsItem(cardIdRequest);
setPhoneAndRCardId({
phone: argOptPhone,
cardIdRequest: argCardIdRequest,
});
},
[]
);
/**
* 设置中奖弹窗
* @param title 中奖弹窗标题
* @param animation 中奖弹窗动画
*/
const setSuccessModal = useCallback(
(title: ArgumentsString, animation: ArgumentsString) => {
setSuccessmodalParams({
title: getArgumentsItem(title),
animation: getArgumentsItem(animation),
});
},
[]
);
/**
* 检查抽奖
* @param
*/
const checkedLottery = useCallback(
(enabled: ArgumentsBoolean, message: ArgumentsString) => {
const argEnabled = getArgumentsItem(enabled) as boolean;
const argMessage = getArgumentsItem(message) as string;
checked.current = {
enabled: argEnabled,
message: argMessage,
};
},
[]
);
// hank 等待
const setDelayStart = useCallback(
() =>
new Promise((res) =>
setTimeout(() => {
dispatchEventRef.current?.onStart();
res(null);
})
),
[]
);
/**
* 开始抽奖
* */
const startLottery = useCallback(async () => {
// step1、执行前置api 用于抽奖前检查是否满足抽奖条件
await apiBeforeStart();
// step2 执行抽奖事件
await setDelayStart();
// step3、检查状态是否可以抽奖
if (!checked.current.enabled) {
console.log('没有权限,请勿抽奖!');
message.error(checked.current?.message || '暂无抽奖权限!');
throw checked.current?.message;
}
// step4、返回抽奖接口
const settedApi = (await apiStart()) as AnyObjectType;
// step5、执行结束事件,可用于重置数据
dispatchEventRef.current?.onEnd();
if (settedApi?.response?.prizeId !== undefined) {
let currentPrize = getPrizeById(settedApi.response.prizeId, prizes);
// 回填当前操作奖品
winInfo.current = currentPrize;
return currentPrize;
}
// 没有设置Api时启用mock数据
if (!settedApi) {
message.warning('活动奖品或抽奖Api未设置正确, 当前使用模拟抽奖!');
const winData =
prizes[Math.floor(Math.random() * prizes.length - 1)];
// 回填当前操作奖品
winInfo.current = winData;
return winData;
}
}, [apiBeforeStart, apiStart, checked, prizes, setDelayStart]);
/**
* 设置默认实物奖品邮寄地址,用于地址填写时回填信息
* @param receiverPhone 收货电话
* @param regionName 收货姓名
* @param region 收货人省市区
* @param address 收货人详细地址
* @param idCard 人身份证id
*/
const setDefaultReceiveInfo = useCallback(
(
receiverPhone: ArgumentsString,
regionName: ArgumentsString,
region: ArgumentsString,
address: ArgumentsString,
idCard: ArgumentsString
) => {
const handlers = gameHandle.current!;
const argReceiverPhone = getArgumentsItem(receiverPhone);
let argRegionName: any = getArgumentsItem(regionName);
let argRegion: any = getArgumentsItem(region);
const argAddress = getArgumentsItem(address);
const argIdCard = getArgumentsItem(idCard);
argRegionName = argRegionName
?.replace(/,/g, ',')
?.split(',')
?.filter(Boolean);
argRegion = argRegion?.replace(/,/g, ',')?.split(',');
const parames = {
receiverPhone: argReceiverPhone,
address: argAddress,
region: argRegion,
idCard: argIdCard,
};
if (!!argRegionName.length) {
(parames as any).regionName = argRegionName;
}
handlers.game.current?.core.AddressModal.updateParams(
parames as any
);
setReceiverInfo(parames);
},
[gameHandle]
);
/**通过其他事件关联抽奖 */
const lottery = useCallback(() => {
const handlers = gameHandle.current!;
handlers.game.current?.core.lottery();
}, [gameHandle]);
/**显示中奖记录 */
const showRecord = useCallback(async () => {
await apiGetRecord();
setDisplayRecord(true);
}, [apiGetRecord]);
/**显示活动规则 */
const showRules = useCallback(() => {
setDisplayRule(true);
}, []);
//#endregion
//=========================================end=================================================//
/**
* 编辑弹窗样式时可视化弹窗
* 做高频编辑防抖处理
*/
const onChangeDebounce = useMemo(
() =>
debounce(() => {
if (
currentEditorStylePath?.length
) {
const { game } = gameHandle.current!;
const path = currentEditorStylePath?.map(
(item) => item.value
);
// 显示通用弹窗
if (path.includes('dialog_overlay') || path.includes('rules_content')) {
showRules();
}
// 显示中奖弹窗
if (path.includes('successmodal_article_content')) {
game.current?.core.showSuccessModal(
prizes[0]
);
}
// 显示未中奖弹窗
if (path.includes('failedmodal_article_content')) {
game.current?.core.showFailedModal(
prizes[1]
);
}
// 显示地址弹窗
if (path.includes('addressmodal_addressbox')) {
game.current?.core.showAddressModal();
}
// 显示中奖记录
if (path.includes('records_content')) {
showRecord();
}
}
}, 1000),
[currentEditorStylePath, prizes, showRecord, showRules]
);
const editorShow = useCallback(() => {
onChangeDebounce();
}, [onChangeDebounce]);
useEffect(() => {
editorShow();
}, [editorShow, gameHandle]);
// lifeCycle
const [dispatchEvent] = useLifeCycle(
moduleId,
{
mount: '初始化',
unmount: '卸载',
onStart: '抽奖',
onEnd: '抽奖结束',
onCancel: '放弃中奖/关闭弹窗',
onEnsure: '确认中奖结果',
onShowSuccess: '显示中奖',
onShowFailed: '显示未中奖',
onShowAddress: '显示地址',
},
{
setGameType,
setRunningPrizes,
setRules,
setRunningRecords,
lottery,
checkedLottery,
useConfig,
setDefaultReceiveInfo,
setSuccessModal,
showRecord,
showRules,
},
api?.find((item) => item.apiId === 'init')
);
// ref存储
dispatchEventRef.current = dispatchEvent;
return (
<Wrapper {...props}>
<Game
parentId={`${MId}_wrap`}
className={`gametarget${moduleId}_gameroot`}
targetId={MId}
playerPhone={phoneAndRCardId?.phone}
successModalTitle={successmodalParams.title || '恭喜您,获得'}
successModalAnimation={{
form: successmodalParams.animation || 'flipInY',
}}
type={type}
cardIdRequest={phoneAndRCardId?.cardIdRequest}
ref={gameHandle}
start={startLottery}
prizes={prizes}
saveAddress={apiSaveAddress}
receiverInfo={receiverInfo}
onCancel={dispatchEventRef.current?.onCancel}
onEnsure={dispatchEventRef.current?.onEnsure}
checkVerificationCode={checkVerificationCode}
onShowSuccess={() => {
dispatchEventRef.current?.onShowSuccess();
}}
onShowFailed={() => {
dispatchEventRef.current?.onShowFailed();
}}
onShowAddress={() => {
dispatchEventRef.current?.onShowAddress();
}}
/>
<GameRecords
id={`${MId}_records`}
visible={displayRecord}
onCancel={() => setDisplayRecord(false)}
disablePullUp={disablePullUp}
disablePullDown={disablePullDown}
onPullDown={async () => console.log()}
onPullUp={async () => console.log()}
>
{renderRecords()}
</GameRecords>
<GameModal
id={`${MId}_rules`}
visible={displayRule}
title="活动规则"
okText="返回抽奖"
onOk={() => setDisplayRule(false)}
onCancel={() => setDisplayRule(false)}
>
<ol className={classNames(s.rule, `${MId}_rules_list`)}>
{ruleText.map((item, key) => (
<li className={`${MId}_rules_list_item`} key={key}>{item}</li>
))}
</ol>
</GameModal>
</Wrapper>
);
};
// bind static
for (const key in config) {
if (Object.prototype.hasOwnProperty.call(config, key)) {
Lottery[key] = config[key];
}
}
export default Lottery; | the_stack |
import {
Admin,
Chat,
Config,
Core,
DatabaseProvider,
Email,
Forms,
PushNotifications,
Router,
SMS,
Storage,
} from './modules';
import { Authentication } from './modules/authentication';
import Crypto from 'crypto';
import { EventBus } from './utilities/EventBus';
import { RedisManager } from './utilities/RedisManager';
import { StateManager } from './utilities/StateManager';
import { CompatServiceDefinition } from 'nice-grpc/lib/service-definitions';
import { ConduitModule } from './classes/ConduitModule';
import { Client } from 'nice-grpc';
import { status } from '@grpc/grpc-js';
import { sleep } from './utilities';
import {
HealthDefinition,
HealthCheckResponse_ServingStatus,
} from './protoUtils/grpc_health_check';
import {
GetRedisDetailsResponse,
ModuleListResponse_ModuleResponse,
} from './protoUtils/core';
import { HealthCheckStatus } from './types';
import { createSigner } from 'fast-jwt';
/**
 * Conduit gRPC SDK entry point. Connects a module (or core itself) to the
 * Conduit core, maintains gRPC clients for all known/registered modules as
 * they come and go, and exposes the event bus / state manager utilities.
 */
export default class ConduitGrpcSdk {
  private readonly serverUrl: string;
  private readonly _watchModules: boolean;
  private readonly _core?: Core;
  private readonly _config?: Config;
  private readonly _admin?: Admin;
  private readonly _router?: Router;
  private readonly _modules: { [key: string]: ConduitModule<any> } = {};
  // Statically known modules that get typed wrapper clients.
  private readonly _availableModules: any = {
    database: DatabaseProvider,
    storage: Storage,
    email: Email,
    pushNotifications: PushNotifications,
    authentication: Authentication,
    sms: SMS,
    chat: Chat,
    forms: Forms,
  };
  // Service definitions registered at runtime via moduleClient().
  private _dynamicModules: { [key: string]: CompatServiceDefinition } = {};
  private _eventBus?: EventBus;
  private _stateManager?: StateManager;
  // Timestamp of the last module-list fetch; throttles refreshModules().
  private lastSearch: number = Date.now();
  private readonly name: string;
  private readonly _serviceHealthStatusGetter: Function;
  private readonly _grpcToken?: string;
  private _initialized: boolean = false;

  /**
   * @param serverUrl address of the Conduit core gRPC server
   * @param serviceHealthStatusGetter callback reporting this service's health
   * @param name module name; a random one is generated when omitted
   * @param watchModules whether to track module availability changes
   */
  constructor(
    serverUrl: string,
    serviceHealthStatusGetter: Function,
    name?: string,
    watchModules = true,
  ) {
    if (!name) {
      this.name = 'module_' + Crypto.randomBytes(16).toString('hex');
    } else {
      this.name = name;
    }
    this.serverUrl = serverUrl;
    this._watchModules = watchModules;
    this._serviceHealthStatusGetter = serviceHealthStatusGetter;
    const grpcKey = process.env.GRPC_KEY;
    if (grpcKey) {
      // Sign a token identifying this module for authenticated gRPC calls.
      const sign = createSigner({ key: grpcKey });
      this._grpcToken = sign({
        moduleName: this.name,
      });
    }
  }

  /**
   * Connects to core and initializes module clients. Core itself skips the
   * wait loop; every other module polls core's health until it serves
   * (database polls faster since everything else depends on it).
   */
  async initialize() {
    if (this.name === 'core') {
      this._initialize();
    } else {
      (this._core as unknown) = new Core(this.name, this.serverUrl, this._grpcToken);
      console.log('Waiting for Core...');
      const delay = this.name === 'database' ? 250 : 1000;
      while (true) {
        try {
          const state = await this.core.check();
          if (
            this.name === 'database' ||
            ((state as unknown) as HealthCheckStatus) === HealthCheckStatus.SERVING
          ) {
            console.log('Core connection established');
            this._initialize();
            break;
          }
        } catch (err) {
          if (err.code === status.PERMISSION_DENIED) {
            // invalid/missing gRPC token: retrying cannot recover, bail out
            console.error(err);
            process.exit(-1);
          }
          await sleep(delay);
        }
      }
    }
  }

  /** Creates the config/admin/router clients and starts module discovery. */
  private _initialize() {
    if (this._initialized)
      throw new Error("Module's grpc-sdk has already been initialized");
    (this._config as unknown) = new Config(
      this.name,
      this.serverUrl,
      this._serviceHealthStatusGetter,
      this._grpcToken,
    );
    (this._admin as unknown) = new Admin(this.name, this.serverUrl, this._grpcToken);
    (this._router as unknown) = new Router(this.name, this.serverUrl, this._grpcToken);
    this.initializeModules().then();
    if (this._watchModules) {
      this.watchModules();
    }
    this._initialized = true;
  }

  /** Event bus, or null (with a warning) when initializeEventBus() has not run. */
  get bus(): EventBus | null {
    if (this._eventBus) {
      return this._eventBus;
    } else {
      console.warn('Event bus not initialized');
      return null;
    }
  }

  /** State manager, or null (with a warning) when initializeEventBus() has not run. */
  get state(): StateManager | null {
    if (this._stateManager) {
      return this._stateManager;
    } else {
      console.warn('State Manager not initialized');
      return null;
    }
  }

  get core(): Core {
    return this._core!;
  }

  get config(): Config {
    return this._config!;
  }

  get admin(): Admin {
    return this._admin!;
  }

  get router(): Router {
    return this._router!;
  }

  /** Database client, or null (with a warning) while the module is down. */
  get database(): DatabaseProvider | null {
    if (this._modules['database']) {
      return this._modules['database'] as DatabaseProvider;
    } else {
      console.warn('Database provider not up yet!');
      return null;
    }
  }

  /** Alias of {@link database}. */
  get databaseProvider(): DatabaseProvider | null {
    return this.database;
  }

  get storage(): Storage | null {
    if (this._modules['storage']) {
      return this._modules['storage'] as Storage;
    } else {
      console.warn('Storage module not up yet!');
      return null;
    }
  }

  get forms(): Forms | null {
    if (this._modules['forms']) {
      return this._modules['forms'] as Forms;
    } else {
      console.warn('Forms module not up yet!');
      return null;
    }
  }

  get emailProvider(): Email | null {
    if (this._modules['email']) {
      return this._modules['email'] as Email;
    } else {
      console.warn('Email provider not up yet!');
      return null;
    }
  }

  get pushNotifications(): PushNotifications | null {
    if (this._modules['pushNotifications']) {
      return this._modules['pushNotifications'] as PushNotifications;
    } else {
      console.warn('Push notifications module not up yet!');
      return null;
    }
  }

  get authentication(): Authentication | null {
    if (this._modules['authentication']) {
      return this._modules['authentication'] as Authentication;
    } else {
      console.warn('Authentication module not up yet!');
      return null;
    }
  }

  get sms(): SMS | null {
    if (this._modules['sms']) {
      return this._modules['sms'] as SMS;
    } else {
      console.warn('SMS module not up yet!');
      return null;
    }
  }

  get chat(): Chat | null {
    if (this._modules['chat']) {
      return this._modules['chat'] as Chat;
    } else {
      console.warn('Chat module not up yet!');
      return null;
    }
  }

  /**
   * Subscribes to core's serving-module updates, closing connections for
   * modules that stopped serving and (re)opening/creating clients for ones
   * that are serving.
   */
  watchModules() {
    const emitter = this.config.getModuleWatcher();
    this.config.watchModules().then();
    emitter.on('serving-modules-update', (modules: any) => {
      Object.keys(this._modules).forEach(r => {
        const found = modules.filter(
          (m: ModuleListResponse_ModuleResponse) => m.moduleName === r && m.serving,
        );
        if ((!found || found.length === 0) && this._availableModules[r]) {
          // module disappeared or stopped serving
          this._modules[r]?.closeConnection();
          emitter.emit(`module-connection-update:${r}`, false);
        }
      });
      modules.forEach((m: ModuleListResponse_ModuleResponse) => {
        if (m.serving) {
          if (this._modules[m.moduleName] && this._availableModules[m.moduleName]) {
            this._modules[m.moduleName]?.openConnection();
          }
          this.createModuleClient(m.moduleName, m.url);
          emitter.emit(`module-connection-update:${m.moduleName}`, true);
        }
      });
    });
  }

  /**
   * Invokes `callback` with the module's current serving state, then again on
   * every subsequent connection change.
   * @param moduleName module to observe
   * @param callback receives true while the module is serving
   * @param wait when true, blocks until the module's client exists
   */
  async monitorModule(
    moduleName: string,
    callback: (serving: boolean) => void,
    wait: boolean = true,
  ) {
    if (wait) await this.waitForExistence(moduleName);
    // BUGFIX: this previously always queried the 'chat' module's health
    // client instead of the requested module's.
    this._modules[moduleName]?.healthClient
      ?.check({})
      .then(res => {
        callback(res?.status === HealthCheckResponse_ServingStatus.SERVING);
      })
      .catch(() => {
        callback(false);
      });
    const emitter = this.config.getModuleWatcher();
    emitter.on(`module-connection-update:${moduleName}`, callback);
  }

  /** Removes every listener registered via monitorModule() for the module. */
  unmonitorModule(moduleName: string) {
    const emitter = this.config.getModuleWatcher();
    emitter.removeAllListeners(`module-connection-update:${moduleName}`);
  }

  /** Fetches Redis details from core and sets up the event bus + state manager. */
  initializeEventBus(): Promise<EventBus> {
    return this.config
      .getRedisDetails()
      .then((r: GetRedisDetailsResponse) => {
        const redisManager = new RedisManager(r.redisHost, r.redisPort);
        this._eventBus = new EventBus(redisManager);
        this._stateManager = new StateManager(redisManager, this.name);
        return this._eventBus;
      })
      .catch((err: Error) => {
        console.error('Failed to initialize event bus');
        throw err;
      });
  }

  /**
   * Gets all the registered modules from the config and creates clients for them.
   * This will only work on known modules, since the primary usage for the sdk is internal
   */
  initializeModules() {
    return this._config!.moduleList()
      .then(r => {
        this.lastSearch = Date.now();
        r.forEach(m => {
          this.createModuleClient(m.moduleName, m.url);
        });
        return 'ok';
      })
      .catch(err => {
        // code 5 (NOT_FOUND) just means no modules registered yet
        if (err.code !== 5) {
          console.error(err);
        }
      });
  }

  /**
   * Creates a client for `moduleName` at `moduleUrl` unless one already
   * exists or the module is neither statically known nor dynamically
   * registered.
   */
  createModuleClient(moduleName: string, moduleUrl: string) {
    if (
      this._modules[moduleName] ||
      (!this._availableModules[moduleName] && !this._dynamicModules[moduleName])
    )
      return;
    if (this._availableModules[moduleName]) {
      this._modules[moduleName] = new this._availableModules[moduleName](
        this.name,
        moduleUrl,
        this._grpcToken,
      );
    } else if (this._dynamicModules[moduleName]) {
      this._modules[moduleName] = new ConduitModule(
        this.name,
        moduleName,
        moduleUrl,
        this._grpcToken,
      );
      this._modules[moduleName].initializeClient(this._dynamicModules[moduleName]);
    }
  }

  /** Registers a dynamic module's service definition for later client creation. */
  moduleClient(name: string, type: CompatServiceDefinition): void {
    this._dynamicModules[name] = type;
  }

  /** Raw gRPC client for a module, or undefined if no client exists. */
  getModule<T extends CompatServiceDefinition>(name: string): Client<T> | undefined {
    if (this._modules[name]) return this._modules[name].client!;
  }

  /** Health-check client for a module, or undefined if no client exists. */
  getHealthClient<T extends CompatServiceDefinition>(
    name: string,
  ): Client<typeof HealthDefinition> | undefined {
    // NOTE(review): the type parameter T is unused; kept for API compatibility.
    if (this._modules[name]) return this._modules[name].healthClient!;
  }

  /** True when a client exists for the module and its connection is active. */
  isAvailable(moduleName: string) {
    return !!(this._modules[moduleName] && this._modules[moduleName].active);
  }

  /** Polls once a second until a client for the module has been created. */
  async waitForExistence(moduleName: string) {
    while (!this._modules[moduleName]) {
      await sleep(1000);
    }
    return true;
  }

  /**
   * Used to refresh all modules to check for new registrations
   * @param force If true will check for new modules no matter the interval
   */
  async refreshModules(force?: boolean) {
    if (this.lastSearch < Date.now() - 3000 || force) {
      return this.initializeModules();
    }
    return 'ok';
  }

  get grpcToken() {
    return this._grpcToken;
  }
}
export * from './interfaces';
export * from './classes';
export * from './modules';
export * from './helpers';
export * from './constants';
export * from './types'; | the_stack |
import constants = require('../constants/odata_constants');
import enums = require('../constants/odata_enums');
import {ODataType} from "../constants/odata_enums";
import lbConstants = require('../constants/loopback_constants');
import {LoopbackModelClass, LoopbackModelProperty} from "../types/loopbacktypes";
import {RequestModelClass} from "../types/n_odata_types";
import log4js = require('log4js');
// Module-wide odata-server configuration; set once via setConfig() and read
// by _getBaseURL() when building service URLs.
var oDataServerConfig;
/**
* A module for exporting common function that are used by several other
* modules of the odata-server
*
* @param loopbackApplication
* @param options
*/
export = {
setConfig: _setConfig,
getBaseURL: _getBaseURL,
getRequestType: _getRequestType,
getIdFromUrlParameter: _getIdFromUrlParameter,
getPluralForModel: _getPluralForModel,
getModelClass: _getModelClass,
getRequestModelClass: _getRequestModelClass,
getIdByPropertyType: _getIdByPropertyType,
convertType: _convertType
};
// Shared logger for the "odata" category.
var logger = log4js.getLogger("odata");
// Stores the odata-server configuration for later use.
function _setConfig(config) {
oDataServerConfig = config;
}
/**
 * Builds the service base URL, e.g. http://127.0.0.1:3000/<odataPrefix>.
 * When the request passed through a reverse proxy, the X-Forwarded-Host
 * (and, if present, X-Forwarded-Proto) headers take precedence over the
 * request's own protocol/host/port.
 * @param req
 * @returns {string} URL
 * @private
 */
function _getBaseURL(req) {
    var prefix = oDataServerConfig.odataPrefix;
    var forwardedHost = req.headers["x-forwarded-host"];
    if (forwardedHost) {
        // prefer the forwarded protocol, falling back to the request's own
        var proto = req.headers["x-forwarded-proto"] || req.protocol;
        return proto + '://' + forwardedHost + '/' + prefix;
    }
    return req.protocol + '://' + req.hostname + ':' + req.app.get('port') + '/' + prefix;
}
/**
 * Classifies a GET request into one of the supported OData request kinds:
 * service document (empty path), $metadata, collection count ($count suffix),
 * collection, or single entity. Anything else stays UNDEFINED.
 *
 * @param {[type]} req [description]
 * @return {[type]} the matching GetRequestTypeEnum value
 */
function _getRequestType(req) {
    var param0:string = req.params[0];
    if (param0 === '') {
        return enums.GetRequestTypeEnum.SERVICE;
    }
    var segments:string[] = param0.split('/');
    if (param0.toUpperCase() === "$METADATA") {
        return enums.GetRequestTypeEnum.METADATA;
    }
    if (segments[segments.length - 1] === '$count') {
        return enums.GetRequestTypeEnum.COLLECTION_COUNT;
    }
    if (_isRequestCollection(req)) {
        return enums.GetRequestTypeEnum.COLLECTION;
    }
    if (_isRequestEntity(req)) {
        return enums.GetRequestTypeEnum.ENTITY;
    }
    return enums.GetRequestTypeEnum.UNDEFINED;
}
/**
 * Determines whether the request addresses a collection: either a model's
 * plural without a key (e.g. Customers), or a key plus a navigation property
 * that is a 'hasMany' relation (e.g. Customers(1)/orders).
 * @param req
 * @private
 */
function _isRequestCollection(req) {
    // parts = [full_match, model(plural), id, property]
    var parts = /^([^/(]+)(?:[(](.*)[)])?(?:[/]([A-Za-z]+))?/g.exec(req.params[0]);
    var models = req.app.models();
    return models.some(function (model) {
        if (_getPluralForModel(model) !== parts[1]) {
            return false;
        }
        if (parts[2] && parts[3]) {
            // key + navigation property: only hasMany relations are collections
            var relations = model.definition.settings.relations;
            return !!(relations && relations[parts[3]] &&
                relations[parts[3]].type === lbConstants.LB_REL_HASMANY);
        }
        // without a key the collection itself is addressed
        return !parts[2];
    });
}
/**
 * Determines whether the request addresses a single entity: a model's plural
 * with a key (e.g. Customers(1)), or a key plus a navigation property that is
 * a 'belongsTo' or 'hasOne' relation.
 * @param req
 * @private
 */
function _isRequestEntity(req) {
    // parts = [full_match, model(plural), id, property]
    var parts = /^([^/(]+)(?:[(](.*)[)])?(?:[/]([A-Za-z]+))?/g.exec(req.params[0]);
    var models = req.app.models();
    return models.some(function (model) {
        if (_getPluralForModel(model) !== parts[1]) {
            return false;
        }
        if (parts[2] && parts[3]) {
            // key + navigation property: single-valued relations are entities
            var relations = model.definition.settings.relations;
            return !!(relations && relations[parts[3]] &&
                (relations[parts[3]].type === lbConstants.LB_REL_BELONGSTO ||
                    relations[parts[3]].type === lbConstants.LB_REL_HASONE));
        }
        // a key without a navigation property addresses the entity directly
        return !!parts[2];
    });
}
/**
 * Resolves the plural name for a model: the explicit settings.plural when
 * configured, otherwise the model name with an 's' appended.
 * @param model
 * @returns {string}
 * @private
 */
function _getPluralForModel(model:LoopbackModelClass):string {
    return model.definition.settings.plural || (model.definition.name + 's');
}
/**
 * DEPRECATED key extractor. Pulls the value between '(' and ')' of a URL
 * segment such as People('1M'), strips surrounding single/double quotes and
 * a trailing OData decimal marker 'M' (clients may submit formatted values
 * like 1.2M per the OData formatting rules).
 * @param param0
 * @returns {string}
 * @private
 */
function _getIdFromUrlParameter(param0) {
    console.log("'_getIdFromUrlParameter' is DEPRECATED! Please use '_getIdByPropertyType' instead.")
    var value = param0.substring(param0.indexOf('(') + 1, param0.indexOf(')'));
    var isQuoted = value.startsWith("'") || value.startsWith("\"");
    if (isQuoted) {
        value = value.slice(1, -1);
    }
    if (value.endsWith('M')) {
        value = value.slice(0, -1);
    }
    return value;
}
/**
 * Extracts the key value from a raw URL key literal according to the id
 * property's EDM type. EDM.String values must be enclosed in single quotes;
 * EDM.Decimal values may carry OData formatting such as 1.2M (see e.g. V2:
 * http://www.odata.org/documentation/odata-version-2-0/overview/ (6.)), so
 * only the leading decimal number is taken. Returns undefined when the
 * literal does not match the expected shape.
 * @param sRawId raw key literal from the URL
 * @param property the model's id property definition
 * @returns {string}
 * @private
 */
function _getIdByPropertyType(sRawId, property:LoopbackModelProperty) {
    var id;
    switch (_convertType(property)) {
        case ODataType.EDM_STRING:
            // value must be enclosed in single quotes
            id = (/^['](.*)[']$/g.exec(sRawId) || [undefined, undefined])[1];
            break;
        case ODataType.EDM_DECIMAL:
            // BUGFIX: the dot is now escaped (previously '.' matched any
            // character) and a failed match no longer throws a TypeError.
            var decimalMatch = /^[0-9]+\.[0-9]+/.exec(sRawId);
            id = decimalMatch ? decimalMatch[0] : undefined;
            break;
        default:
            // MongoDB-generated id columns may arrive quoted or bare
            if (property.generated === true && typeof sRawId !== "undefined") {
                if (sRawId.charAt(0) === "'") {
                    // search for anything enclosed by ''
                    id = (/^['](.*)[']$/g.exec(sRawId) || [undefined, undefined])[1];
                } else {
                    id = sRawId;
                }
            } else {
                // other cases: pass the raw value through unchanged
                id = sRawId;
            }
            break;
    }
    return id;
}
/**
 * get the Model for request uri (e.g.: steps(1)/children).
 * Resolves the addressed ModelClass, an optional foreign-key filter (when
 * the uri navigates a relation) and the parsed request id.
 * @param {[type]} models [description]
 * @param {[type]} requestUri the request uri
 * @return {[type]} Promise that resolves to a ModelClass
 */
function _getRequestModelClass(models:Function, requestUri:string) {
// reqParts = [full_match, model(plural), id, property]
var reqParts = /^([^/(]+)(?:[(](.*)[)])?(?:[/]([A-Za-z]+))?/g.exec(requestUri);
if (!reqParts[3]) {
// No navigation property: the model is addressed directly.
return new Promise(function (resolve, reject) {
_getModelClass(models, reqParts[1]).then(function (ModelClass) {
if (ModelClass) {
let sRequestId = null;
// parse the key literal according to the model's (first) id property type
if(ModelClass.definition._ids && ModelClass.definition._ids[0]) {
sRequestId = _getIdByPropertyType(reqParts[2], ModelClass.definition._ids[0].property);
}
resolve({
modelClass: ModelClass,
foreignKeyFilter: undefined,
requestId: sRequestId
} as RequestModelClass);
} else {
resolve({
modelClass: undefined,
foreignKeyFilter: undefined,
requestId: undefined
} as RequestModelClass);
}
}
)
});
} else {
// Navigation via a relation, e.g. steps(1)/children.
return new Promise(function (resolve, reject) {
_getModelClass(models, reqParts[1]).then(function (BaseModelClass) {
if (BaseModelClass) {
var modelProps = BaseModelClass.definition.properties;
var modelRel = BaseModelClass.settings.relations;
if (modelRel && modelRel[reqParts[3]]) {
var oFilter = {}, sForeignKey = modelRel[reqParts[3]].foreignKey;
// default foreign-key name: <property>Id
if (sForeignKey == "") {
sForeignKey = reqParts[3] + "Id";
}
let idName = BaseModelClass.getIdName();
var sRequestId = _getIdByPropertyType(reqParts[2], BaseModelClass.definition.properties[idName]);
switch (modelRel[reqParts[3]].type) {
case lbConstants.LB_REL_HASMANY:
//TODO: composite id support
// hasMany: filter the related model by its foreign key
oFilter[sForeignKey] = sRequestId;
//oFilter[sForeignKey] = reqParts[2];
if (!oFilter[sForeignKey]) {
reject("Invalid id");
} else {
_getModelClass(models, modelRel[reqParts[3]].model).then(function (ModelClass) {
resolve({
modelClass: ModelClass,
foreignKeyFilter: oFilter,
requestId: sRequestId
} as RequestModelClass);
});
}
break;
case lbConstants.LB_REL_BELONGSTO:
// belongsTo: load the base instance to read the parent's id off it
BaseModelClass.findById(sRequestId, function (error, instance) {
if (instance) {
_getModelClass(models, modelRel[reqParts[3]].model).then(function (ModelClass) {
let idName = ModelClass.getIdName();
oFilter[idName] = instance[sForeignKey];
resolve({
modelClass: ModelClass,
foreignKeyFilter: oFilter,
requestId: sRequestId
} as RequestModelClass);
});
} else {
reject("Entity not found!");
}
});
break;
case lbConstants.LB_REL_HASONE:
//TODO: composite id support
// hasOne: same foreign-key filter as hasMany
oFilter[sForeignKey] = sRequestId;
//oFilter[sForeignKey] = reqParts[2];
if (!oFilter[sForeignKey]) {
reject("Invalid id");
} else {
_getModelClass(models, modelRel[reqParts[3]].model).then(function (ModelClass) {
resolve({
modelClass: ModelClass,
foreignKeyFilter: oFilter,
requestId: sRequestId
} as RequestModelClass);
});
}
break;
default:
// other relation kinds are rejected
var str = modelRel[reqParts[3]].type + " relations not supported yet";
logger.warn(str);
reject(str);
break;
//TODO: lbConstants.LB_REL_HASONE
}
} else {
// property is not a known relation: fall back to the base model itself
resolve({modelClass: BaseModelClass} as RequestModelClass);
}
} else {
resolve({
modelClass: undefined,
foreignKeyFilter: undefined,
requestId: undefined
} as RequestModelClass);
}
});
});
}
}
/**
 * get the Model for a className. The name may be the model's singular name
 * or its plural (settings.plural, or the default name + 's').
 * Resolves to undefined for "$metadata" or when no model matches.
 * @param {[type]} models [description]
 * @param {[type]} className The name of the class
 * @return {[type]} Promise that resolves to a ModelClass
 */
function _getModelClass(models:any, className:string) {
    return new Promise<any>((resolve, reject) => {
        var ModelClass;
        if (className === "$metadata") {
            // BUGFIX: return after resolving — previously execution fell
            // through and pointlessly scanned all models.
            resolve(undefined);
            return;
        }
        if (className.indexOf('(') !== -1) {
            // request for a single entity object: strip the key part
            className = className.substr(0, className.indexOf('('));
        } else {
            // try the singular class name first
            ModelClass = models[className];
        }
        // Fall back to matching the plural definition (collection request).
        if (!ModelClass) {
            for (let modelStr in models) {
                var model = models[modelStr];
                if (model.definition.settings.plural === className) {
                    ModelClass = model;
                    break;
                }
                // default plural: model name + 's'
                if (model.definition.name + 's' === className) {
                    ModelClass = model;
                    break;
                }
            }
        }
        resolve(ModelClass);
    })
}
/**
* converts a loopback datatype into a OData datatype
* @param property, loopback property
* @private
*/
function _convertType(property:LoopbackModelProperty):ODataType {
var retValue:ODataType;
var dbType = property.type.name;
switch (dbType) {
case "String":
retValue = ODataType.EDM_STRING;
break;
case "Date":
retValue = ODataType.EDM_DATETIME
break;
case "Number":
if (property.id) {
retValue = ODataType.EDM_INT32
} else {
retValue = ODataType.EDM_DECIMAL
}
break;
case "Boolean":
retValue = ODataType.EDM_BOOLEAN
break;
default:
break;
}
return retValue;
} | the_stack |
import test from 'ava';
import {JsonPropertyOrder} from '../src/decorators/JsonPropertyOrder';
import {ObjectMapper} from '../src/databind/ObjectMapper';
import {JsonProperty} from '../src/decorators/JsonProperty';
import {JsonRootName} from '../src/decorators/JsonRootName';
import {JsonClassType} from '../src/decorators/JsonClassType';
import {JsonGetter} from '../src/decorators/JsonGetter';
// Baseline: without @JsonPropertyOrder the key order is whatever the mapper
// produces naturally, so only an order-insensitive deepEqual is asserted.
test('class without @JsonPropertyOrder', t => {
  class User {
    @JsonProperty() @JsonClassType({type: () => [String]})
    name: string;
    @JsonProperty() @JsonClassType({type: () => [String]})
    email: string;
    @JsonProperty() @JsonClassType({type: () => [Number]})
    id: number;

    constructor(id: number, email: string, name: string) {
      this.id = id;
      this.email = email;
      this.name = name;
    }
  }

  const user = new User(1, 'john.alfa@gmail.com', 'John Alfa');

  const objectMapper = new ObjectMapper();
  const jsonData = objectMapper.stringify<User>(user);
  t.deepEqual(JSON.parse(jsonData), JSON.parse('{"id":1,"email":"john.alfa@gmail.com","name":"John Alfa"}'));
});
// A class-level @JsonPropertyOrder value fully fixes the key order,
// so the exact serialized string is asserted.
test('@JsonPropertyOrder at class level with value', t => {
  @JsonPropertyOrder({value: ['email', 'id', 'name']})
  class User {
    @JsonProperty() @JsonClassType({type: () => [String]})
    name: string;
    @JsonProperty() @JsonClassType({type: () => [String]})
    email: string;
    @JsonProperty() @JsonClassType({type: () => [Number]})
    id: number;

    constructor(id: number, email: string, name: string) {
      this.id = id;
      this.email = email;
      this.name = name;
    }
  }

  const user = new User(1, 'john.alfa@gmail.com', 'John Alfa');

  const objectMapper = new ObjectMapper();
  const jsonData = objectMapper.stringify<User>(user);
  t.is(jsonData, '{"email":"john.alfa@gmail.com","id":1,"name":"John Alfa"}');
});
// With a partial order, the listed properties come first (in the given
// order); the remaining properties follow in unspecified order, hence the
// startsWith + deepEqual combination.
test('@JsonPropertyOrder at class level with partial order', t => {
  @JsonPropertyOrder({value: ['email', 'lastname']})
  class User {
    @JsonProperty() @JsonClassType({type: () => [String]})
    email: string;
    @JsonProperty() @JsonClassType({type: () => [Number]})
    id: number;
    @JsonProperty() @JsonClassType({type: () => [String]})
    firstname: string;
    @JsonProperty() @JsonClassType({type: () => [String]})
    lastname: string;

    constructor(id: number, email: string, firstname: string, lastname: string) {
      this.id = id;
      this.email = email;
      this.firstname = firstname;
      this.lastname = lastname;
    }
  }

  const user = new User(1, 'john.alfa@gmail.com', 'John', 'Alfa');

  const objectMapper = new ObjectMapper();
  const jsonData = objectMapper.stringify<User>(user);
  t.assert(jsonData.startsWith('{"email":"john.alfa@gmail.com","lastname":"Alfa",'));
  t.deepEqual(JSON.parse(jsonData), JSON.parse('{"email":"john.alfa@gmail.com","lastname":"Alfa","id":1,"firstname":"John"}'));
});
// `alphabetic: true` sorts all keys alphabetically: email < id < name.
test('@JsonPropertyOrder at class level with alphabetic order', t => {
  @JsonPropertyOrder({alphabetic: true})
  class User {
    @JsonProperty() @JsonClassType({type: () => [String]})
    name: string;
    @JsonProperty() @JsonClassType({type: () => [String]})
    email: string;
    @JsonProperty() @JsonClassType({type: () => [Number]})
    id: number;

    constructor(id: number, email: string, name: string) {
      this.id = id;
      this.email = email;
      this.name = name;
    }
  }

  const user = new User(1, 'john.alfa@gmail.com', 'John Alfa');

  const objectMapper = new ObjectMapper();
  const jsonData = objectMapper.stringify<User>(user);
  t.is(jsonData, '{"email":"john.alfa@gmail.com","id":1,"name":"John Alfa"}');
});
// Explicit `value` entries take priority; the remaining keys are then
// sorted alphabetically (name first, then email < id).
test('@JsonPropertyOrder at class level with value and alphabetic order', t => {
  @JsonPropertyOrder({value: ['name'], alphabetic: true})
  class User {
    @JsonProperty() @JsonClassType({type: () => [Number]})
    id: number;
    @JsonProperty() @JsonClassType({type: () => [String]})
    name: string;
    @JsonProperty() @JsonClassType({type: () => [String]})
    email: string;

    constructor(id: number, email: string, name: string) {
      this.id = id;
      this.email = email;
      this.name = name;
    }
  }

  const user = new User(1, 'john.alfa@gmail.com', 'John Alfa');

  const objectMapper = new ObjectMapper();
  const jsonData = objectMapper.stringify<User>(user);
  t.is(jsonData, '{"name":"John Alfa","email":"john.alfa@gmail.com","id":1}');
});
// Ordering is applied inside the root wrapper object produced by
// @JsonRootName combined with the WRAP_ROOT_VALUE serialization feature.
test('@JsonPropertyOrder at class level with value and @JsonRootName', t => {
  @JsonRootName()
  @JsonPropertyOrder({value: ['email', 'id', 'name']})
  class User {
    @JsonProperty() @JsonClassType({type: () => [String]})
    name: string;
    @JsonProperty() @JsonClassType({type: () => [String]})
    email: string;
    @JsonProperty() @JsonClassType({type: () => [Number]})
    id: number;

    constructor(id: number, email: string, name: string) {
      this.id = id;
      this.email = email;
      this.name = name;
    }
  }

  const user = new User(1, 'john.alfa@gmail.com', 'John Alfa');

  const objectMapper = new ObjectMapper();
  objectMapper.defaultStringifierContext.features.serialization.WRAP_ROOT_VALUE = true;
  const jsonData = objectMapper.stringify<User>(user);
  t.is(jsonData, '{"User":{"email":"john.alfa@gmail.com","id":1,"name":"John Alfa"}}');
});
// @JsonPropertyOrder placed on an Array-typed property applies the order
// to each element of the array value, not to the owner object itself.
test('@JsonPropertyOrder at property level on Array', t => {
  class Book {
    @JsonProperty() @JsonClassType({type: () => [String]})
    name: string;
    @JsonProperty() @JsonClassType({type: () => [String]})
    category: string;
    @JsonProperty() @JsonClassType({type: () => [Number]})
    id: number;

    constructor(id: number, name: string, category: string) {
      this.id = id;
      this.name = name;
      this.category = category;
    }
  }

  class Writer {
    @JsonProperty() @JsonClassType({type: () => [Number]})
    id: number;
    @JsonProperty() @JsonClassType({type: () => [String]})
    name: string;

    @JsonProperty()
    @JsonPropertyOrder({value: ['category', 'id', 'name']})
    @JsonClassType({type: () => [Array, [Book]]})
    books: Book[] = [];

    constructor(id: number, name: string, @JsonClassType({type: () => [Array, [Book]]}) books: Book[]) {
      this.id = id;
      this.name = name;
      this.books = books;
    }
  }

  const book = new Book(42, 'Learning TypeScript', 'Web Development');
  const writer = new Writer(1, 'John', [book]);

  const objectMapper = new ObjectMapper();
  const jsonData = objectMapper.stringify<Writer>(writer);
  t.assert(jsonData.includes('[{"category":"Web Development","id":42,"name":"Learning TypeScript"}]'));
  // eslint-disable-next-line max-len
  t.deepEqual(JSON.parse(jsonData), JSON.parse('{"id":1,"name":"John","books":[{"category":"Web Development","id":42,"name":"Learning TypeScript"}]}'));
});
// @JsonPropertyOrder on a Map-typed property orders the keys of each Map
// *value*; the Map's own entry (insertion) order is preserved — note that
// 'book 2' was inserted before 'book 1' and appears first.
test('@JsonPropertyOrder at property level on Map', t => {
  class Book {
    @JsonProperty() @JsonClassType({type: () => [String]})
    name: string;
    @JsonProperty() @JsonClassType({type: () => [String]})
    category: string;
    @JsonProperty() @JsonClassType({type: () => [Number]})
    id: number;

    constructor(id: number, name: string, category: string) {
      this.id = id;
      this.name = name;
      this.category = category;
    }
  }

  class Writer {
    @JsonProperty() @JsonClassType({type: () => [Number]})
    id: number;
    @JsonProperty() @JsonClassType({type: () => [String]})
    name: string;

    @JsonProperty()
    @JsonClassType({type: () => [Map, [String, Book]]})
    @JsonPropertyOrder({value: ['category', 'id', 'name']})
    bookMap: Map<string, Book> = new Map<string, Book>();

    constructor(id: number, name: string) {
      this.id = id;
      this.name = name;
    }
  }

  const book1 = new Book(42, 'Learning TypeScript', 'Web Development');
  const book2 = new Book(21, 'Learning Spring', 'Java');
  const writer = new Writer(1, 'John');
  writer.bookMap.set('book 2', book2);
  writer.bookMap.set('book 1', book1);

  const objectMapper = new ObjectMapper();
  const jsonData = objectMapper.stringify<Writer>(writer);
  // eslint-disable-next-line max-len
  t.assert(jsonData.includes('{"book 2":{"category":"Java","id":21,"name":"Learning Spring"},"book 1":{"category":"Web Development","id":42,"name":"Learning TypeScript"}}'));
  // eslint-disable-next-line max-len
  t.deepEqual(JSON.parse(jsonData), JSON.parse('{"bookMap":{"book 2":{"category":"Java","id":21,"name":"Learning Spring"},"book 1":{"category":"Web Development","id":42,"name":"Learning TypeScript"}},"id":1,"name":"John"}'));
});
// @JsonPropertyOrder attached to a @JsonGetter method applies the order to
// the elements of the array that the getter returns.
test('@JsonPropertyOrder at method level', t => {
  class Book {
    @JsonProperty() @JsonClassType({type: () => [String]})
    name: string;
    @JsonProperty() @JsonClassType({type: () => [String]})
    category: string;
    @JsonProperty() @JsonClassType({type: () => [Number]})
    id: number;

    constructor(id: number, name: string, category: string) {
      this.id = id;
      this.name = name;
      this.category = category;
    }
  }

  class Writer {
    @JsonProperty() @JsonClassType({type: () => [Number]})
    id: number;
    @JsonProperty() @JsonClassType({type: () => [String]})
    name: string;

    @JsonProperty()
    @JsonClassType({type: () => [Array, [Book]]})
    books: Book[] = [];

    constructor(id: number, name: string, @JsonClassType({type: () => [Array, [Book]]}) books: Book[]) {
      this.id = id;
      this.name = name;
      this.books = books;
    }

    @JsonGetter()
    @JsonPropertyOrder({value: ['category', 'id', 'name']})
    @JsonClassType({type: () => [Array, [Book]]})
    getBooks(): Book[] {
      return this.books;
    }
  }

  const book = new Book(42, 'Learning TypeScript', 'Web Development');
  const writer = new Writer(1, 'John', [book]);

  const objectMapper = new ObjectMapper();
  const jsonData = objectMapper.stringify<Writer>(writer);
  t.assert(jsonData.includes('[{"category":"Web Development","id":42,"name":"Learning TypeScript"}]'));
  // eslint-disable-next-line max-len
  t.deepEqual(JSON.parse(jsonData), JSON.parse('{"id":1,"name":"John","books":[{"category":"Web Development","id":42,"name":"Learning TypeScript"}]}'));
});
"use strict";
// uuid: 3b581118-d68b-4f8e-8663-99ec6d761d04
// ------------------------------------------------------------------------
// Copyright (c) 2018 Alexandre Bento Freire. All rights reserved.
// Licensed under the MIT License+uuid License. See License.txt for details
// ------------------------------------------------------------------------
// Implements story class
/** @module end-user | The lines below convey information for the end-user */
/**
* ## Description
*
* A **story** is the entry point of ABeamer web browser library.
* It has the following functions:
*
* - Manage multiple scenes, including insert and remove.
* - Manage scene transitions.
* - Manage flyovers.
* - Start the rendering process.
* - Communicate with the render server agent.
* - Load the complete story from a configuration file.
*
* The workflow summary is:
*
* - A story automatically creates DOM scenes.
* - In each scene, the user adds its animations.
* - The user executes `story.render` which will process the animation pipeline
* frame by frame and sends it to the render server agent.
* - The render server agent will communicate with a headless server such as puppeteer
* to store each frame on the disk.
*
* @see workflow
*/
namespace ABeamer {
// #generate-group-section
// ------------------------------------------------------------------------
// Story
// ------------------------------------------------------------------------
// The following section contains data for the end-user
// generated by `gulp build-definition-files`
// -------------------------------
// #export-section-start: release
/** Callback invoked by a WaitFunc to signal that its wait has finished. */
export type DoneFunc = () => void;

/** Function executed while the story is waiting; must call `onDone` when complete. */
export type WaitFunc = (args: ABeamerArgs, params: AnyParams,
  onDone: DoneFunc) => void;

/** Registry of wait functions to be executed before rendering continues. */
export interface WaitMan {
  addWaitFunc(func: WaitFunc, params: AnyParams): void;
}

/** Render direction: 1 renders forward, -1 renders backwards. */
export type DirectionInt = -1 | 1;

/** Log level value — see the `LL_*` constants (e.g. `LL_VERBOSE`). */
export type LogType = 0 | 1 | 2;

/** Scene by Object, index or name */
export type SceneHandler = Scene | uint | string;

/** Envelope holding the serialized story configuration (used by teleporting). */
export interface StoryConfig {
  config: {
    abeamer: any;
  };
}
export interface StoryMetadata {
version?: string;
author?: string;
email?: string;
copyrights?: string;
categories?: string[];
keywords?: string[];
comments?: string[];
rating?: uint;
/** ISO DateTime stamp of the creation time. */
createdDateTime?: string;
/** ISO DateTime stamp of the last modified time. */
modifiedDateTime?: string;
}
/** Parameters passed to the story when it's created */
export interface CreateStoryParams {
/** Adds automatically the default scenes. */
dontAddDefaultScenes?: boolean;
/** Defines if the story is going to be teleported. */
toTeleport?: boolean;
/**
* Set this parameter to false,
* if you need to inject html/css code into the page before the teleporting
* process starts.
* @default true
*/
toStartTeleporting?: boolean;
/**
* Set this value, if you need use a Virtual Story Adapter inside of the
* default DOM Adapter.
*/
storyAdapter?: SceneAdapter;
/**
* Defines the log level. Use `LL_VERBOSE` for debugging.
* The server can modify this mode.
* Set this parameter to log during the constructor phase,
* otherwise is also possible to set later via `story.logLevel`.
*/
logLevel?: uint;
}
/**
 * ABeamer still has to render all the previous frames
 * of the active scene, bypassing the middle frames,
 * but they won't be rendered to disk and any unnecessary frames are skipped.
 */
export interface RenderFrameOptions {
/**
* First render frame.
* If `startScene` isn't defined it will be relative to the story, otherwise is
* relative to the `startScene`.
*
* @default 0
*/
renderPos?: TimeHandler;
/**
* Total number of frames to render.
*
* **EXPERIMENTAL** Use a negative value to render backwards.
* For backward rendering, ABeamer first has to consume all the frames forward,
* bypassing all middle frames, only after can render backwards.
*
* @default the total number of frames
*/
renderCount?: TimeHandler;
/**
 * First scene to be rendered.
 * Before rendering the `startScene`,
 * ABeamer first has to consume all the frames until it reaches the
 * beginning of that scene.
 * Accepts a 'Scene Zero-Based Index', a 'Name' or a 'Scene Object'.
 */
startScene?: SceneHandler;
/**
* Last scene to be rendered.
* Accepts by 'Scene Zero-Based Index', 'Name' or by 'Scene Object'.
*/
endScene?: SceneHandler;
}
// #export-section-end: release
// -------------------------------
// ------------------------------------------------------------------------
// Implementation
// ------------------------------------------------------------------------
/** Pairing of a wait function with the parameters it will receive. */
export interface _WaitFunc {
  func: WaitFunc;
  params: AnyParams;
}

/** Default WaitMan implementation: an ordered list of pending wait functions. */
export class _WaitMan implements WaitMan {
  // Wait functions in insertion order.
  funcs: _WaitFunc[] = [];
  // Position within `funcs` — presumably the next entry to execute;
  // consumed outside this class (TODO confirm at the call site).
  pos: uint = 0;

  /** Queues `func` to be invoked later with `params`. */
  addWaitFunc(func: WaitFunc, params: AnyParams): void {
    this.funcs.push({ func, params });
  }
}
interface _RenderFrameOptionsEx extends RenderFrameOptions {
playSpeedMs?: uint;
}
/**
* Implementation of _Story class.
*/
export class _Story implements _StoryImpl {
// protected
protected _isServerReady: boolean = false;
protected _width: uint = 0;
protected _height: uint = 0;
protected _frameCount: uint = 0;
protected _frameCountChanged: boolean = false;
protected _renderFramePos: uint = 0;
protected _renderDir: -1 | 1 = 1;
protected _renderFrameEnd: uint = 0;
protected _renderFrameCount: uint = 0;
protected _renderStage: uint = 0;
protected _renderPlaySpeed: uint = 0;
protected _renderTimeStamp: Date;
protected _isRendering: boolean = false;
protected _scenes: _SceneImpl[] = [];
protected _curScene: _SceneImpl | undefined = undefined;
protected _renderTimer: number | undefined;
protected _wkFlyovers: _WorkFlyover[] = [];
protected _queueRenders: _RenderFrameOptionsEx[] = [];
protected _isTeleporting: boolean;
protected _metadata: StoryMetadata = {};
_teleporter: _Teleporter;
_waitMan: _WaitMan;
// scene access
_args: ABeamerArgs = {
renderNr: 0,
stage: AS_UNKNOWN,
vars: _vars,
};
// public
/**
* Frames per second.
* All the time values is converted into frames using this value.
* Defined during the call to `createStory`.
*
* #end-user @readonly
*/
fps: uint;
/**
* Default unit used when input time values are used in numeric forms.
* Supports minutes, seconds, milliseconds and frames.
* `TimeUnit.f = 0;`
* `TimeUnit.ms = 1;`
* `TimeUnit.s = 2;`
* `TimeUnit.m = 3;`
*/
defaultUnit: TimeUnit = TimeUnit.f;
/**
* True if it's running a supporting server program for frame storage.
*
* #end-user @readonly
*/
hasServer: boolean = false;
/**
* The name of the server.
* The server will assign this property value.
*
* #end-user @readonly
*/
serverName: string;
/**
* Provides information about the running server.
*/
serverFeatures: ServerFeatures;
/**
 * Story metadata (author, version, keywords, ...).
 *
 * #end-user @readonly
 */
get metadata(): StoryMetadata {
  return this._metadata;
}
/**
* Numerical value of the frame width in pixels.
*
* #end-user @readonly
*/
get width(): uint {
return this._width;
}
/**
* Numerical value of the frame height in pixels.
*
* #end-user @readonly
*/
get height(): uint {
return this._height;
}
/**
* Total number of frames from all the scenes.
*
* #end-user @readonly
*/
get frameCount(): uint {
this._calcFrameCount();
return this._frameCount;
}
/**
* Render direction. 1 for forward, -1 for backwards.
* Defined during the call to `story.render`.
*
* #end-user @readonly
*/
get renderDir(): DirectionInt { return this._renderDir; }
/**
* The number of the last frame to be rendered within the story.
* Defined during the call to `story.render`.
* This value doesn't changes during the rendering process.
*
* #end-user @readonly
*/
get renderFrameEnd(): uint { return this._renderFrameEnd; }
/**
* The number of the current frame being rendered.
* Defined during the call to `story.render`.
* This value changes during the rendering process.
*
* #end-user @readonly
*/
get renderFramePos(): uint { return this._renderFramePos; }
/**
* The number of the current frame being rendered.
* Defined during the call to `story.render`.
* This value doesn't changes during the rendering process.
*
* #end-user @readonly
*/
get renderFrameCount(): uint { return this._renderFrameCount; }
/**
* True if the rendering has started.
* Use `finishRender` to abort the rendering process.
*
* #end-user @readonly
*/
get isRendering(): boolean { return this._isRendering; }
/**
* True if it's teleporting.
*
* #end-user @readonly
*/
get isTeleporting(): boolean { return this._isTeleporting; }
/**
* Returns ABeamerArgs.
* This should be used only in specific cases such the access to --var.
* In most cases, this property is passed as an argument to plugins and callbacks.
*
* #end-user @readonly
*/
get args(): ABeamerArgs { return this._args; }
/**
* If true, the input parameters have strict checks and throw errors if fails.
* If false, ABeamer is more relax and bypasses errors.
* The server can modify this mode on startup.
*
* @default false
*/
_strictMode: boolean = false;
get strictMode(): boolean {
return this._strictMode;
}
set strictMode(newStrictMode: boolean) {
this._strictMode = newStrictMode;
this._args.isStrict = newStrictMode;
}
/**
* Defines the log level. Use `LL_VERBOSE` for debugging.
* The server can modify this mode.
*/
_logLevel: uint = 0;
get logLevel(): uint { return this._logLevel; }
set logLevel(newLogLevel: uint) {
this._logLevel = newLogLevel;
this._isVerbose = newLogLevel >= LL_VERBOSE;
this._args.isVerbose = this._isVerbose;
}
_isVerbose: boolean;
get isVerbose(): boolean { return this._isVerbose; }
/**
* List of the scenes.
*
* #end-user @readonly
*/
get scenes(): Scene[] { return this._scenes as Scene[]; }
/**
* Current active and visible scene.
* Valid both for adding animations and rendering.
* Use `gotoScene` to change this value.
*
* #end-user @readonly
*/
get curScene(): Scene | undefined { return this._curScene; }
/**
* Allows flyovers to find elements on the document body or
* in the virtual world.
*/
storyAdapter: SceneAdapter;
/**
* If true, it terminates the server
* when the render call finishes or if it's aborted.
* For a serverless execution, it has no effect.
*
* @default true
*/
toExitOnRenderFinished: boolean = true;
// @TODO: Implement the usage of this event
/**
* Event triggered when render finished the rendering process.
* **EXPERIMENTAL** The behavior can changed or be deprecated.
*/
onRenderFinished?: (args?: ABeamerArgs) => void;
/**
* Event triggered by server, stating that it's ready to receive frames.
* A server is usually a headless capture program such as puppeteer.
* If the animation is running without server, this event is never fired.
*/
onServerReady?: (args?: ABeamerArgs) => void;
/**
* Event triggered before a frame is rendered.
*/
onBeforeRenderFrame?: (args?: ABeamerArgs) => void;
/**
* Event triggered after a frame is rendered.
*/
onAfterRenderFrame?: (args?: ABeamerArgs) => void;
/**
* Event triggered during the rendering process after a frame is rendered
* and is ready to move to following frame.
*/
onNextFrame?: (args?: ABeamerArgs) => void;
/**
* Maps Ids into Virtual Elements
* Used only in non-DOM elements such as WebGL elements.
*
* @param id virtual element Id without '%'
*/
onGetVirtualElement?: (id: string, args?: ABeamerArgs) => VirtualElement;
_virtualAnimators?: VirtualAnimator[] = [];
/**
* @deprecated Use addVirtualAnimator instead.
* The direct access to virtualAnimators in future versions will likely be disabled
*
*/
get virtualAnimators(): VirtualAnimator[] {
console.log(`virtualAnimators has been deprecated, use addVirtualAnimator`);
return this._virtualAnimators;
}
/**
* Internal map from selectors to VirtualAnimators.
* Each animator must have a unique selector, but future versions
* might support different animators have the selector.
*/
protected _virtualAnimatorMap: { [selector: number]: VirtualAnimator } = {};
/**
 * Sets up the Story and adds the Default Scenes.
 */
constructor(cfg: _InnerConfig, createParams: CreateStoryParams) {
  // The query string carries server-side overrides (log level, frame size,
  // teleport mode, render vars).
  const urlParams = window.location.search || '';
  const args = this._args;
  const self = this;

  this.logLevel = createParams.logLevel || LL_SILENT;
  if (urlParams) {
    // `replace` is used purely as a scanner: the callback captures the
    // matched value and the returned string is discarded.
    urlParams.replace(new RegExp(_SRV_CNT.LOG_LEVEL_SUFFIX + '(\\d+)'), (_all, p1) => {
      this.logLevel = parseInt(p1); // don't use _logLevel
      return '';
    });
  }

  _initBrowser(args);

  this._waitMan = new _WaitMan();
  args.waitMan = this._waitMan;

  // Default to the DOM adapter bound to the `.abeamer-story` element.
  this.storyAdapter = createParams.storyAdapter || new _DOMSceneAdapter('.abeamer-story');

  // fps: explicit config wins, otherwise read from the story adapter.
  cfg.fps = cfg.fps || this.storyAdapter.getProp('fps', args) as uint;
  throwIfI8n(!isPositiveNatural(cfg.fps), Msgs.MustNatPositive, { p: 'fps' });
  _vars.fps = cfg.fps;

  // Resolves a frame dimension: config value, then adapter property,
  // then a URL override; validates and writes it back to the adapter.
  function setDim(srvPropPrefix: string, cfgValue: uint, propName: string): uint {
    let res = cfgValue || self.storyAdapter.getProp(propName, args) as uint;
    if (urlParams) {
      urlParams.replace(new RegExp(srvPropPrefix + '(\\d+)'), (_all, p1) => {
        const qsValue = parseInt(p1);
        res = qsValue || res;
        return '';
      });
    }
    throwIfI8n(!isPositiveNatural(res), Msgs.MustNatPositive, { p: propName });
    self.storyAdapter.setProp(propName, res, args);
    return res;
  }

  this._width = cfg.width = setDim(_SRV_CNT.WIDTH_SUFFIX, cfg.width, 'frame-width');
  _vars.frameWidth = cfg.width;
  this._height = cfg.height = setDim(_SRV_CNT.HEIGHT_SUFFIX, cfg.height, 'frame-height');
  _vars.frameHeight = cfg.height;

  // setting clip-path is used because of slide transitions that display outside
  // the story boundaries
  this.storyAdapter.setProp('clip-path',
    `polygon(0 0, 0 ${cfg.height}px, ${cfg.width}px ${cfg.height}px, ${cfg.width}px 0px)`, args);

  args.story = this;
  this.fps = cfg.fps;
  this._isTeleporting = createParams.toTeleport || false;

  if (urlParams) {
    // teleport mode override from the query string
    urlParams.replace(new RegExp(_SRV_CNT.TELEPORT_SUFFIX + '(\\w+)'), (_all, p1) => {
      this._isTeleporting = p1 === 'true';
      return '';
    });
    // server name: marks the story as server-driven and configures its features
    urlParams.replace(new RegExp(_SRV_CNT.SERVER_SUFFIX + '(\\w+)'), (_all, p1) => {
      this.hasServer = true;
      this.storyAdapter.setProp('class', 'has-server', args);
      this.serverName = p1;
      this.serverFeatures = _setServer(this.serverName);
      return '';
    });
    // render vars: `key=value` pairs; kebab-case keys become camelCase
    urlParams.replace(new RegExp(_SRV_CNT.RENDER_VAR_SUFFIX + '([^&]+)', 'g'), (_all, p1) => {
      p1 = decodeURIComponent(p1);
      // tslint:disable-next-line:prefer-const
      let [, key, value] = p1.match(/^([^=]+)=(.*)$/) || ['', '', ''];
      if (!key) {
        throw `var ${p1} requires the key field`;
      }
      key = key.replace(/-(\w)/g, (_all2, p: string) => p.toUpperCase());
      args.vars[key] = value;
      return '';
    });
  }

  args.hasServer = this.hasServer;
  args.isTeleporting = this._isTeleporting;
  args.vars.isTeleporting = args.isTeleporting;

  this._teleporter = new _Teleporter(this, cfg, this._isTeleporting);

  if (this._isTeleporting && createParams.toStartTeleporting !== false) {
    this.startTeleporting();
  }

  // #debug-start
  if (this._isVerbose) {
    this.logFrmt('story', [
      ['fps', this.fps],
      ['urlParams', urlParams],
      ['hasServer', this.hasServer.toString()],
      ['hasStory', this._teleporter.hasStory.toString()],
      ['frame-width', this._width],
      ['frame-height', this._height],
      ['browser.isMsIE', browser.isMsIE.toString()],
      ['browser.vendorPrefix', browser.vendorPrefix],
      ['toTeleport', this._isTeleporting.toString()],
    ]);
  }
  // #debug-end

  // A received (teleported) story rebuilds itself from the stored config;
  // otherwise the default DOM scenes are added.
  if (this._teleporter.hasStory) {
    this._teleporter._rebuildStory();
  } else {
    if (!createParams.dontAddDefaultScenes) { this.addDefaultScenes(); }
  }
}
// ------------------------------------------------------------------------
// Virtual Animators
// ------------------------------------------------------------------------
/**
 * Registers a [](VirtualAnimator) with the story.
 * Use [story.removeVirtualAnimator](#storyremovevirtualanimator) to detach it again.
 * Throws when the animator has no selector or the selector is already in use.
 */
addVirtualAnimator(animator: VirtualAnimator): void {
  const { selector } = animator;
  if (!selector) {
    throwI8n(Msgs.NoEmptySelector);
  }
  if (this._virtualAnimatorMap[selector]) {
    throwErr(`The selector must be unique`);
  }
  // keep the list and the selector map in sync
  this._virtualAnimatorMap[selector] = animator;
  this._virtualAnimators.push(animator);
}
/**
* Removes a [](VirtualAnimator) to the story.
* Use [story.addVirtualAnimator](#storyaddvirtualanimator) to add it to the story.
*/
removeVirtualAnimator(animator: VirtualAnimator): void {
const index = this._virtualAnimators.indexOf(animator);
if (index !== -1) {
this._virtualAnimators.splice(index, 1);
// Although there is _virtualAnimatorMap and provides faster access,
// until the access to virtualAnimators has been disabled, it can't be used.
delete this._virtualAnimatorMap[animator.selector];
}
}
// ------------------------------------------------------------------------
// Scenes
// ------------------------------------------------------------------------
/**
 * Adds scenes defined in the html by `.abeamer-scene` class.
 * These classes are added automatically during Story constructor.
 * Add only after a `story.reset()`.
 */
addDefaultScenes(): void {
  const story = this;
  // each matching DOM element becomes one scene, in document order
  $('.abeamer-scene').each((_index, htmlElement) => {
    story.addScene($(htmlElement));
  });
  // make the first scene the active one
  if (this._scenes.length) { this.gotoScene(this._scenes[0]); }
}
/**
 * Adds a scene to the story.
 * HTML elements with abeamer-scene class are added automatically.
 *
 * @param sceneSelector DOM selector, JQuery object or Virtual Scene
 * @returns A pointer to newly created scene
 */
addScene(sceneSelector: SceneSelector): Scene {
  // the new scene is chained after the current last scene, if any
  const lastScene = this._scenes.length
    ? this._scenes[this._scenes.length - 1] : undefined;
  const newScene = new _Scene(this as _StoryImpl, sceneSelector, lastScene);
  this._scenes.push(newScene);
  this._setFrameCountChanged();
  if (this._teleporter.active) {
    this._teleporter._addScene();
  }
  return newScene;
}
/**
 * Removes a scene from the scene list and drops its rendering pipeline,
 * but its DOM elements aren't removed.
 */
removeScene(scene: Scene): void {
  this._exceptIfRendering();
  const sceneImpl = scene as _SceneImpl;
  // hide first when removing the active scene
  if (this._curScene === scene) {
    sceneImpl._hide();
    this._curScene = undefined;
  }
  sceneImpl._remove();
  this._scenes.splice(scene.storySceneIndex, 1);
}
/**
 * Rewinds the animation to the start.
 * Deletes all the scenes and frames from the story.
 * The DOM is left untouched.
 */
clear(): void {
  this._exceptIfRendering();
  this.rewind();
  // detach the current scene before dropping the scene list
  this._internalGotoScene(undefined);
  this.scenes.length = 0;
  this._renderFramePos = 0;
  this._frameCount = 0;
}
// ------------------------------------------------------------------------
// Animations
// ------------------------------------------------------------------------
/**
* Adds a list scene/serial/parallel animations.
* In essence, it represents the complete storyline.
* Use this method to load the whole storyline from an external file.
* Otherwise is preferable to add animation scene by scene.
*/
addStoryAnimations(sceneSerialAnimes: Animations[][]): void {
sceneSerialAnimes.forEach((sceneSerialAnime, index) => {
this._scenes[index].addSerialAnimations(sceneSerialAnime);
});
}
/**
 * Adds a list of serial/parallel animations per scene Id.
 * It requires that each scene has defined the id (DOM attribute).
 * It bypasses invalid scene ids.
 */
addStoryAnimationsBySceneId(sceneSerialAnimes:
  { [sceneId: string]: Animation[][] }): void {
  // index the scenes by their id for O(1) lookup
  const scenesById: { [id: string]: _SceneImpl } = {};
  for (const scene of this._scenes) {
    scenesById[scene.id] = scene;
  }
  for (const id of Object.keys(sceneSerialAnimes)) {
    const targetScene = scenesById[id];
    if (targetScene) {
      targetScene.addSerialAnimations(sceneSerialAnimes[id]);
    }
  }
}
// ------------------------------------------------------------------------
// Flyovers
// ------------------------------------------------------------------------
/**
 * Adds a flyover, which is a function executed on every render step.
 *
 * @see flyovers
 */
addFlyover(handler: FlyoverHandler, params?: FlyoverParams): void {
  const wkFlyover = _buildWorkFlyover(handler, params, false, this._args);

  if (this._isTeleporting && this._args.stage !== AS_ADD_ANIMATION
    && this._scenes.length) {
    // While teleporting (outside the add-animation stage), record the flyover
    // as an `add-flyover` task on the first scene so it can be serialized
    // and replayed on the receiving side.
    const scene = this._scenes[0];
    scene.addAnimations([{
      tasks: [
        {
          handler: 'add-flyover',
          params: {
            handler: wkFlyover.name,
            params: wkFlyover.params,
          } as AddFlyoverTaskParams,
        },
      ],
    }]);
  } else {
    // Otherwise initialize it immediately and keep it in the active list.
    wkFlyover.func(wkFlyover, wkFlyover.params, TS_INIT, this._args);
    this._wkFlyovers.push(wkFlyover);
  }
}
// ------------------------------------------------------------------------
// Scene Methods
// ------------------------------------------------------------------------
/**
 * Changes the active and visible to a different scene.
 */
gotoScene(scene: Scene): void {
  // no-op when the scene is already active
  if (scene === this._curScene) { return; }
  this._exceptIfRendering();
  this._calcFrameCount();
  this._internalGotoScene(scene as _SceneImpl);
}
/**
 * Internal and faster version of gotoScene.
 * Hides the previous scene, shows the new one (when defined) and
 * updates the shared args.
 */
_internalGotoScene(scene: _SceneImpl | undefined): void {
  const prevScene = this._curScene;
  if (prevScene) {
    prevScene._hide();
  }
  this._curScene = scene;
  if (scene) {
    scene._show();
  }
  this._args.scene = scene;
}
/** Returns the Scene Object which has `sceneName`, undefined otherwise */
findSceneByName(sceneName: string): Scene {
  for (const candidate of this._scenes) {
    if (candidate.name === sceneName) {
      return candidate;
    }
  }
  return undefined;
}
// ------------------------------------------------------------------------
// Frame Methods
// ------------------------------------------------------------------------
/**
 * Signals that there was a change in the number of frames.
 * Usually used by `addAnimations`.
 */
_setFrameCountChanged(): void {
  // the actual recomputation happens lazily in _calcFrameCount
  this._frameCountChanged = true;
}
/**
 * Recomputes the total number of frames as well as other scene parameters
 * associated with frames. Cheap no-op while nothing changed.
 */
_calcFrameCount(): void {
  if (!this._frameCountChanged) {
    return;
  }
  // each scene receives its start frame and index, and reports its length
  let totalFrames = 0;
  for (let i = 0; i < this._scenes.length; i++) {
    totalFrames += this._scenes[i]._setStoryParams(totalFrames, i);
  }
  this._frameCountChanged = false;
  this._frameCount = totalFrames;
}
/**
 * Rewinds the animation to the start.
 */
rewind(): void {
  // simply jump to the very first frame of the story
  this.gotoFrame(0);
}
/**
 * Moves the current render Position when is not rendering by consuming the pipeline.
 * Use this only if you need to compute parameters at certain position.
 * Throws when `framePos` is outside the story frame range.
 */
gotoFrame(framePos: int): void {
  this._exceptIfRendering();
  this._calcFrameCount();
  const outOfRange = framePos < 0 || framePos >= this._frameCount;
  if (outOfRange) {
    throwI8n(Msgs.OutOfScope, { p: Msgs.pos });
  }
  this._internalGotoFrame(framePos);
}
  /** Positions the story on `framePos`, switching the active scene if needed. */
  protected _internalGotoFrame(framePos: int): void {
    let _curScene = this._curScene;
    // Switch scenes when the target frame lies outside the current scene.
    if (!_curScene
      || !_curScene._internalContainsFrame(framePos)) {
      _curScene = this._scenes.find(scene =>
        scene._internalContainsFrame(framePos));
      this._internalGotoScene(_curScene);
    }
    // NOTE(review): assumes some scene contains `framePos` (callers validate
    // the range first); otherwise `_curScene` is undefined here and this
    // dereference would fail — confirm callers always validate.
    _curScene._internalGotoFrame(framePos - _curScene.storyFrameStart);
  }
// ------------------------------------------------------------------------
// Transitions
// ------------------------------------------------------------------------
protected _setupTransitions(): void {
this._scenes.forEach((scene) => {
scene._setupTransition();
});
}
// ------------------------------------------------------------------------
// Teleporting
// ------------------------------------------------------------------------
/**
* Returns the animations, html, CSS as an object.
* Use only if `isTeleporting = true`.
* Send this information via Ajax to the remote server.
* Due CORS, it requires a live server to access CSS information.
* Set frameOpts, if you need segment rendering.
* Set isPretty = true, to test only, since this mode will return a formatted output but bigger in size.
*/
getStoryToTeleport(frameOpts?: RenderFrameOptions, isPretty?: boolean): string {
const cfg = this.getStoryToTeleportAsConfig(frameOpts);
return cfg !== undefined ?
JSON.stringify(cfg, undefined, isPretty ? 2 : undefined) : '';
}
/**
* Same as `getStoryToTeleport()` but it returns as `StoryConfig` object.
* Use this function instead of getStoryToTeleport, if you need to
* add extra fields.
* Modifying the content of `config.abeamer` is forbidden for 3rd-party
* remote server rendering.
*/
getStoryToTeleportAsConfig(frameOpts?: RenderFrameOptions): StoryConfig {
if (!this._isTeleporting) {
throw `getStoryToTeleport requires to be in teleporting mode`;
}
if (!this._calcRenderFrameOptions(frameOpts)) {
return undefined;
}
return this._teleporter._getStoryToTeleportAsConfig();
}
/**
* Stores the complete story on a file in the disk.
* Use this method only for testing.
* This method requires that:
*
* 1. Is on teleporting mode `isTeleporting === true`
* 2. The render server agent. `abeamer render ...`
*
* Use this method instead of `getStoryToTeleport`.
*/
teleport(frameOpts?: RenderFrameOptions, isPretty?: boolean) {
if (!this.hasServer) {
console.warn(`To teleport it requires the render server agent to be running`);
}
this._sendCmd(_SRV_CNT.MSG_TELEPORT, this.getStoryToTeleport(frameOpts, isPretty));
}
/**
* Use this method only if you have created the story with `toTeleport = true`
* and `toStartTeleporting = false`, because you need to inject
* html/css code into the page before the teleporting process starts.
* Otherwise, you won't need to call this method since it's called automatically
* if teleporting a story.
*/
startTeleporting(): void {
if (this._isTeleporting) {
this._teleporter.createSnapshot();
}
}
// ------------------------------------------------------------------------
// Render
// ------------------------------------------------------------------------
/**
* Throws exception if is rendering.
*/
_exceptIfRendering(): void {
if (this._isRendering) { throw "Render is still running"; }
}
  /**
   * Returns the play speed that best matches the fps,
   * i.e. the duration of one frame in milliseconds (1000 / fps).
   */
  bestPlaySpeed: () => uint = () => Math.abs(1000 / this.fps);
  /**
   * Resolves a `SceneHandler` into a concrete scene:
   * - object: assumed to already be a scene instance;
   * - string: looked up by scene name (throws when unknown);
   * - number: treated as an index into `_scenes` (throws when out of range).
   */
  protected _getSceneByHandler(scene: SceneHandler): _SceneImpl {
    switch (typeof scene) {
      case 'object': return scene as _SceneImpl;
      case 'string':
        const outScene = this.findSceneByName(scene as string);
        throwIfI8n(!outScene, Msgs.Unknown, { p: scene as string });
        return outScene as _SceneImpl;
      case 'number':
        const sceneIdx = scene as number;
        if (this._strictMode) {
          // NOTE(review): depends on `isNotNegativeNatural` semantics —
          // presumably true when the value is not a non-negative integer;
          // confirm against its definition.
          throwIfI8n(isNotNegativeNatural(sceneIdx),
            Msgs.MustNatNotNegative, { p: sceneIdx });
        }
        if (sceneIdx < 0 || sceneIdx >= this._scenes.length) {
          throwI8n(Msgs.OutOfScope, { p: Msgs.pos });
        }
        return this._scenes[sceneIdx];
    }
  }
  /**
   * Computes the render properties from the user frame params.
   * Resolves `renderPos`/`renderCount`/`startScene`/`endScene` into absolute
   * frame positions and stores the result in `_renderFramePos`, `_renderDir`,
   * `_renderFrameEnd` and `_renderFrameCount`.
   * Returns `false` when the resolved segment has zero frames.
   */
  protected _calcRenderFrameOptions(frameOpts: RenderFrameOptions): boolean {
    frameOpts = frameOpts || {};
    this._calcFrameCount();
    // Defaults: start at frame 0, render the whole story.
    let renderFramePos = parseTimeHandler(frameOpts.renderPos, this._args, 0, 0);
    let renderFrameCount = parseTimeHandler(frameOpts.renderCount,
      this._args, this._frameCount, this._frameCount);
    // A start scene shifts the position; without an explicit count, the
    // count shrinks by the same amount so the end stays put.
    if (frameOpts.startScene !== undefined) {
      const startScene = this._getSceneByHandler(frameOpts.startScene);
      renderFramePos += startScene.storyFrameStart;
      if (frameOpts.renderCount === undefined) {
        renderFrameCount -= startScene.storyFrameStart;
      }
    }
    // An end scene (without an explicit count) ends the segment at the
    // last frame of that scene.
    if (frameOpts.endScene !== undefined) {
      const endScene = this._getSceneByHandler(frameOpts.endScene);
      if (frameOpts.renderCount === undefined) {
        renderFrameCount = endScene.storyFrameStart
          + endScene.frameCount - renderFramePos;
      }
    }
    // needs at least one frame to render
    if (!renderFrameCount) { return false; }
    const renderFrameDir: -1 | 1 = renderFrameCount > 0 ? 1 : -1;
    if (renderFrameDir === -1) {
      throwErr('Reverse render isn\'t supported yet');
    }
    // End frame is inclusive when moving forward.
    const renderFrameEnd = renderFrameCount > 0
      ? renderFramePos + renderFrameCount - 1
      : renderFramePos + renderFrameCount;
    renderFrameCount = Math.abs(renderFrameCount);
    if (renderFramePos < 0 || renderFramePos >= this._frameCount) {
      throw "Render Pos is out of scope";
    }
    if (renderFrameEnd < -1 || renderFrameEnd > this._frameCount) {
      throw "Render Count is out of scope";
    }
    this._renderFramePos = renderFramePos;
    this._renderDir = renderFrameDir;
    this._renderFrameEnd = renderFrameEnd;
    this._renderFrameCount = renderFrameCount;
    return true;
  }
/**
* Starts the Rendering process.
* It can render the whole storyline or just a segment.
* Forward and backward rending is supported.
* If it has a server, such as headless webpage capture program, it will
* render a frame, and send a message to the server to store it on the disk.
* If it's running on the browser, it will render and wait `playSpeedMs` time.
*
* @param playSpeedMs Play speed in milliseconds,
* ignored on server mode. ABeamer doesn't guarantee the exact timing.
* If it's undefined, it will play at full speed.
*/
render(playSpeedMs?: uint | undefined, frameOpts?: RenderFrameOptions): void {
if (this.hasServer && this._isTeleporting) {
this.teleport(frameOpts, true);
this.exit();
return;
}
if (this._isRendering) {
frameOpts = frameOpts || {};
(frameOpts as _RenderFrameOptionsEx).playSpeedMs = playSpeedMs;
this._queueRenders.push(frameOpts);
return;
}
this._internalRender(playSpeedMs, frameOpts);
}
  /**
   * Starts an actual render pass: computes the frame range, resets the
   * render pipeline state and either begins the render loop (browser mode)
   * or tells the server agent it is ready (server mode).
   */
  protected _internalRender(playSpeedMs?: uint | undefined,
    frameOpts?: RenderFrameOptions): void {
    this._args.stage = AS_RENDERING;
    this._isRendering = true;
    // Nothing to render (e.g. empty segment): finish cleanly.
    if (!this._calcRenderFrameOptions(this._teleporter._fillFrameOpts(frameOpts))) {
      this.finishRender();
      return;
    }
    this._renderStage = 0;
    this._waitMan.funcs = [];
    this._waitMan.pos = 0;
    // Play speed only applies in browser mode; server captures run at full speed.
    this._renderPlaySpeed = !this.hasServer && playSpeedMs !== undefined
      && playSpeedMs > 0 ? playSpeedMs : 0;
    this._setupTransitions();
    this.storyAdapter.setProp('visible', true, this._args);
    if (!this.hasServer) {
      this._renderLoop();
    } else {
      this._sendCmd(_SRV_CNT.MSG_READY);
    }
  }
  /**
   * Ends the current rendering process.
   * If more renders were queued while this one ran, the next queued render
   * starts immediately; otherwise the server (if any) is told that rendering
   * finished, optionally exiting afterwards.
   */
  finishRender(): void {
    if (this._isRendering) {
      this._isRendering = false;
      if (this._renderTimer) {
        window.clearTimeout(this._renderTimer);
        this._renderTimer = undefined;
      }
      // Dequeue and start the next pending render, if any.
      if (this._queueRenders.length) {
        const queuedRender = this._queueRenders[0];
        this._queueRenders.splice(0, 1);
        this._internalRender(queuedRender.playSpeedMs, queuedRender);
        return;
      }
      this._args.stage = AS_UNKNOWN;
      this._sendCmd(_SRV_CNT.MSG_RENDER_FINISHED);
      if (this.toExitOnRenderFinished) {
        this.exit();
      }
    }
  }
  /**
   * Renders each frame at a pre-defined speed.
   * Implemented as a resumable state machine keyed by `_renderStage`:
   * asynchronous waits (via `_waitMan`) and the per-frame timer exit the
   * loop and later re-enter it at the stage where it left off.
   */
  protected _renderLoop() {
    let stage = this._renderStage;
    const waitMan = this._waitMan;
    while (true) {
      if (!this._isRendering) { return; }
      // Pending wait functions take priority; run one and resume the loop
      // from its completion callback.
      if (stage && waitMan.funcs.length > waitMan.pos) {
        this._renderStage = stage;
        const func = waitMan.funcs[waitMan.pos];
        waitMan.pos++;
        // All queued waits consumed: reset the queue.
        if (waitMan.funcs.length === waitMan.pos) {
          waitMan.pos = 0;
          waitMan.funcs = [];
        }
        func.func(this._args, func.params, () => {
          this._renderLoop();
        });
        return;
      }
      switch (stage) {
        case 0: // timestamp
          stage++;
          this._renderTimeStamp = new Date() as any;
          break;
        case 1:
          // Note: stage 2 is intentionally skipped (stage += 2).
          stage += 2;
          this._internalGotoFrame(this._renderFramePos);
          break;
        case 3:
          stage++;
          this._wkFlyovers.forEach(wkFlyover => {
            wkFlyover.func(wkFlyover, wkFlyover.params, TS_ANIME_LOOP,
              this._args);
          });
          break;
        case 4:
          stage++;
          if (this.onBeforeRenderFrame) { this.onBeforeRenderFrame(this._args); }
          break;
        case 5:
          stage++;
          this._curScene._internalRenderFrame(this._renderFramePos - this._curScene.storyFrameStart,
            this._renderDir, this._isVerbose, false);
          break;
        case 6:
          if (this.hasServer) {
            stage++;
          } else {
            stage += 2;
            // if hasServer=true, it's better to call before wait for next frame.
            if (this.onAfterRenderFrame) { this.onAfterRenderFrame(this._args); }
          }
          this._renderStage = stage;
          // Wait out the remainder of the frame period (at least 1ms), then
          // either resume locally or ask the server to capture the frame.
          const newDate = new Date();
          const elapsed = newDate as any - (this._renderTimeStamp as any);
          const waitTime = Math.max(this._renderPlaySpeed - Math.floor(elapsed /* / 1000 */), 1);
          this._renderTimer = window.setTimeout(() => {
            if (!this.hasServer) {
              this._renderLoop();
            } else {
              this._sendCmd(_SRV_CNT.MSG_RENDER);
            }
          }, waitTime);
          return;
        case 7:
          // this is only called if hasServer
          stage++;
          if (this.onAfterRenderFrame) { this.onAfterRenderFrame(this._args); }
          break;
        case 8:
          stage = 0;
          if (this._renderFramePos !== this._renderFrameEnd) {
            // Advance to the next frame and restart the per-frame cycle.
            this._renderFramePos += this._renderDir;
            if (this.onNextFrame) { this.onNextFrame(this._args); }
          } else {
            // Last frame rendered: leave the state machine.
            this._renderStage = -1;
            this.finishRender();
            return;
          }
          break;
      }
    }
  }
// ------------------------------------------------------------------------
// Communicate with the server
// ------------------------------------------------------------------------
protected _sendCmd(cmd: string, value?: string): void {
console.log(_SRV_CNT.MESSAGE_PREFIX + cmd +
(value ? _SRV_CNT.CMD_VALUE_SEP + value : ''));
}
  /**
   * Terminates the server in case is a headless webpage capture program.
   * In other cases, it has no effect.
   * Only emits the exit command; the server agent does the actual teardown.
   */
  exit(): void {
    this._sendCmd(_SRV_CNT.MSG_EXIT);
  }
/**
* Formats a log using a format supported by exact test framework.
* This is mostly used internally for testing, but it's publicly available.
*
* @param params list of [name, value]
*/
logFrmt(tag: string, params: (string | number)[][], logType?: LogType): void {
const msg = `${tag}:` + params.map(param => `${param[0]}=_[${param[1]}]_`)
.join(' ');
switch (logType) {
case LT_WARN: this.logWarn(msg); break;
case LT_ERROR: this.logError(msg); break;
default:
this.logMsg(msg);
}
}
/**
* If a server is present and supports logging,
* it sends a log message to server otherwise it sends to the browser console.
*/
logMsg(msg: string): void {
if (this.hasServer && this.serverFeatures.hasLogging) {
this._sendCmd(_SRV_CNT.MSG_LOG_MSG, msg);
} else {
console.log(msg);
}
}
/**
* If a server is present and supports logging,
* it sends a warn message to server otherwise it sends to the browser console.
*/
logWarn(msg: string): void {
if (this.hasServer && this.serverFeatures.hasLogging) {
this._sendCmd(_SRV_CNT.MSG_LOG_WARN, msg);
} else {
console.warn(msg);
}
}
/**
* If a server is present and supports logging,
* it sends a error message to server otherwise it sends to the browser console.
*/
logError(msg: string): void {
if (this.hasServer && this.serverFeatures.hasLogging) {
this._sendCmd(_SRV_CNT.MSG_LOG_ERROR, msg);
} else {
console.error(msg);
}
}
  /**
   * This method is called by the server to communicate with the client.
   * Handles the server handshake (fps / frame-count exchange) and the
   * per-frame "render done" acknowledgement.
   */
  _internalGetServerMsg(cmd: string, _value: string) {
    switch (cmd) {
      case _SRV_CNT.MSG_SERVER_READY:
        this._isServerReady = true;
        this.logMsg('Received Server Ready');
        // Push the render parameters to the server before starting.
        this._sendCmd(_SRV_CNT.MSG_SET_FPS, this.fps.toString());
        this._sendCmd(_SRV_CNT.MSG_SET_FRAME_COUNT, this._frameCount.toString());
        if (this.onServerReady) { this.onServerReady(this._args); }
        this._renderLoop();
        break;
      case _SRV_CNT.MSG_RENDER_DONE:
        // Server stored the frame on disk; resume the render state machine.
        this._renderLoop();
        break;
    }
  }
// ------------------------------------------------------------------------
// Proxies
// ------------------------------------------------------------------------
getElementAdapters(selector: ElSelectorHandler): ElementAdapter[] {
return _parseInElSelector(this, [], this.storyAdapter, selector);
}
}
// ------------------------------------------------------------------------
// Global Functions
// ------------------------------------------------------------------------
/**
* Creates a story. Loading parameters from style data.
* `fps` property can also be defined on `body.data-fps` or config file.
*
* @see config-file
*/
export function createStory(fps?: uint,
createParams?: CreateStoryParams): Story {
_abeamer = new _Story({ fps }, createParams || {});
return _abeamer;
}
/**
 * Creates a story by loading the configuration from a file or a teleported story.
 * Supports both JSON config files and CSS/INI-style files where parameters
 * are written as `$abeamer-<name>: <value>;` (or `@abeamer-...`) lines.
 *
 * @see config-file
 */
export function createStoryFromConfig(
  cfgUrl: string,
  callback: (story: Story) => void,
  fps?: uint,
  createParams?: CreateStoryParams): void {
  $.get(cfgUrl, (data: any): void => {
    let cfgRoot: { [key: string]: any };
    if (cfgUrl.endsWith('.json')) {
      cfgRoot = data;
    } else {
      // Non-JSON: extract `abeamer-*` properties line by line via regex.
      cfgRoot = {};
      // @HINT: this code is a copy of server.ts / parseIniCfgContent
      // @TODO: find a way to avoid duplicating this code
      (data as string).split(/\n/).forEach(line =>
        line.replace(/^\s*[\$@]abeamer-([\w+\-]+)\s*:\s*"?([^\n]+)"?\s*;\s*$/,
          (_all, p1, p2) => {
            cfgRoot[p1] = p2;
            return '';
          }));
      // Normalize to the same shape as a JSON config.
      cfgRoot = { config: { abeamer: cfgRoot } };
    }
    const cfgConfig = (cfgRoot as StoryConfig).config;
    if (cfgConfig) {
      const cfg: _InnerConfig = cfgConfig.abeamer;
      if (cfg) {
        // Explicit config fps wins; fall back to the caller-provided fps.
        cfg.fps = cfg.fps || fps;
        _abeamer = new _Story(cfg, createParams || {});
        callback(_abeamer);
      }
    }
    // NOTE(review): if the config is missing/invalid, `callback` is never
    // invoked and no error is reported — confirm this is intentional.
  });
}
}
// Module-level story singleton, set by createStory / createStoryFromConfig.
let _abeamer: ABeamer._Story;
import { Type } from '@angular/core';
import {
ComponentFixture,
fakeAsync,
tick,
} from '@angular/core/testing';
import {
FormsModule,
ReactiveFormsModule,
} from '@angular/forms';
import { By } from '@angular/platform-browser';
import { NoopAnimationsModule } from '@angular/platform-browser/animations';
import { KEYS } from '@terminus/ngx-tools/keycodes';
import {
createComponent as createComponentInner,
createKeyboardEvent,
createMouseEvent,
dispatchEvent,
typeInElement,
} from '@terminus/ngx-tools/testing';
import { TsAutocompleteModule } from '@terminus/ui/autocomplete';
import {
TsChipCollectionComponent,
TsChipModule,
} from '@terminus/ui/chip';
import * as testComponents from '@terminus/ui/chip/testing';
// eslint-disable-next-line no-duplicate-imports
import {
getAllChipDebugElements,
getChipCollectionInstance,
getChipElement,
getChipInstance,
} from '@terminus/ui/chip/testing';
import { TsOptionModule } from '@terminus/ui/option';
/**
 * Builds a test fixture for `component` with the standard set of module
 * imports required by the chip collection specs.
 *
 * @param component - the test host component class
 * @returns the created component fixture
 */
function createComponent<T>(component: Type<T>): ComponentFixture<T> {
  return createComponentInner(component, undefined, [
    FormsModule,
    ReactiveFormsModule,
    TsChipModule,
    TsAutocompleteModule,
    TsOptionModule,
    NoopAnimationsModule,
  ]);
}
describe(`TsChipCollection`, function() {
  // Shared fixture and element/instance handles, (re)assigned per test by
  // `setupStandardCollection` below.
  let fixture;
  let chipElements;
  let chipDebugElement;
  let chipNativeElement;
  let chipCollectionElement;
  let chipCollectionInstance;
  let chipCollectionNativeElement;
  let chipInsideElement;
  let chipInstance;
  let chips;
  let manager;
  let nativeChips;
  let nativeInput;
  let testComponent;
  let BACKSPACE_EVENT: KeyboardEvent;
  let BACKSPACE_EVENT_CHIP: KeyboardEvent;
  /**
   * Set up for test: creates the fixture for `component` and captures the
   * chip collection / chip elements and instances used by the assertions.
   *
   * @param component - the test host component class
   */
  function setupStandardCollection(component) {
    fixture = createComponent(component);
    fixture.detectChanges();
    chipCollectionElement = fixture.debugElement.query(By.directive(TsChipCollectionComponent));
    chipCollectionNativeElement = chipCollectionElement.nativeElement;
    chipCollectionInstance = getChipCollectionInstance(fixture);
    chipInstance = getChipInstance(fixture);
    chipElements = getAllChipDebugElements(fixture);
    chipDebugElement = fixture.debugElement.query(By.css('.ts-chip'));
    chipNativeElement = chipDebugElement.nativeElement;
    chipInsideElement = fixture.debugElement.query(By.css('.c-chip')).nativeElement;
    nativeChips = chipCollectionNativeElement.querySelectorAll('ts-chip');
    chips = chipCollectionInstance.chips;
    manager = chipCollectionInstance.keyManager;
    testComponent = fixture.debugElement.componentInstance;
  }
  test(`should exist`, function() {
    setupStandardCollection(testComponents.RegularChip);
    expect(chipNativeElement).toBeTruthy();
  });
  test(`should disable chip if isDisabled set`, fakeAsync(() => {
    setupStandardCollection(testComponents.DisabledChip);
    tick();
    fixture.detectChanges();
    expect(chipInstance.isDisabled).toEqual(true);
    expect(chipNativeElement.classList).toContain('ts-chip--disabled');
  }));
  test(`should disable chip if isReadonly set`, fakeAsync(() => {
    setupStandardCollection(testComponents.ReadonlyChip);
    chipCollectionInstance.isReadonly = true;
    tick();
    fixture.detectChanges();
    expect(chipInstance.isRemovable).toBeTruthy();
    expect(chipInstance.chipCollectionRemovable).toBeFalsy();
    expect(chipNativeElement.classList).not.toContain('c-chip--removable');
  }));
  test(`should be able to select`, fakeAsync(function() {
    setupStandardCollection(testComponents.RegularChip);
    chipInstance.select();
    const chip1 = chipElements[0].nativeElement;
    const chip2 = chipElements[1].nativeElement;
    chip2._selected = false;
    tick(1000);
    fixture.detectChanges();
    expect(chip1.classList).toContain('ts-chip--selected');
    expect(chip2.classList).not.toContain('ts-chip--selected');
  }));
  test(`should set value properly`, function() {
    fixture = createComponent(testComponents.NoValueChip);
    fixture.detectChanges();
    const instance = getChipCollectionInstance(fixture);
    expect(instance.value).toEqual([]);
    instance.value = ['banana'];
    fixture.detectChanges();
    expect(instance.value).toEqual(['banana']);
  });
  describe(`empty`, () => {
    test(`should check the number of chips if the collection exists`, () => {
      fixture = createComponent(testComponents.NoChip);
      chipCollectionInstance = getChipCollectionInstance(fixture);
      fixture.detectChanges();
      expect(chipCollectionInstance.empty).toEqual(true);
    });
  });
  test(`should be able to set isSelectable from chip collection and populate to chips`, fakeAsync(function() {
    setupStandardCollection(testComponents.RegularChip);
    chipCollectionInstance.isSelectable = true;
    const chip = chips.toArray()[0];
    fixture.detectChanges();
    chipInstance.select();
    tick(1000);
    fixture.detectChanges();
    expect(chip.chipCollectionSelectable).toBeTruthy();
  }));
  test(`should emit 'collectionChange' and 'removed' events when a chip is removed`, function() {
    setupStandardCollection(testComponents.Events);
    const buttons = fixture.nativeElement.querySelectorAll('.c-chip__remove');
    const chipRemovalButton = buttons[1];
    fixture.componentInstance.change = jest.fn();
    fixture.componentInstance.removed = jest.fn();
    expect(chipCollectionInstance.chips.length).toEqual(2);
    chipRemovalButton.click();
    fixture.detectChanges();
    expect(fixture.componentInstance.change.mock.calls[0][0].source.chips.length).toEqual(1);
    expect(fixture.componentInstance.removed.mock.calls[0][0].chip.value).toEqual('banana');
    expect.assertions(3);
  });
  test(`should emit the tabUpdateFocus event`, fakeAsync(() => {
    setupStandardCollection(testComponents.Events);
    fixture.componentInstance.tabbed = jest.fn();
    // Build a TAB keydown event manually so both `code` and `keyCode` report TAB.
    const event = document.createEvent('KeyboardEvent');
    event.initEvent('keydown', true, false);
    Object.defineProperties(event, { code: { get: () => KEYS.TAB.code } });
    Object.defineProperties(event, { keyCode: { get: () => KEYS.TAB.keyCode } });
    const chip = chipElements[1].nativeElement;
    dispatchEvent(chip, event);
    fixture.detectChanges();
    expect(fixture.componentInstance.tabbed).toHaveBeenCalled();
  }));
  test(`should focus on nothing if blur and no focused chip`, () => {
    setupStandardCollection(testComponents.RegularChip);
    chipCollectionInstance.blur();
    fixture.detectChanges();
    expect(manager.activeItemIndex).toBe(-1);
  });
  test(`should deselect current selected chip if select another chip under allowMultipleSelections false`, () => {
    setupStandardCollection(testComponents.NotAllowMultipleSelections);
    const allChips = chips.toArray();
    allChips[0].selected = true;
    fixture.detectChanges();
    allChips[1].selected = true;
    fixture.detectChanges();
    expect(allChips[0].selected).toBeFalsy();
  });
  describe(`focus`, function() {
    test(`should return undefined if isDisabled`, function() {
      setupStandardCollection(testComponents.DisabledChip);
      expect(chipCollectionInstance.focus()).toEqual(undefined);
    });
    test(`should focus the first non-disabled chip`, function() {
      setupStandardCollection(testComponents.RegularChip);
      chipCollectionInstance.keyManager.setFirstItemActive = jest.fn();
      chipCollectionInstance.focus();
      fixture.detectChanges();
      expect(chipCollectionInstance.keyManager.setFirstItemActive).toHaveBeenCalled();
    });
    test(`should set chip focus state if focus method is triggered`, function() {
      setupStandardCollection(testComponents.RegularChip);
      chipInstance.focus();
      expect(chipInstance.hasFocus).toBeTruthy();
      expect(chipCollectionInstance.focused).toBeTruthy();
    });
    test(`should call the blur() method when a chip is blurred`, fakeAsync(() => {
      setupStandardCollection(testComponents.RegularChip);
      chipCollectionInstance.blur = jest.fn();
      chipInstance.handleBlur();
      fixture.detectChanges();
      tick();
      expect(chipCollectionInstance.blur).toHaveBeenCalled();
    }));
  });
  describe(`tabIndex`, () => {
    beforeEach(() => {
      setupStandardCollection(testComponents.Tabindex);
    });
    test(`should set tabindex if not disabled`, () => {
      expect(chipCollectionNativeElement.getAttribute('tabindex')).toEqual('4');
      fixture.componentInstance.index = -1;
      fixture.detectChanges();
      expect(chipCollectionNativeElement.getAttribute('tabindex')).toEqual('-1');
      expect(chipCollectionInstance.tabIndex).toEqual(-1);
    });
    test(`should have tabindex null if disabled`, () => {
      fixture.componentInstance.isDisabled = true;
      fixture.detectChanges();
      expect(chipCollectionNativeElement.getAttribute('tabindex')).toBeFalsy();
    });
  });
  test(`should not allow keyboard focus when there's 0 chip`, fakeAsync(() => {
    fixture = createComponent(testComponents.NoChip);
    chipCollectionInstance = getChipCollectionInstance(fixture);
    fixture.detectChanges();
    tick(1000);
    expect(chipCollectionInstance.tabIndex).toBe(-1);
  }));
  test('should not have the aria-selected attribute when is not selectable', () => {
    setupStandardCollection(testComponents.StandardChipCollection);
    testComponent = fixture.debugElement.componentInstance;
    testComponent.selectable = false;
    fixture.detectChanges();
    const chipsValid = chips.toArray().every(chip => !chip.isSelectable && !chip.elementRef.nativeElement.hasAttribute('aria-selectable'));
    expect(chipsValid).toBe(true);
  });
  describe(`ID`, function() {
    beforeEach(() => {
      setupStandardCollection(testComponents.Id);
    });
    test(`should get UID as its id`, () => {
      fixture.componentInstance.myId = undefined as any;
      fixture.detectChanges();
      expect(chipCollectionNativeElement.getAttribute('id')).toEqual(expect.stringContaining('ts-chip-collection-'));
      expect(chipCollectionInstance.id).toEqual(expect.stringContaining('ts-chip-collection-'));
    });
    test(`should set custom id`, () => {
      fixture.componentInstance.myId = 1;
      fixture.detectChanges();
      expect(chipCollectionInstance.id).toEqual(1);
    });
  });
  // TODO: Move autocomplete tests to the autocomplete spec file(s)
  describe(`used in autocomplete component`, function() {
    beforeEach(() => {
      setupStandardCollection(testComponents.Autocomplete);
      nativeInput = fixture.nativeElement.querySelector('input');
      BACKSPACE_EVENT = createKeyboardEvent('keydown', KEYS.BACKSPACE, nativeInput);
      BACKSPACE_EVENT_CHIP = createKeyboardEvent('keydown', KEYS.BACKSPACE, chipCollectionNativeElement);
    });
    test(`should focus on last item if input is empty and BACKSPACE is used`, () => {
      // Focus the input
      nativeInput.focus();
      expect(manager.activeItemIndex).toBe(-1);
      // Press the BACKSPACE key
      chipCollectionInstance.keydown(BACKSPACE_EVENT);
      fixture.detectChanges();
      // It focuses the last chip
      expect(manager.activeItemIndex).toEqual(chips.length - 1);
      chipCollectionInstance.keydown(BACKSPACE_EVENT_CHIP);
      fixture.detectChanges();
      const nodeName = document.activeElement ? document.activeElement.nodeName : '';
      // Focus still on the chip
      expect(nodeName).toEqual('TS-CHIP');
    });
    test(`should focus on input if input field is not empty and BACKSPACE is used`, () => {
      // Focus the input
      nativeInput.focus();
      // Type letter into the input field
      typeInElement('a', nativeInput);
      // Press the BACKSPACE key
      chipCollectionInstance.keydown(BACKSPACE_EVENT);
      // It focuses on the input field
      fixture.detectChanges();
      const nodeName = document.activeElement ? document.activeElement.nodeName : '';
      expect(nodeName).toBe('INPUT');
    });
  });
  describe(`keydown`, function() {
    beforeEach(() => {
      setupStandardCollection(testComponents.StandardChipCollection);
    });
    test(`should set first item active when use HOME key`, fakeAsync(function() {
      const element = getChipInstance(fixture)['elementRef'].nativeElement;
      chipCollectionInstance.keyManager.setFirstItemActive = jest.fn();
      const event = document.createEvent('KeyboardEvent');
      event.initEvent('keydown', true, false);
      Object.defineProperties(event, { code: { get: () => KEYS.HOME.code } });
      fixture.detectChanges();
      element.dispatchEvent(event);
      fixture.detectChanges();
      expect(chipCollectionInstance.keyManager.setFirstItemActive).toHaveBeenCalled();
    }));
    test(`should select OR deselect all items with CTRL + A`, fakeAsync(function() {
      chipCollectionInstance.allowMultipleSelections = true;
      fixture.detectChanges();
      tick(1000);
      const element = getChipElement(fixture);
      const event = createKeyboardEvent('keydown', KEYS.A, element);
      Object.defineProperty(event, 'ctrlKey', { get: () => true });
      dispatchEvent(element, event);
      fixture.detectChanges();
      tick(3000);
      expect(chipCollectionInstance.selectionModel.selected.length).toEqual(1);
      expect(event.defaultPrevented).toEqual(true);
      // A second CTRL+A toggles the selection off again.
      dispatchEvent(element, event);
      fixture.detectChanges();
      expect(chipCollectionInstance.selectionModel.selected.length).toEqual(0);
    }));
  });
  describe(`updateFocus`, () => {
    beforeEach(() => {
      setupStandardCollection(testComponents.StandardChipCollection);
    });
    test(`should focus on last chip when END key pressed`, () => {
      const firstNativeChip = nativeChips[0] as HTMLElement;
      const array = chips.toArray();
      const lastIndex = array.length - 1;
      const firstItem = array[0];
      const END_EVENT = createKeyboardEvent('keydown', KEYS.END, firstNativeChip);
      firstItem.focus();
      chipCollectionInstance.keydown(END_EVENT);
      fixture.detectChanges();
      expect(manager.activeItemIndex).toEqual(lastIndex);
    });
    test('should focus the next chip when one is removed ', () => {
      jest.useFakeTimers();
      testComponent = fixture.debugElement.componentInstance;
      const array = chips.toArray();
      const lastIndex = array.length - 1;
      const lastItem = array[lastIndex];
      // Focus the last item
      lastItem.focus();
      // Destroy the last item
      testComponent.options.pop();
      fixture.detectChanges();
      jest.advanceTimersByTime(2000);
      // It focuses the next-to-last item
      expect(manager.activeItemIndex).toEqual(lastIndex - 1);
    });
  });
  test('should focus the chip collection if the last focused item is removed', fakeAsync(function() {
    setupStandardCollection(testComponents.OneChip);
    const chip = chipCollectionInstance.chips.toArray()[0];
    testComponent = fixture.debugElement.componentInstance;
    chip.hasFocus = true;
    fixture.detectChanges();
    chipCollectionInstance.focus = jest.fn();
    // Remove the chip
    testComponent.options = [];
    fixture.detectChanges();
    tick(1000);
    fixture.detectChanges();
    // Focus is now on chip collection
    expect(chipCollectionInstance.focus).toHaveBeenCalled();
  }));
  describe(`shift+click`, () => {
    let mouseEvent: MouseEvent;
    let firstChip;
    beforeEach(() => {
      setupStandardCollection(testComponents.StandardChipCollection);
      firstChip = chipCollectionInstance.chips.toArray()[0];
      mouseEvent = createMouseEvent('click');
      Object.defineProperty(mouseEvent, 'shiftKey', { get: () => true });
    });
    test(`should select a chip if it's not selected and allowMultiple is true`, () => {
      chipCollectionInstance.allowMultipleSelections = true;
      // If chip is not selected, shift click will select the chip
      firstChip.selected = false;
      fixture.detectChanges();
      dispatchEvent(firstChip.elementRef.nativeElement, mouseEvent);
      fixture.detectChanges();
      expect(firstChip.selected).toBeTruthy();
      // If chip is selected, shift click will deselect the chip
      dispatchEvent(firstChip.elementRef.nativeElement, mouseEvent);
      fixture.detectChanges();
      expect(firstChip.selected).toBeFalsy();
    });
    test(`should not select a chip if allowMultiple being false`, fakeAsync(() => {
      chipCollectionInstance.allowMultipleSelections = false;
      firstChip.selected = false;
      fixture.detectChanges();
      dispatchEvent(chipInsideElement, mouseEvent);
      fixture.detectChanges();
      tick(3000);
      expect(firstChip.selected).toBeFalsy();
    }));
  });
});
/* tslint:disable:max-file-line-count */
import APERY = require( '@stdlib/constants/float64/apery' );
import CATALAN = require( '@stdlib/constants/float64/catalan' );
import CBRT_EPS = require( '@stdlib/constants/float64/cbrt-eps' );
import E = require( '@stdlib/constants/float64/e' );
import EPS = require( '@stdlib/constants/float64/eps' );
import EULERGAMMA = require( '@stdlib/constants/float64/eulergamma' );
import EXPONENT_BIAS = require( '@stdlib/constants/float64/exponent-bias' );
import FOURTH_PI = require( '@stdlib/constants/float64/fourth-pi' );
import FOURTH_ROOT_EPS = require( '@stdlib/constants/float64/fourth-root-eps' );
import GAMMA_LANCZOS_G = require( '@stdlib/constants/float64/gamma-lanczos-g' );
import GLAISHER = require( '@stdlib/constants/float64/glaisher-kinkelin' );
import HALF_LN2 = require( '@stdlib/constants/float64/half-ln-two' );
import HALF_PI = require( '@stdlib/constants/float64/half-pi' );
import HIGH_WORD_EXPONENT_MASK = require( '@stdlib/constants/float64/high-word-exponent-mask' );
import HIGH_WORD_SIGNIFICAND_MASK = require( '@stdlib/constants/float64/high-word-significand-mask' );
import LN_HALF = require( '@stdlib/constants/float64/ln-half' );
import LN_PI = require( '@stdlib/constants/float64/ln-pi' );
import LN_SQRT_TWO_PI = require( '@stdlib/constants/float64/ln-sqrt-two-pi' );
import LN10 = require( '@stdlib/constants/float64/ln-ten' );
import LN2 = require( '@stdlib/constants/float64/ln-two' );
import LN_TWO_PI = require( '@stdlib/constants/float64/ln-two-pi' );
import LOG2E = require( '@stdlib/constants/float64/log2-e' );
import LOG10E = require( '@stdlib/constants/float64/log10-e' );
import MAX = require( '@stdlib/constants/float64/max' );
import MAX_BASE2_EXPONENT = require( '@stdlib/constants/float64/max-base2-exponent' );
import MAX_BASE2_EXPONENT_SUBNORMAL = require( '@stdlib/constants/float64/max-base2-exponent-subnormal' );
import MAX_BASE10_EXPONENT = require( '@stdlib/constants/float64/max-base10-exponent' );
import MAX_BASE10_EXPONENT_SUBNORMAL = require( '@stdlib/constants/float64/max-base10-exponent-subnormal' );
import MAX_LN = require( '@stdlib/constants/float64/max-ln' );
import MAX_SAFE_FIBONACCI = require( '@stdlib/constants/float64/max-safe-fibonacci' );
import MAX_SAFE_INTEGER = require( '@stdlib/constants/float64/max-safe-integer' );
import MAX_SAFE_LUCAS = require( '@stdlib/constants/float64/max-safe-lucas' );
import MAX_SAFE_NTH_FIBONACCI = require( '@stdlib/constants/float64/max-safe-nth-fibonacci' );
import MAX_SAFE_NTH_LUCAS = require( '@stdlib/constants/float64/max-safe-nth-lucas' );
import MIN_BASE2_EXPONENT = require( '@stdlib/constants/float64/min-base2-exponent' );
import MIN_BASE2_EXPONENT_SUBNORMAL = require( '@stdlib/constants/float64/min-base2-exponent-subnormal' );
import MIN_BASE10_EXPONENT = require( '@stdlib/constants/float64/min-base10-exponent' );
import MIN_BASE10_EXPONENT_SUBNORMAL = require( '@stdlib/constants/float64/min-base10-exponent-subnormal' );
import MIN_LN = require( '@stdlib/constants/float64/min-ln' );
import MIN_SAFE_INTEGER = require( '@stdlib/constants/float64/min-safe-integer' );
import NINF = require( '@stdlib/constants/float64/ninf' );
import NUM_BYTES = require( '@stdlib/constants/float64/num-bytes' );
import PHI = require( '@stdlib/constants/float64/phi' );
import PI = require( '@stdlib/constants/float64/pi' );
import PI_SQUARED = require( '@stdlib/constants/float64/pi-squared' );
import PINF = require( '@stdlib/constants/float64/pinf' );
import PRECISION = require( '@stdlib/constants/float64/precision' );
import SMALLEST_NORMAL = require( '@stdlib/constants/float64/smallest-normal' );
import SMALLEST_SUBNORMAL = require( '@stdlib/constants/float64/smallest-subnormal' );
import SQRT_EPS = require( '@stdlib/constants/float64/sqrt-eps' );
import SQRT_HALF = require( '@stdlib/constants/float64/sqrt-half' );
import SQRT_HALF_PI = require( '@stdlib/constants/float64/sqrt-half-pi' );
import SQRT_PHI = require( '@stdlib/constants/float64/sqrt-phi' );
import SQRT_PI = require( '@stdlib/constants/float64/sqrt-pi' );
import SQRT_THREE = require( '@stdlib/constants/float64/sqrt-three' );
import SQRT_TWO = require( '@stdlib/constants/float64/sqrt-two' );
import SQRT_TWO_PI = require( '@stdlib/constants/float64/sqrt-two-pi' );
import TWO_PI = require( '@stdlib/constants/float64/two-pi' );
/**
* Interface describing the `float64` namespace.
*/
interface Namespace {
/**
* Apéry's constant.
*
* @example
* var apery = ns.APERY;
* // returns 1.2020569031595942
*/
APERY: typeof APERY;
/**
* Catalan's constant.
*
* @example
* var catalan = ns.CATALAN;
* // returns 0.915965594177219
*/
CATALAN: typeof CATALAN;
/**
* Cube root of double-precision floating-point epsilon.
*
* @example
* var eps = ns.CBRT_EPS;
* // returns 0.0000060554544523933395
*/
CBRT_EPS: typeof CBRT_EPS;
/**
* Euler's number.
*
* @example
* var e = ns.E;
* // returns 2.718281828459045
*/
E: typeof E;
/**
* Difference between one and the smallest value greater than one that can be represented as a double-precision floating-point number.
*
* @example
* var eps = ns.EPS;
* // returns 2.220446049250313e-16
*/
EPS: typeof EPS;
/**
* The Euler-Mascheroni constant.
*
* @example
* var val = ns.EULERGAMMA;
* // returns 0.5772156649015329
*/
EULERGAMMA: typeof EULERGAMMA;
/**
* The bias of a double-precision floating-point number's exponent.
*
* @example
* var bias = ns.EXPONENT_BIAS;
* // returns 1023
*/
EXPONENT_BIAS: typeof EXPONENT_BIAS;
/**
* One fourth times the mathematical constant `π`.
*
* @example
* var val = ns.FOURTH_PI;
* // returns 7.85398163397448309616e-1
*/
FOURTH_PI: typeof FOURTH_PI;
/**
* Fourth root of double-precision floating-point epsilon.
*
* @example
* var eps = ns.FOURTH_ROOT_EPS;
* // returns 0.0001220703125
*/
FOURTH_ROOT_EPS: typeof FOURTH_ROOT_EPS;
/**
* Arbitrary constant `g` to be used in Lanczos approximation functions.
*
* @example
* var g = ns.GAMMA_LANCZOS_G;
* // returns 10.900511
*/
GAMMA_LANCZOS_G: typeof GAMMA_LANCZOS_G;
/**
* Glaisher-Kinkelin constant.
*
* @example
* var val = ns.GLAISHER;
* // returns 1.2824271291006226
*/
GLAISHER: typeof GLAISHER;
/**
* One half times the natural logarithm of 2.
*
* @example
* var val = ns.HALF_LN2;
* // returns 3.46573590279972654709e-01
*/
HALF_LN2: typeof HALF_LN2;
/**
* One half times the mathematical constant `π`.
*
* @example
* var val = ns.HALF_PI;
* // returns 1.5707963267948966
*/
HALF_PI: typeof HALF_PI;
/**
* High word mask for the exponent of a double-precision floating-point number.
*
* @example
* var mask = ns.HIGH_WORD_EXPONENT_MASK;
* // returns 2146435072
*/
HIGH_WORD_EXPONENT_MASK: typeof HIGH_WORD_EXPONENT_MASK;
/**
* High word mask for the significand of a double-precision floating-point number.
*
* @example
* var mask = ns.HIGH_WORD_SIGNIFICAND_MASK;
* // returns 1048575
*/
HIGH_WORD_SIGNIFICAND_MASK: typeof HIGH_WORD_SIGNIFICAND_MASK;
/**
* Natural logarithm of `1/2`.
*
* @example
* var val = ns.LN_HALF;
* // returns -0.6931471805599453
*/
LN_HALF: typeof LN_HALF;
/**
* Natural logarithm of the mathematical constant `π`.
*
* @example
* var val = ns.LN_PI;
* // returns 1.1447298858494002
*/
LN_PI: typeof LN_PI;
/**
* Natural logarithm of the square root of `2π`.
*
* @example
* var val = ns.LN_SQRT_TWO_PI;
* // returns 0.9189385332046728
*/
LN_SQRT_TWO_PI: typeof LN_SQRT_TWO_PI;
/**
* Natural logarithm of `10`.
*
* @example
* var val = ns.LN10;
* // returns 2.302585092994046
*/
LN10: typeof LN10;
/**
* Natural logarithm of `2`.
*
* @example
* var val = ns.LN2;
* // returns 0.6931471805599453
*/
LN2: typeof LN2;
/**
* Natural logarithm of `2π`.
*
* @example
* var val = ns.LN_TWO_PI;
* // returns 1.8378770664093456
*/
LN_TWO_PI: typeof LN_TWO_PI;
/**
* Base 2 logarithm of Euler's number.
*
* @example
* var val = ns.LOG2E;
* // returns 1.4426950408889634
*/
LOG2E: typeof LOG2E;
/**
* Base 10 logarithm of Euler's number.
*
* @example
* var val = ns.LOG10E;
* // returns 0.4342944819032518
*/
LOG10E: typeof LOG10E;
/**
* Maximum double-precision floating-point number.
*
* @example
* var max = ns.MAX;
* // returns 1.7976931348623157e+308
*/
MAX: typeof MAX;
/**
* The maximum biased base 2 exponent for a double-precision floating-point number.
*
* @example
* var exp = ns.MAX_BASE2_EXPONENT;
* // returns 1023
*/
MAX_BASE2_EXPONENT: typeof MAX_BASE2_EXPONENT;
/**
* The maximum biased base 2 exponent for a subnormal double-precision floating-point number.
*
* @example
* var exp = ns.MAX_BASE2_EXPONENT_SUBNORMAL;
* // returns -1023
*/
MAX_BASE2_EXPONENT_SUBNORMAL: typeof MAX_BASE2_EXPONENT_SUBNORMAL;
/**
* The maximum base 10 exponent for a double-precision floating-point number.
*
* @example
* var exp = ns.MAX_BASE10_EXPONENT;
* // returns 308
*/
MAX_BASE10_EXPONENT: typeof MAX_BASE10_EXPONENT;
/**
* The maximum base 10 exponent for a subnormal double-precision floating-point number.
*
* @example
* var exp = ns.MAX_BASE10_EXPONENT_SUBNORMAL;
* // returns -308
*/
MAX_BASE10_EXPONENT_SUBNORMAL: typeof MAX_BASE10_EXPONENT_SUBNORMAL;
/**
* Natural logarithm of the maximum double-precision floating-point number.
*
* @example
* var val = ns.MAX_LN;
* // returns 709.782712893384
*/
MAX_LN: typeof MAX_LN;
/**
* Maximum safe Fibonacci number when stored in double-precision floating-point format.
*
* @example
* var max = ns.MAX_SAFE_FIBONACCI;
* // returns 8944394323791464
*/
MAX_SAFE_FIBONACCI: typeof MAX_SAFE_FIBONACCI;
/**
* Maximum safe double-precision floating-point integer.
*
* @example
* var max = ns.MAX_SAFE_INTEGER;
* // returns 9007199254740991
*/
MAX_SAFE_INTEGER: typeof MAX_SAFE_INTEGER;
/**
* Maximum safe Lucas number when stored in double-precision floating-point format.
*
* @example
* var max = ns.MAX_SAFE_LUCAS;
* // returns 7639424778862807
*/
MAX_SAFE_LUCAS: typeof MAX_SAFE_LUCAS;
/**
* Maximum safe nth Fibonacci number when stored in double-precision floating-point format.
*
* @example
* var max = ns.MAX_SAFE_NTH_FIBONACCI;
* // returns 78
*/
MAX_SAFE_NTH_FIBONACCI: typeof MAX_SAFE_NTH_FIBONACCI;
/**
* Maximum safe nth Lucas number when stored in double-precision floating-point format.
*
* @example
* var max = ns.MAX_SAFE_NTH_LUCAS;
* // returns 76
*/
MAX_SAFE_NTH_LUCAS: typeof MAX_SAFE_NTH_LUCAS;
/**
* The minimum biased base 2 exponent for a normal double-precision floating-point number.
*
* @example
* var min = ns.MIN_BASE2_EXPONENT;
* // returns -1022
*/
MIN_BASE2_EXPONENT: typeof MIN_BASE2_EXPONENT;
/**
* The minimum biased base 2 exponent for a subnormal double-precision floating-point number.
*
* @example
* var min = ns.MIN_BASE2_EXPONENT_SUBNORMAL;
* // returns -1074
*/
MIN_BASE2_EXPONENT_SUBNORMAL: typeof MIN_BASE2_EXPONENT_SUBNORMAL;
/**
* The minimum base 10 exponent for a normal double-precision floating-point number.
*
* @example
* var min = ns.MIN_BASE10_EXPONENT;
* // returns -308
*/
MIN_BASE10_EXPONENT: typeof MIN_BASE10_EXPONENT;
/**
* The minimum base 10 exponent for a subnormal double-precision floating-point number.
*
* @example
* var min = ns.MIN_BASE10_EXPONENT_SUBNORMAL;
* // returns -324
*/
MIN_BASE10_EXPONENT_SUBNORMAL: typeof MIN_BASE10_EXPONENT_SUBNORMAL;
/**
* Natural logarithm of the smallest normalized double-precision floating-point number.
*
* @example
* var min = ns.MIN_LN;
* // returns -708.3964185322641
*/
MIN_LN: typeof MIN_LN;
/**
* Minimum safe double-precision floating-point integer.
*
* @example
* var min = ns.MIN_SAFE_INTEGER;
* // returns -9007199254740991
*/
MIN_SAFE_INTEGER: typeof MIN_SAFE_INTEGER;
/**
* Double-precision floating-point negative infinity.
*
* @example
* var ninf = ns.NINF;
* // returns -Infinity
*/
NINF: typeof NINF;
/**
* Size (in bytes) of a double-precision floating-point number.
*
* @example
* var bytes = ns.NUM_BYTES;
* // returns 8
*/
NUM_BYTES: typeof NUM_BYTES;
/**
* Golden ratio.
*
* @example
* var val = ns.PHI;
* // returns 1.618033988749895
*/
PHI: typeof PHI;
/**
* The mathematical constant `π`.
*
* @example
* var val = ns.PI;
* // returns 3.141592653589793
*/
PI: typeof PI;
/**
* Square of the mathematical constant `π`.
*
* @example
* var val = ns.PI_SQUARED;
* // returns 9.869604401089358
*/
PI_SQUARED: typeof PI_SQUARED;
/**
* Double-precision floating-point positive infinity.
*
* @example
* var pinf = ns.PINF;
* // returns Infinity
*/
PINF: typeof PINF;
/**
* Effective number of bits in the significand of a double-precision floating-point number.
*
* @example
* var precision = ns.PRECISION;
* // returns 53
*/
PRECISION: typeof PRECISION;
/**
* Smallest positive double-precision floating-point normal number.
*
* @example
* var smallest = ns.SMALLEST_NORMAL;
* // returns 2.2250738585072014e-308
*/
SMALLEST_NORMAL: typeof SMALLEST_NORMAL;
/**
* Smallest positive double-precision floating-point subnormal number.
*
* @example
* var smallest = ns.SMALLEST_SUBNORMAL;
* // returns 4.940656458412465e-324
*/
SMALLEST_SUBNORMAL: typeof SMALLEST_SUBNORMAL;
/**
* Square root of double-precision floating-point epsilon.
*
* @example
* var val = ns.SQRT_EPS;
* // returns 1.4901161193847656e-8
*/
SQRT_EPS: typeof SQRT_EPS;
/**
* Square root of `1/2`.
*
* @example
* var val = ns.SQRT_HALF;
* // returns 0.7071067811865476
*/
SQRT_HALF: typeof SQRT_HALF;
/**
* Square root of the mathematical constant `π` divided by `2`.
*
* @example
* var val = ns.SQRT_HALF_PI;
* // returns 1.2533141373155003
*/
SQRT_HALF_PI: typeof SQRT_HALF_PI;
/**
* Square root of the golden ratio.
*
* @example
* var val = ns.SQRT_PHI;
* // returns 1.272019649514069
*/
SQRT_PHI: typeof SQRT_PHI;
/**
* Square root of the mathematical constant `π`.
*
* @example
* var val = ns.SQRT_PI;
* // returns 1.7724538509055160
*/
SQRT_PI: typeof SQRT_PI;
/**
* Square root of `3`.
*
* @example
* var val = ns.SQRT_THREE;
* // returns 1.7320508075688772
*/
SQRT_THREE: typeof SQRT_THREE;
/**
* Square root of `2`.
*
* @example
* var val = ns.SQRT_TWO;
* // returns 1.4142135623730951
*/
SQRT_TWO: typeof SQRT_TWO;
/**
* Square root of the mathematical constant `π` times `2`.
*
* @example
* var val = ns.SQRT_TWO_PI;
* // returns 2.5066282746310007
*/
SQRT_TWO_PI: typeof SQRT_TWO_PI;
/**
* The mathematical constant `π` times `2`.
*
* @example
* var val = ns.TWO_PI;
* // returns 6.283185307179586
*/
TWO_PI: typeof TWO_PI;
}
/**
* Double-precision floating-point mathematical constants.
*/
declare var ns: Namespace;
// EXPORTS //
export = ns; | the_stack |
import { FabricWallet } from '../src/FabricWallet';
import * as sinon from 'sinon';
import * as sinonChai from 'sinon-chai';
import * as chai from 'chai';
import * as chaiAsPromised from 'chai-as-promised';
import { Wallet } from 'fabric-network';
import { FabricIdentity } from 'ibm-blockchain-platform-common';
import * as WalletMigration from 'fabric-wallet-migration';
// Register the chai plugins used throughout this suite (promise-aware and sinon-aware assertions).
chai.use(chaiAsPromised);
chai.use(sinonChai);
// 'should'-style assertions; also used explicitly via should.exist(...).
const should: Chai.Should = chai.should();
describe('FabricWallet', () => {

    let mySandBox: sinon.SinonSandbox;
    let putStub: sinon.SinonStub;
    let removeStub: sinon.SinonStub;
    let getStub: sinon.SinonStub;
    let listStub: sinon.SinonStub;
    let newFileSystemWalletStoreStub: sinon.SinonStub;

    beforeEach(async () => {
        // Stub every fabric-network Wallet operation so the tests never touch a real wallet store.
        mySandBox = sinon.createSandbox();
        putStub = mySandBox.stub(Wallet.prototype, 'put');
        removeStub = mySandBox.stub(Wallet.prototype, 'remove');
        getStub = mySandBox.stub(Wallet.prototype, 'get');
        listStub = mySandBox.stub(Wallet.prototype, 'list');
        // Stub the v1 wallet-store factory used by migrateToV2Wallet.
        newFileSystemWalletStoreStub = mySandBox.stub(WalletMigration, 'newFileSystemWalletStore').resolves({});
    });

    afterEach(() => {
        mySandBox.restore();
    });

    describe('importIdentity', () => {
        it('should import identity', async () => {
            const wallet: FabricWallet = await FabricWallet.newFabricWallet('tmp/path');
            putStub.resolves();
            await wallet.importIdentity('---CERT---', '---KEY---', 'identity1', 'myMSP');
            // The identity must be stored as an X.509 entry keyed by its label.
            putStub.should.have.been.calledOnceWithExactly('identity1', {
                credentials: {
                    certificate: '---CERT---',
                    privateKey: '---KEY---',
                },
                mspId: 'myMSP',
                type: 'X.509',
            });
        });
    });

    describe('removeIdentity', () => {
        it('should remove identity', async () => {
            const wallet: FabricWallet = await FabricWallet.newFabricWallet('tmp/path');
            removeStub.resolves();
            await wallet.removeIdentity('identity1');
            removeStub.should.have.been.calledOnceWithExactly('identity1');
        });
    });

    describe('getWalletPath', () => {
        it('should get wallet path', async () => {
            const wallet: FabricWallet = await FabricWallet.newFabricWallet('tmp/myPath');
            const result: string = wallet.getWalletPath();
            result.should.equal('tmp/myPath');
        });
    });

    describe('getWallet', () => {
        it('should get wallet', async () => {
            const wallet: FabricWallet = await FabricWallet.newFabricWallet('tmp/myPath');
            const result: Wallet = wallet.getWallet();
            should.exist(result);
        });
    });

    describe('getIdentityNames', () => {
        it('should get the identity names', async () => {
            const wallet: FabricWallet = await FabricWallet.newFabricWallet('tmp/path');
            listStub.resolves(['label1', 'label2', 'label3']);
            const identityNames: string[] = await wallet.getIdentityNames();
            identityNames.should.deep.equal(['label1', 'label2', 'label3']);
        });
    });

    describe('#getIDs', () => {
        it('should return all requested identities in wallet as an array of Fabric Identities', async () => {
            const wallet: FabricWallet = await FabricWallet.newFabricWallet('tmp/myPath');
            getStub.onFirstCall().resolves({
                credentials: {
                    certificate: 'myCert',
                    privateKey: 'myKey',
                },
                mspId: 'myMSP',
                type: 'X.509',
            });
            getStub.onSecondCall().resolves({
                credentials: {
                    certificate: 'myCert2',
                    privateKey: 'myKey2',
                },
                mspId: 'myMSP2',
                type: 'X.509',
            });
            // NOTE: getIDs base64-encodes the credentials, unlike getIdentities below.
            const allIDs: FabricIdentity[] = [
                {
                    name: 'identity_a',
                    cert: Buffer.from('myCert').toString('base64'),
                    private_key: Buffer.from('myKey').toString('base64'),
                    msp_id: 'myMSP'
                },
                {
                    name: 'identity_b',
                    cert: Buffer.from('myCert2').toString('base64'),
                    private_key: Buffer.from('myKey2').toString('base64'),
                    msp_id: 'myMSP2'
                }
            ];
            const identities: FabricIdentity[] = await wallet.getIDs(['identity_a', 'identity_b']);
            identities.should.deep.equal(allIDs);
        });
    });

    describe('#getIdentities', () => {
        it('should return any identities', async () => {
            const wallet: FabricWallet = await FabricWallet.newFabricWallet('tmp/myPath');
            listStub.resolves(['label1', 'label2', 'label3']);
            getStub.onFirstCall().resolves({
                credentials: {
                    certificate: 'myCert',
                    privateKey: 'myKey',
                },
                mspId: 'myMSP',
                type: 'X.509',
            });
            getStub.onSecondCall().resolves({
                credentials: {
                    certificate: 'myCert2',
                    privateKey: 'myKey2',
                },
                mspId: 'myMSP2',
                type: 'X.509',
            });
            getStub.onThirdCall().resolves({
                credentials: {
                    certificate: 'myCert3',
                    privateKey: 'myKey3',
                },
                mspId: 'myMSP3',
                type: 'X.509',
            });
            // Credentials are returned raw (not base64) by getIdentities.
            const identityOne: FabricIdentity = {
                cert: 'myCert',
                private_key: 'myKey',
                msp_id: 'myMSP',
                name: 'label1'
            };
            const identityTwo: FabricIdentity = {
                cert: 'myCert2',
                private_key: 'myKey2',
                msp_id: 'myMSP2',
                name: 'label2'
            };
            const identityThree: FabricIdentity = {
                cert: 'myCert3',
                private_key: 'myKey3',
                msp_id: 'myMSP3',
                name: 'label3'
            };
            const identities: FabricIdentity[] = await wallet.getIdentities();
            identities.should.deep.equal([identityOne, identityTwo, identityThree]);
        });
    });

    describe('#getIdentity', () => {
        it('should get the wanted identity', async () => {
            const wallet: FabricWallet = await FabricWallet.newFabricWallet('tmp/myPath');
            getStub.resolves({
                credentials: {
                    certificate: 'myCert',
                    privateKey: 'myKey',
                },
                mspId: 'myMSP',
                type: 'X.509',
            });
            const identityOne: FabricIdentity = {
                cert: 'myCert',
                private_key: 'myKey',
                msp_id: 'myMSP',
                name: 'label1'
            };
            const identity: FabricIdentity = await wallet.getIdentity('label1');
            identity.should.deep.equal(identityOne);
        });
    });

    describe('#exists', () => {
        it('should return true if the identity exists', async () => {
            const wallet: FabricWallet = await FabricWallet.newFabricWallet('tmp/myPath');
            getStub.resolves({
                credentials: {
                    certificate: 'myCert',
                    privateKey: 'myKey',
                },
                mspId: 'myMSP',
                type: 'X.509',
            });
            const exists: boolean = await wallet.exists('label1');
            exists.should.equal(true);
        });

        it('should return false if the identity does not exists', async () => {
            const wallet: FabricWallet = await FabricWallet.newFabricWallet('tmp/myPath');
            // get resolving undefined means the label is absent from the wallet
            getStub.resolves(undefined);
            const exists: boolean = await wallet.exists('label1');
            exists.should.equal(false);
        });
    });

    describe('migrateToV2Wallet', () => {
        it('should write v2 identity file if v1 identities found', async () => {
            const wallet: FabricWallet = await FabricWallet.newFabricWallet('tmp/myPath');
            listStub.resolves(['identity1']);
            getStub.resolves(
                {
                    credentials: {
                        certificate: '---CERT---',
                        privateKey: '---KEY---',
                    },
                    mspId: 'myMSP',
                    type: 'X.509',
                }
            );
            putStub.resolves();
            await wallet.migrateToV2Wallet();
            // Each v1 identity read from the old store must be re-written through the v2 wallet.
            newFileSystemWalletStoreStub.should.have.been.calledOnceWithExactly('tmp/myPath');
            listStub.should.have.been.calledOnceWithExactly();
            getStub.should.have.been.calledOnceWithExactly('identity1');
            putStub.should.have.been.calledOnceWithExactly('identity1', {
                credentials: {
                    certificate: '---CERT---',
                    privateKey: '---KEY---',
                },
                mspId: 'myMSP',
                type: 'X.509',
            });
        });

        it('should not write v2 identity file if no v1 identities found', async () => {
            const wallet: FabricWallet = await FabricWallet.newFabricWallet('tmp/myPath');
            listStub.resolves(['identity1']);
            getStub.resolves();
            await wallet.migrateToV2Wallet();
            newFileSystemWalletStoreStub.should.have.been.calledOnceWithExactly('tmp/myPath');
            listStub.should.have.been.calledOnceWithExactly();
            getStub.should.have.been.calledOnceWithExactly('identity1');
            // tslint:disable-next-line: no-unused-expression
            putStub.should.have.not.been.called;
        });
    });
});
import { FiniteField, Vector, Matrix, Assertion, LogFunction } from "@guildofweavers/genstark";
import { AirContext, ConstraintDescriptor, ProvingContext, VerificationContext } from "@guildofweavers/air-assembly";
import { BoundaryConstraints } from "./BoundaryConstraints";
import { ZeroPolynomial } from "./ZeroPolynomial";
import { StarkError } from "../StarkError";
// CLASS DEFINITION
// ================================================================================================
/**
 * Builds and evaluates the composition polynomial that merges all transition
 * constraints Q(x) and boundary constraints B(x) into a single value/vector,
 * using pseudo-random linear-combination coefficients derived from a seed.
 */
export class CompositionPolynomial {

    private readonly field: FiniteField;
    private readonly combinationDegree: number;
    // transition constraints bucketed by adjusted degree (degree * traceLength)
    private readonly constraintGroups: { degree: number; indexes: number[]; }[];
    // coefficients for combining transition (d) and boundary (b) constraints
    private readonly dCoefficients: Vector;
    private readonly bCoefficients: Vector;
    private readonly bPoly: BoundaryConstraints;
    private readonly zPoly: ZeroPolynomial;
    private readonly log: LogFunction;

    readonly compositionDegree: number;

    // CONSTRUCTOR
    // --------------------------------------------------------------------------------------------
    /**
     * @param assertions boundary assertions to enforce
     * @param seed       seed for the PRNG that derives combination coefficients
     * @param context    AIR context (field, constraints, trace length)
     * @param logger     progress-logging callback
     */
    constructor(assertions: Assertion[], seed: Buffer, context: AirContext, logger: LogFunction) {
        this.field = context.field;
        this.bPoly = new BoundaryConstraints(assertions, context);
        this.zPoly = new ZeroPolynomial(context);
        this.log = logger;

        // degree of trace polynomial combination
        this.combinationDegree = getCombinationDegree(context.constraints, context.traceLength);

        // degree of composition polynomial is deg(C(x)) = deg(Q(x)) - deg(Z(x))
        this.compositionDegree = Math.max(this.combinationDegree - context.traceLength, context.traceLength);

        // group transition constraints together by their degree
        this.constraintGroups = groupTransitionConstraints(context.constraints, context.traceLength);

        // create coefficients needed for linear combination; groups below the
        // combination degree contribute a second (degree-adjusted) term each
        let dCoefficientCount = context.constraints.length;
        for (let { degree, indexes } of this.constraintGroups) {
            if (degree < this.combinationDegree) {
                dCoefficientCount += indexes.length;
            };
        }
        let bCoefficientCount = this.bPoly.count;
        if (this.compositionDegree > context.traceLength) {
            // each boundary constraint then needs a raw and a degree-adjusted coefficient
            bCoefficientCount = bCoefficientCount * 2;
        }
        const coefficients = this.field.prng(seed, dCoefficientCount + bCoefficientCount).toValues();
        this.dCoefficients = this.field.newVectorFrom(coefficients.slice(0, dCoefficientCount));
        this.bCoefficients = this.field.newVectorFrom(coefficients.slice(dCoefficientCount))
    }

    // PUBLIC ACCESSORS
    // --------------------------------------------------------------------------------------------
    /** Total number of pseudo-random coefficients consumed from the seed. */
    get coefficientCount(): number {
        return this.dCoefficients.length + this.bCoefficients.length;
    }

    // PROOF METHODS
    // --------------------------------------------------------------------------------------------
    /**
     * Evaluates the composition polynomial over the full evaluation domain (prover side).
     * Returns D(x) + B(x) where D(x) = Q(x) / Z(x).
     */
    evaluateAll(pPolys: Matrix, pEvaluations: Matrix, context: ProvingContext): Vector {
        // 1 ----- evaluate transition constraints over composition domain
        let qEvaluations: Matrix;
        try {
            qEvaluations = context.evaluateTransitionConstraints(pPolys);
        }
        catch (error) {
            throw new StarkError('Failed to evaluate transition constraints', error);
        }
        this.log('Computed transition constraint polynomials Q(x)');

        // 2 ----- adjust transition constraint degrees
        const compositionFactor = context.evaluationDomain.length / context.compositionDomain.length;
        const compositionRou = this.field.exp(context.rootOfUnity, BigInt(compositionFactor));
        const qaEvaluations = this.field.matrixRowsToVectors(qEvaluations);
        for (let { degree, indexes } of this.constraintGroups) {
            if (degree === this.combinationDegree) continue;
            // compute the sequence of powers for the incremental degree
            let incrementalDegree = BigInt(this.combinationDegree - degree);
            let powerSeed = this.field.exp(compositionRou, incrementalDegree);
            let powers = this.field.getPowerSeries(powerSeed, context.compositionDomain.length);
            // raise the degree of evaluations and add adjusted evaluations to the list
            for (let i of indexes) {
                qaEvaluations.push(this.field.mulVectorElements(qaEvaluations[i], powers));
            }
        }
        this.log('Adjusted degrees of Q(x) polynomials');

        // 3 ----- merge transition constraints into a single polynomial
        // first, compute linear combination of adjusted evaluations
        const qcEvaluations = this.field.combineManyVectors(qaEvaluations, this.dCoefficients);
        this.log('Computed linear combination of Q(x) polynomials');
        // then, perform low-degree extension from composition domain to evaluation domain
        const qcPoly = this.field.interpolateRoots(context.compositionDomain, qcEvaluations);
        const qeEvaluations = this.field.evalPolyAtRoots(qcPoly, context.evaluationDomain);
        this.log('Performed low degree extensions of Q(x) polynomial');

        // 4 ----- compute D(x) = Q(x) / Z(x)
        const zEvaluations = this.zPoly.evaluateAll(context.evaluationDomain);
        this.log('Computed Z(x) polynomial');
        // Z(x) comes as numerator/denominator pairs; invert by dividing den by num
        const zInverses = this.field.divVectorElements(zEvaluations.denominators, zEvaluations.numerators);
        this.log('Computed Z(x) inverses');
        const dEvaluations = this.field.mulVectorElements(qeEvaluations, zInverses);
        this.log('Computed D(x) polynomial');

        // 5 ------- compute boundary constraints B(x)
        const bEvaluations = this.bPoly.evaluateAll(pEvaluations, context.evaluationDomain);
        this.log('Computed boundary constraint polynomials B(x)');

        // 6 ------- Adjust degrees of boundary constraints
        const baEvaluations = this.field.matrixRowsToVectors(bEvaluations);
        const bIncrementalDegree = BigInt(this.compositionDegree - context.traceLength);
        if (bIncrementalDegree > 0n) {
            const powerSeed = this.field.exp(context.rootOfUnity, bIncrementalDegree);
            const psbPowers = this.field.getPowerSeries(powerSeed, context.evaluationDomain.length);
            // raise the degree of evaluations and add adjusted evaluations to the list
            for (let i = 0; i < this.bPoly.count; i++) {
                baEvaluations.push(this.field.mulVectorElements(baEvaluations[i], psbPowers));
            }
        }
        this.log('Adjusted degrees of B(x) polynomials');

        // 7 ----- Merge boundary constraints into a single polynomial
        const bcEvaluations = this.field.combineManyVectors(baEvaluations, this.bCoefficients);
        this.log('Computed linear combination of B(x) polynomials');

        return this.field.addVectorElements(dEvaluations, bcEvaluations);
    }

    // VERIFICATION METHODS
    // --------------------------------------------------------------------------------------------
    /**
     * Evaluates the composition polynomial at a single point x (verifier side);
     * mirrors evaluateAll step-for-step but over scalars instead of vectors.
     */
    evaluateAt(x: bigint, pValues: bigint[], nValues: bigint[], hValues: bigint[], context: VerificationContext): bigint {
        // evaluate transition constraints at x
        const qValues = context.evaluateConstraintsAt(x, pValues, nValues, hValues);

        // adjust transition constraint degrees
        for (let { degree, indexes } of this.constraintGroups) {
            if (degree === this.combinationDegree) continue;
            let constraintIncrementalDegree = BigInt(this.combinationDegree - degree);
            let power = this.field.exp(x, constraintIncrementalDegree);
            for (let i of indexes) {
                qValues.push(this.field.mul(qValues[i], power));
            }
        }

        // merge transition constraint evaluations into a single value
        const qVector = this.field.newVectorFrom(qValues);
        const qcValue = this.field.combineVectors(qVector, this.dCoefficients);

        // compute D(x) = Q(x) / Z(x)
        const zValue = this.zPoly.evaluateAt(x);
        const dValue = this.field.div(qcValue, zValue);

        // evaluate boundary constraints at x
        const bValues = this.bPoly.evaluateAt(pValues, x);

        // adjust boundary constraint degrees
        const bIncrementalDegree = BigInt(this.compositionDegree - context.traceLength);
        if (bIncrementalDegree > 0n) {
            let power = this.field.exp(x, bIncrementalDegree);
            for (let i = 0; i < this.bPoly.count; i++) {
                bValues.push(this.field.mul(bValues[i], power));
            }
        }

        // merge boundary constraint evaluations into a single value
        const bVector = this.field.newVectorFrom(bValues);
        const bValue = this.field.combineVectors(bVector, this.bCoefficients);

        return this.field.add(dValue, bValue);
    }
}
// HELPER FUNCTIONS
// ================================================================================================
/**
 * Computes the target degree of the transition-constraint combination: the
 * highest constraint degree (at least 1), rounded up to the next power of
 * two, scaled by the trace length.
 */
function getCombinationDegree(constraints: ConstraintDescriptor[], traceLength: number) {
    const highestDegree = constraints.reduce(
        (best, constraint) => Math.max(best, constraint.degree),
        1
    );
    return 2 ** Math.ceil(Math.log2(highestDegree)) * traceLength;
}
function groupTransitionConstraints(constraints: ConstraintDescriptor[], traceLength: number) {
const constraintGroups = new Map<number, number[]>();
for (let i = 0; i < constraints.length; i++) {
let degree = (constraints[i].degree * traceLength);
let group = constraintGroups.get(degree);
if (!group) {
group = [];
constraintGroups.set(degree, group);
}
group.push(i);
}
const result = [] as { degree: number; indexes: number[]; }[];
for (let [degree, indexes] of constraintGroups) {
result.push({ degree, indexes });
}
return result;
} | the_stack |
import {Component, ElementRef, Injector, Input, OnInit, ViewChild} from '@angular/core';
import {AbstractComponent} from '@common/component/abstract.component';
import {PublicType, WorkspaceAdmin} from '@domain/workspace/workspace';
import * as _ from 'lodash';
import {WorkspaceService} from '../../../../../../../workspace/service/workspace.service';
declare let echarts;
declare let moment;
@Component({
selector: 'detail-workspaces-statistics',
templateUrl: './detail-workspace-statistics.component.html'
})
export class DetailWorkspaceStatisticsComponent extends AbstractComponent implements OnInit {
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Private Variables
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
// workspace detail information under inspection
private _workspace: WorkspaceAdmin;
// host element for the item-distribution pie chart
@ViewChild('itemDistribution')
private _itemDistribution: ElementRef;
// host element for the user-access line chart
@ViewChild('userAccess')
private _userAccess: ElementRef;
// line chart options
private _lineOption: any = {};
// pie chart options
private _pieOption: any = {};
// list of period options (e.g. monthly / overall)
private _periodList: any[] = [];
// currently selected period option
private _selectedPeriod: any;
// list of period-range dates
private _rangeList: any[] = [];
// currently selected period-range date
private _selectedRange: any;
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Protected Variables
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Public Variables
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
@Input('workspace')
public set setWorkspace(workspace) {
    // workspace detail information
    this._workspace = workspace;
    // statistics can only be loaded once the workspace has an id
    if (workspace.id) {
        // build the selectable period-range date list
        this._rangeList = this._getPeriodRangeList();
        // default to the most recent range
        this._selectedRange = this._rangeList[0];
        // fetch the statistics
        this._getWorkspaceStatistics();
    }
}
// whether the period-option dropdown is open
public periodShowFl: boolean = false;
// whether the period-range-date dropdown is open
public rangeShowFl: boolean = false;
// book item counts keyed by type (workbook / notebook / workbench)
public bookItems: object = {};
// number of users who bookmarked this workspace as a favorite
public countFavoriteWorkspace: number = 0;
// average number of dashboards per workbook
public avgDashboardByWorkBook: number = 0;
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Constructor
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
// Constructor: injects the workspace service used to fetch statistics.
constructor(private workspaceService: WorkspaceService,
            protected element: ElementRef,
            protected injector: Injector) {
    super(element, injector);
}
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Override Method
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
// Component initialization
public ngOnInit() {
    // base-class initialization
    super.ngOnInit();
    // initialize the view state
    this._initView();
}
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Public Method
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
/**
 * Change the selected period option.
 * @param period newly selected period option
 */
public onSelectedPeriod(period: any): void {
    this._selectedPeriod = period;
    // reload workspace statistics for the new selection
    this._getWorkspaceStatistics();
}
/**
 * Change the selected period-range date.
 * @param range newly selected range
 */
public onSelectedRange(range: any): void {
    this._selectedRange = range;
    // reload workspace statistics for the new selection
    this._getWorkspaceStatistics();
}
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Public Method - getter
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
/**
 * Total number of items in this workspace (workbooks + notebooks + workbenches).
 * @returns {number}
 */
public getCountBookItems(): number {
    const counts: number[] = [this.getWorkbookCount(), this.getNotebookCount(), this.getWorkbenchCount()];
    return counts.reduce((total, current) => total + current, 0);
}
/**
 * Number of workbooks in this workspace (0 when none reported).
 * @returns {number}
 */
public getWorkbookCount(): number {
    // '|| 0' is equivalent to the truthiness ternary on the same value
    return this.bookItems['workbook'] || 0;
}
/**
 * Number of notebooks in this workspace (0 when none reported).
 * @returns {number}
 */
public getNotebookCount(): number {
    // '|| 0' is equivalent to the truthiness ternary on the same value
    return this.bookItems['notebook'] || 0;
}
/**
 * Number of workbenches in this workspace (0 when none reported).
 * @returns {number}
 */
public getWorkbenchCount(): number {
    // '|| 0' is equivalent to the truthiness ternary on the same value
    return this.bookItems['workbench'] || 0;
}
/**
 * List of selectable period options.
 * @returns {any[]}
 */
public get getPeriodList(): any[] {
    return this._periodList;
}
/**
 * Currently selected period option.
 * @returns {any}
 */
public get getSelectedPeriod(): any {
    return this._selectedPeriod;
}
/**
 * List of selectable period-range dates.
 * @returns {any[]}
 */
public get getRangeList(): any[] {
    return this._rangeList;
}
/**
 * Currently selected period-range date.
 * @returns {any}
 */
public get getSelectedRange(): any {
    return this._selectedRange;
}
/**
 * Label for the period range currently being displayed: the selected range's
 * label in monthly mode, otherwise the overall period span.
 * @returns {string} range label, or '' when no ranges are loaded yet
 */
public getPeriodRange(): string {
    if (this._rangeList && this._rangeList.length !== 0) {
        return this.isPeriodMonthly() ? this.getSelectedRange.label : this.getOverallPeriod();
    }
    // previously fell through and returned undefined despite the declared string
    // return type; an empty string renders identically in the template
    return '';
}
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Public Method - validation
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
/**
 * Whether the selected period option is the monthly statistic.
 * @returns {boolean}
 */
public isPeriodMonthly(): boolean {
    return this.getSelectedPeriod.value === 'MONTHLY';
}
/**
 * Whether this workspace is a shared (public) workspace.
 * @returns {boolean}
 */
public isPublicWorkspace(): boolean {
    return this._workspace.publicType === PublicType.SHARED;
}
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Protected Method
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Private Method
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
/**
 * Renders the item-distribution pie chart into the given element.
 * @param {ElementRef} chartElement host element for the chart
 * @param datas book counts keyed by book type
 * @private
 */
private _drawPieChart(chartElement: ElementRef, datas: any) {
  const chart = echarts.init(chartElement.nativeElement);
  const option = this._getPieOption(datas);
  chart.setOption(option);
  chart.resize();
}
/**
 * Renders the user-access line chart into the given element.
 * @param {ElementRef} chartElement host element for the chart
 * @param rows x-axis categories
 * @param datas series values
 * @private
 */
private _drawLineChart(chartElement: ElementRef, rows: any, datas: any) {
  const chart = echarts.init(chartElement.nativeElement);
  const option = this._getLineOption(rows, datas);
  chart.setOption(option);
  chart.resize();
}
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Private Method - getter
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
/**
 * Label describing the overall period.
 * @returns {string} "oldest ~ newest" when more than one range exists,
 *                   otherwise the single range's label
 */
private getOverallPeriod(): string {
  const newest = this._rangeList[0];
  const oldest = this._rangeList[this._rangeList.length - 1];
  // Two or more ranges are rendered as a span; one range is shown as-is.
  return this._rangeList.length > 1
    ? `${oldest.overall} ~ ${newest.overall}`
    : newest.overall;
}
/**
 * Fetches workspace statistics from the server and redraws both charts.
 *
 * Side effects: toggles the loading indicator, updates bookItems,
 * avgDashboardByWorkBook and countFavoriteWorkspace, then renders the
 * item-distribution pie chart and the user-access line chart.
 * @private
 */
private _getWorkspaceStatistics(): void {
  // Show the loading indicator.
  this.loadingShow();
  this.workspaceService.getWorkspaceStatistics(this._workspace.id, this._getStatisticsParams())
    .then((result) => {
      // Book counts per type (workbook / notebook / workbench / folder).
      this.bookItems = result['countBookByType'];
      // Average dashboards per workbook, rounded up to one decimal place.
      this.avgDashboardByWorkBook = this._getAvgDashboardByWorkbook(result['avgDashboardByWorkBook']);
      // Number of users who marked this workspace as a favorite.
      this.countFavoriteWorkspace = result['countFavoriteWorkspace'];
      // Item distribution pie chart.
      this._drawPieChart(this._itemDistribution, this.bookItems);
      // User access line chart — NOTE(review): assumes columns[0] carries the
      // access-count series; confirm against the service response shape.
      this._drawLineChart(this._userAccess, result['viewCountByTime'].rows, result['viewCountByTime'].columns[0].value);
      // Hide the loading indicator.
      this.loadingHide();
    })
    .catch(() => {
      // Best-effort: the error is deliberately swallowed; only hide the loader.
      this.loadingHide();
    });
}
/**
 * Average number of dashboards per workbook, rounded up to one decimal place.
 * @param {number} avgDashboardByWorkbook raw average from the server
 * @returns {number}
 * @private
 */
private _getAvgDashboardByWorkbook(avgDashboardByWorkbook: number): number {
  const precision = 1;
  return _.ceil(avgDashboardByWorkbook, precision);
}
/**
 * Builds the pie-chart option from the template, filling the legend and
 * series with one entry per book type.
 * @param {Object} bookItems book counts keyed by book type
 * @returns {any}
 * @private
 */
private _getPieOption(bookItems: object) {
  // Work on a copy so the template option stays pristine.
  const option = _.cloneDeep(this._pieOption);
  Object.keys(bookItems).forEach((key) => {
    if (key) {
      // Localized label for this book type.
      const label = this._getBookLabel(key);
      option.legend.data.push(label);
      option.series[0].data.push({value: bookItems[key], name: label});
    }
  });
  return option;
}
/**
 * Builds the line-chart option from the template with the given axis
 * categories and series values.
 * @param rows x-axis categories
 * @param datas series values
 * @returns {Object}
 * @private
 */
private _getLineOption(rows: any, datas: any): object {
  // Work on a copy so the template option stays pristine.
  const option = _.cloneDeep(this._lineOption);
  option.xAxis.data = rows;
  option.series[0].data = datas;
  return option;
}
/**
 * Localized display label for a book type.
 * @param {string} item book type key ('workbook' | 'notebook' | 'workbench' | 'folder')
 * @returns {string} localized label; the raw key for unknown types
 * @private
 */
private _getBookLabel(item: string): string {
  const messageKeys: { [type: string]: string } = {
    workbook: 'msg.spaces.spaces.detail.statistics.ui.workbook',
    notebook: 'msg.spaces.spaces.detail.statistics.ui.notebook',
    workbench: 'msg.spaces.spaces.detail.statistics.ui.workbench',
    folder: 'msg.spaces.spaces.detail.statistics.ui.folder'
  };
  const messageKey = messageKeys[item];
  // Fix: the original switch had no default case and returned undefined for
  // unknown book types despite the declared string return type.
  return messageKey ? this.translateService.instant(messageKey) : item;
}
/**
 * Request parameters for the statistics call.
 * @returns {Object} {timeUnit, from, to}
 * @private
 */
private _getStatisticsParams(): object {
  // Overall view: monthly buckets from workspace creation until now.
  if (this._selectedPeriod.value === 'OVERALL') {
    return {
      timeUnit: 'month',
      from: this._workspace.createdTime,
      to: moment().format('YYYY-MM-DDTHH:mm:ss') + '.000Z'
    };
  }
  // Monthly view: daily buckets over the selected range.
  return {
    timeUnit: 'day',
    from: this._selectedRange.startTime,
    to: this._selectedRange.endTime
  };
}
/**
 * Builds the list of selectable month ranges for this workspace, newest
 * first, from the workspace creation month up to the current month.
 * @returns {Array} entries of shape {label, overall, startTime, endTime}
 * @private
 */
private _getPeriodRangeList() {
  // Resulting range list (newest entry first — entries are unshifted).
  const rangeList = [];
  // Cursor starting at the workspace creation time.
  let startTime = moment(this._workspace.createdTime);
  // Upper bound: the current time.
  const endTime = moment();
  // Loop while the cursor month precedes the current month.
  let loopFl: boolean = (startTime.format('YYYY-MM') < endTime.format('YYYY-MM'));
  // make rangeList
  while (loopFl) {
    rangeList.unshift({
      label: startTime.format('MMMM YYYY'),
      overall: startTime.format('D MMM YYYY'),
      // NOTE(review): moment's startOf()/endOf() mutate the instance in
      // place, so after these two lines `startTime` sits at the end of the
      // month; the add(1, 'month') below relies on that. Keep the order.
      startTime: startTime.startOf('month').format('YYYY-MM-DDTHH:mm:ss') + '.000Z',
      endTime: startTime.endOf('month').format('YYYY-MM-DDTHH:mm:ss') + '.5959Z'
    });
    // Advance the cursor by one month.
    startTime = startTime.add(1, 'month');
    // Re-evaluate the loop condition.
    loopFl = startTime.format('YYYY-MM') < endTime.format('YYYY-MM');
  }
  // Entry for the current (most recent) month.
  rangeList.unshift({
    label: moment().format('MMMM YYYY'),
    overall: moment().format('D MMM YYYY'),
    startTime: moment().startOf('month').format('YYYY-MM-DDTHH:mm:ss') + '.000Z',
    // NOTE(review): '.5959Z' looks like a typo for a seconds/millis suffix —
    // confirm the timestamp format the backend expects.
    endTime: moment().format('YYYY-MM-DDTHH:mm:ss') + '.5959Z'
  });
  return rangeList;
}
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Private Method - init
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
/**
 * Initializes the chart option templates, the period dropdown and the
 * statistics state.
 * @private
 */
private _initView(): void {
  // Option template for the item-distribution pie chart.
  this._pieOption = {
    tooltip: {
      formatter: '{c} ({d}%)'
    },
    legend: {
      orient: 'horizontal',
      y: 'bottom',
      data: []
    },
    series: [
      {
        // Draw slices counter-clockwise, largest value first.
        clockwise: false,
        type: 'pie',
        radius: ['50%', '70%'],
        label: {
          normal: {
            show: false,
            position: 'center'
          },
        },
        data: []
      }
    ]
  };
  // Option template for the user-access line chart.
  this._lineOption = {
    color: ['#2D5FDB'],
    xAxis: {
      type: 'category',
      data: []
    },
    yAxis: {
      type: 'value'
    },
    series: [{
      data: [],
      type: 'line'
    }]
  };
  // Selectable period options (monthly / overall).
  this._periodList = [
    {label: this.translateService.instant('msg.spaces.shared.detail.statistics.monthly'), value: 'MONTHLY'},
    {label: this.translateService.instant('msg.spaces.shared.detail.statistics.overall'), value: 'OVERALL'},
  ];
  // Monthly is the default period.
  this._selectedPeriod = this._periodList[0];
  // The range list is populated later; no range selected yet.
  this._rangeList = [];
  this._selectedRange = null;
  // Reset the statistics counters.
  this._initStatistics();
}
/**
 * Resets the statistics-related fields to their initial values.
 * @private
 */
private _initStatistics(): void {
  this.countFavoriteWorkspace = 0;
  this.avgDashboardByWorkBook = 0;
  this.bookItems = {};
}
} | the_stack |
import { Point, Rectangle } from '@pixi/math';
import { Plugin } from './Plugin';
import ease from '../ease';
import type { Drag } from './Drag';
import type { IDecelerateOptions } from './Decelerate';
import type { Pinch } from './Pinch';
import type { Viewport } from '../Viewport';
/** Options for {@link Bounce}. */
export interface IBounceOptions {
    /** "all", "horizontal", "vertical", or combination of "top", "bottom", "right", "left" (e.g., 'top-bottom-right') */
    sides?:
        'all'
        | 'horizontal'
        | 'vertical'
        | string;

    /** Friction to apply to the decelerate plugin while it is active (0..1, lower stops faster) */
    friction?: number;

    /** Time in ms to finish bounce */
    time?: number;

    /** Use this bounceBox instead of (0, 0, viewport.worldWidth, viewport.worldHeight) */
    bounceBox?: Rectangle | null;

    /** Ease function or name (see http://easings.net/ for supported names) */
    ease?: any;

    /** (top/bottom/center and left/right/center, or center) where to place world if too small for screen */
    underflow?: 'center' | string;
}
/** In-flight bounce animation state along one axis. */
export interface IBounceState {
    /** Elapsed time (ms) since the bounce started */
    time: number;

    /** Starting coordinate */
    start: number;

    /** Change in coordinate through bounce (end - start) */
    delta: number;

    /** Ending coordinate */
    end: number;
}
/** Defaults merged under user-supplied options in the Bounce constructor. */
const DEFAULT_BOUNCE_OPTIONS: Required<IBounceOptions> = {
    sides: 'all',
    friction: 0.5,
    time: 150,
    ease: 'easeInOutSine',
    underflow: 'center',
    bounceBox: null
};
/**
 * Plugin that eases the viewport back inside the world boundaries (or an
 * explicit bounceBox) once user interaction has ended.
 *
 * NOTE(review): the original doc tags named events 'bounce-start-x' /
 * 'bounce.end-x' etc.; the code actually emits the names below.
 *
 * @fires bounce-x-start
 * @fires bounce-x-end
 * @fires bounce-y-start
 * @fires bounce-y-end
 * @public
 */
export class Bounce extends Plugin
{
    /** The options passed to initialize this plugin, cannot be modified again. */
    public readonly options: Readonly<Required<IBounceOptions>>;

    /** Holds whether to bounce from left side. */
    public readonly left: boolean;

    /** Holds whether to bounce from top side. */
    public readonly top: boolean;

    /** Holds whether to bounce from right side. */
    public readonly right: boolean;

    /** Holds whether to bounce from bottom side. */
    public readonly bottom: boolean;

    /** Direction of underflow along x-axis. */
    public readonly underflowX: -1 | 0 | 1;

    /** Direction of underflow along y-axis. */
    public readonly underflowY: -1 | 0 | 1;

    /** Easing function resolved from options.ease */
    protected ease: any;

    /** Bounce state along x-axis (null when not bouncing) */
    protected toX!: IBounceState | null;

    /** Bounce state along y-axis (null when not bouncing) */
    protected toY!: IBounceState | null;

    /**
     * This is called by {@link Viewport.bounce}.
     */
    constructor(parent: Viewport, options: IBounceOptions = {})
    {
        super(parent);

        this.options = Object.assign({}, DEFAULT_BOUNCE_OPTIONS, options);
        this.ease = ease(this.options.ease, 'easeInOutSine');

        // Resolve the 'sides' option into the four per-side booleans.
        if (this.options.sides)
        {
            if (this.options.sides === 'all')
            {
                this.top = this.bottom = this.left = this.right = true;
            }
            else if (this.options.sides === 'horizontal')
            {
                this.right = this.left = true;
                this.top = this.bottom = false;
            }
            else if (this.options.sides === 'vertical')
            {
                this.left = this.right = false;
                this.top = this.bottom = true;
            }
            else
            {
                // Combination string, e.g. 'top-bottom-right'.
                this.top = this.options.sides.indexOf('top') !== -1;
                this.bottom = this.options.sides.indexOf('bottom') !== -1;
                this.left = this.options.sides.indexOf('left') !== -1;
                this.right = this.options.sides.indexOf('right') !== -1;
            }
        } else {
            this.left = this.top = this.right = this.bottom = false;
        }

        // Resolve 'underflow' into per-axis directions (-1 | 0 | 1).
        const clamp = this.options.underflow.toLowerCase();

        if (clamp === 'center')
        {
            this.underflowX = 0;
            this.underflowY = 0;
        }
        else
        {
            this.underflowX = (clamp.indexOf('left') !== -1) ? -1 : (clamp.indexOf('right') !== -1) ? 1 : 0;
            this.underflowY = (clamp.indexOf('top') !== -1) ? -1 : (clamp.indexOf('bottom') !== -1) ? 1 : 0;
        }

        this.reset();
    }

    /** Whether a bounce animation is currently running on either axis. */
    public isActive(): boolean
    {
        return this.toX !== null || this.toY !== null;
    }

    /** Pointer down cancels any in-flight bounce. */
    public down(): boolean
    {
        this.toX = this.toY = null;

        return false;
    }

    /** Pointer up may start a bounce if the viewport is out of bounds. */
    public up(): boolean
    {
        this.bounce();

        return false;
    }

    /**
     * Advances any running bounce animation; emits 'moved' while animating
     * and 'bounce-x-end' / 'bounce-y-end' when an axis finishes.
     */
    public update(elapsed: number): void
    {
        if (this.paused)
        {
            return;
        }

        this.bounce();

        if (this.toX)
        {
            const toX = this.toX;

            toX.time += elapsed;
            this.parent.emit('moved', { viewport: this.parent, type: 'bounce-x' });

            if (toX.time >= this.options.time)
            {
                this.parent.x = toX.end;
                this.toX = null;
                this.parent.emit('bounce-x-end', this.parent);
            }
            else
            {
                this.parent.x = this.ease(toX.time, toX.start, toX.delta, this.options.time);
            }
        }

        if (this.toY)
        {
            const toY = this.toY;

            toY.time += elapsed;
            this.parent.emit('moved', { viewport: this.parent, type: 'bounce-y' });

            if (toY.time >= this.options.time)
            {
                this.parent.y = toY.end;
                this.toY = null;
                this.parent.emit('bounce-y-end', this.parent);
            }
            else
            {
                this.parent.y = this.ease(toY.time, toY.start, toY.delta, this.options.time);
            }
        }
    }

    /** @internal Target x for a world smaller than the screen, per underflowX. */
    protected calcUnderflowX(): number
    {
        let x: number;

        switch (this.underflowX)
        {
            case -1:
                x = 0;
                break;
            case 1:
                x = (this.parent.screenWidth - this.parent.screenWorldWidth);
                break;
            default:
                x = (this.parent.screenWidth - this.parent.screenWorldWidth) / 2;
        }

        return x;
    }

    /** @internal Target y for a world smaller than the screen, per underflowY. */
    protected calcUnderflowY(): number
    {
        let y: number;

        switch (this.underflowY)
        {
            case -1:
                y = 0;
                break;
            case 1:
                y = (this.parent.screenHeight - this.parent.screenWorldHeight);
                break;
            default:
                y = (this.parent.screenHeight - this.parent.screenWorldHeight) / 2;
        }

        return y;
    }

    /**
     * Out-of-bounds check against the bounceBox (when set) or the world;
     * returns per-side flags plus the screen-space corner targets.
     */
    private oob(): Record<'left' | 'right' | 'top' | 'bottom', boolean> & Record<'topLeft' | 'bottomRight', Point>
    {
        const box = this.options.bounceBox;

        if (box)
        {
            // Missing box fields fall back to the world dimensions.
            const x1 = typeof box.x === 'undefined' ? 0 : box.x;
            const y1 = typeof box.y === 'undefined' ? 0 : box.y;
            const width = typeof box.width === 'undefined' ? this.parent.worldWidth : box.width;
            const height = typeof box.height === 'undefined' ? this.parent.worldHeight : box.height;

            return {
                left: this.parent.left < x1,
                right: this.parent.right > width,
                top: this.parent.top < y1,
                bottom: this.parent.bottom > height,
                topLeft: new Point(
                    x1 * this.parent.scale.x,
                    y1 * this.parent.scale.y
                ),
                bottomRight: new Point(
                    width * this.parent.scale.x - this.parent.screenWidth,
                    height * this.parent.scale.y - this.parent.screenHeight
                )
            };
        }

        return {
            left: this.parent.left < 0,
            right: this.parent.right > this.parent.worldWidth,
            top: this.parent.top < 0,
            bottom: this.parent.bottom > this.parent.worldHeight,
            topLeft: new Point(0, 0),
            bottomRight: new Point(
                this.parent.worldWidth * this.parent.scale.x - this.parent.screenWidth,
                this.parent.worldHeight * this.parent.scale.y - this.parent.screenHeight
            )
        };
    }

    /**
     * Starts a bounce animation on any out-of-bounds axis, unless the user is
     * still dragging/pinching or deceleration is active on that axis. Also
     * applies this plugin's friction to an active decelerate plugin so it
     * slows down faster once out of bounds.
     */
    public bounce(): void
    {
        if (this.paused)
        {
            return;
        }

        let oob;
        let decelerate: undefined | null | {
            percentChangeX?: number;
            percentChangeY?: number;
            x?: number;
            y?: number;
            options?: IDecelerateOptions
        } = this.parent.plugins.get('decelerate', true) as any;

        if (decelerate && (decelerate.x || decelerate.y))
        {
            // Only tighten friction once per decelerate run (while the
            // plugin is still using its own configured friction).
            if ((decelerate.x && decelerate.percentChangeX === decelerate.options?.friction) || (decelerate.y && decelerate.percentChangeY === decelerate.options?.friction))
            {
                oob = this.oob();
                if ((oob.left && this.left) || (oob.right && this.right))
                {
                    decelerate.percentChangeX = this.options.friction;
                }
                if ((oob.top && this.top) || (oob.bottom && this.bottom))
                {
                    decelerate.percentChangeY = this.options.friction;
                }
            }
        }
        const drag: Partial<Drag> = this.parent.plugins.get('drag', true) || {};
        const pinch: Partial<Pinch> = this.parent.plugins.get('pinch', true) || {};

        decelerate = decelerate || {};

        if (!drag?.active && !pinch?.active && ((!this.toX || !this.toY) && (!decelerate.x || !decelerate.y)))
        {
            oob = oob || this.oob();
            const topLeft = oob.topLeft;
            const bottomRight = oob.bottomRight;

            if (!this.toX && !decelerate.x)
            {
                let x = null;

                if (oob.left && this.left)
                {
                    x = (this.parent.screenWorldWidth < this.parent.screenWidth) ? this.calcUnderflowX() : -topLeft.x;
                }
                else if (oob.right && this.right)
                {
                    x = (this.parent.screenWorldWidth < this.parent.screenWidth) ? this.calcUnderflowX() : -bottomRight.x;
                }
                if (x !== null && this.parent.x !== x)
                {
                    this.toX = { time: 0, start: this.parent.x, delta: x - this.parent.x, end: x };
                    this.parent.emit('bounce-x-start', this.parent);
                }
            }
            if (!this.toY && !decelerate.y)
            {
                let y = null;

                if (oob.top && this.top)
                {
                    y = (this.parent.screenWorldHeight < this.parent.screenHeight) ? this.calcUnderflowY() : -topLeft.y;
                }
                else if (oob.bottom && this.bottom)
                {
                    y = (this.parent.screenWorldHeight < this.parent.screenHeight) ? this.calcUnderflowY() : -bottomRight.y;
                }
                if (y !== null && this.parent.y !== y)
                {
                    this.toY = { time: 0, start: this.parent.y, delta: y - this.parent.y, end: y };
                    this.parent.emit('bounce-y-start', this.parent);
                }
            }
        }
    }

    /** Clears any in-flight bounce and re-evaluates bounds immediately. */
    public reset(): void
    {
        this.toX = this.toY = null;

        this.bounce();
    }
}
import {Mutable, Class, Lazy, AnyTiming, Timing} from "@swim/util";
import {Affinity, MemberFastenerClass} from "@swim/component";
import {Length} from "@swim/math";
import {Expansion} from "@swim/style";
import {Look, ThemeAnimator, ExpansionThemeAnimator} from "@swim/theme";
import {
ModalOptions,
ModalState,
Modal,
PositionGestureInput,
PositionGesture,
ViewContextType,
View,
} from "@swim/view";
import {StyleAnimator, ViewNode, HtmlView} from "@swim/dom";
import {Graphics, VectorIcon} from "@swim/graphics";
import {FloatingButton} from "./FloatingButton";
import {ButtonItem} from "./ButtonItem";
import type {ButtonStackObserver} from "./ButtonStackObserver";
/**
 * An expandable floating-action-button stack: a primary {@link FloatingButton}
 * that, when pressed or dragged upward, discloses a vertical stack of
 * {@link ButtonItem} children. Participates in the modal system so the stack
 * collapses when the user interacts elsewhere.
 * @public
 */
export class ButtonStack extends HtmlView implements Modal {
  constructor(node: HTMLElement) {
    super(node);
    this.stackHeight = 0;
    // Bind DOM handlers once so on()/off() receive identical references.
    this.onClick = this.onClick.bind(this);
    this.onContextMenu = this.onContextMenu.bind(this);
    this.initButtonStack();
    this.initButton();
  }

  /** Applies the base styling for the stack's root element. */
  protected initButtonStack(): void {
    this.addClass("button-stack");
    this.display.setState("block", Affinity.Intrinsic);
    this.position.setState("relative", Affinity.Intrinsic);
    this.width.setState(56, Affinity.Intrinsic);
    this.height.setState(56, Affinity.Intrinsic);
    this.opacity.setState(1, Affinity.Intrinsic);
    this.userSelect.setState("none", Affinity.Intrinsic);
    this.cursor.setState("pointer", Affinity.Intrinsic);
  }

  /** Creates and appends the primary button child (keyed "button"). */
  protected initButton(): void {
    const button = this.createButton();
    if (button !== null) {
      this.appendChild(button, "button");
    }
  }

  override readonly observerType?: Class<ButtonStackObserver>;

  /** @internal Total pixel height of the disclosed stack; set by layoutStack. */
  readonly stackHeight: number;

  /** Factory for the primary button; override to customize. */
  protected createButton(): HtmlView | null {
    return FloatingButton.create();
  }

  // Press/drag gesture on the primary button: dragging up discloses the
  // stack proportionally; releasing snaps to expanded or collapsed.
  @PositionGesture<ButtonStack, HtmlView>({
    didMovePress(input: PositionGestureInput, event: Event | null): void {
      if (!input.defaultPrevented && !this.owner.disclosure.expanded) {
        const stackHeight = this.owner.stackHeight;
        // Upward drag distance mapped onto [0, 1] over half the stack height.
        const phase = Math.min(Math.max(0, -(input.y - input.y0) / (0.5 * stackHeight)), 1);
        this.owner.disclosure.setPhase(phase);
        if (phase > 0.1) {
          input.clearHoldTimer();
          if (!this.owner.disclosure.expanding) {
            this.owner.disclosure.setState(this.owner.disclosure.value.asExpanding());
          }
        }
      }
    },
    didEndPress(input: PositionGestureInput, event: Event | null): void {
      if (!input.defaultPrevented) {
        const phase = this.owner.disclosure.getPhase();
        if (input.t - input.t0 < input.holdDelay) {
          // Quick tap: toggle based on current phase.
          if (phase < 0.1 || this.owner.disclosure.expanded) {
            this.owner.disclosure.collapse();
          } else {
            this.owner.disclosure.expand();
          }
        } else {
          // Long drag: snap to whichever state is closer.
          if (phase < 0.5) {
            this.owner.disclosure.collapse();
          } else if (phase >= 0.5) {
            this.owner.disclosure.expand();
          }
        }
      }
    },
    didCancelPress(input: PositionGestureInput, event: Event | null): void {
      if (input.buttons === 2) {
        // Right-button cancel toggles the stack.
        this.owner.disclosure.toggle();
      } else {
        const phase = this.owner.disclosure.getPhase();
        if (phase < 0.1 || this.owner.disclosure.expanded) {
          this.owner.disclosure.collapse();
        } else {
          this.owner.disclosure.expand();
        }
      }
    },
    didLongPress(input: PositionGestureInput): void {
      input.preventDefault();
      this.owner.disclosure.toggle();
    },
  })
  readonly gesture!: PositionGesture<this, HtmlView>;
  static readonly gesture: MemberFastenerClass<ButtonStack, "gesture">;

  // Disclosure animator driving expand/collapse; forwards lifecycle
  // callbacks to the will/on/did hooks below.
  @ThemeAnimator<ButtonStack, Expansion>({
    type: Expansion,
    value: Expansion.collapsed(),
    updateFlags: View.NeedsLayout,
    willExpand(): void {
      this.owner.willExpand();
      this.owner.onExpand();
    },
    didExpand(): void {
      this.owner.didExpand();
    },
    willCollapse(): void {
      this.owner.willCollapse();
      this.owner.onCollapse();
    },
    didCollapse(): void {
      this.owner.didCollapse();
    },
  })
  readonly disclosure!: ExpansionThemeAnimator<this>;

  /** Gap in px between the primary button and the first item. */
  @ThemeAnimator({type: Number, value: 28, updateFlags: View.NeedsLayout})
  readonly buttonSpacing!: ThemeAnimator<this, number>;

  /** Gap in px between consecutive items. */
  @ThemeAnimator({type: Number, value: 20, updateFlags: View.NeedsLayout})
  readonly itemSpacing!: ThemeAnimator<this, number>;

  // Opacity animator; show()/hide() completion is detected via didTransition.
  @StyleAnimator<ButtonStack, number | undefined>({
    propertyNames: "opacity",
    type: Number,
    didTransition(opacity: number | undefined): void {
      if (opacity === 1) {
        this.owner.didShowStack();
      } else if (opacity === 0) {
        this.owner.didHideStack();
      }
    },
  })
  override readonly opacity!: StyleAnimator<this, number | undefined>;

  get modalView(): View | null {
    return null;
  }

  get modalState(): ModalState {
    return this.disclosure.modalState! as ModalState;
  }

  get modality(): boolean | number {
    return this.disclosure.phase!;
  }

  showModal(options: ModalOptions, timing?: AnyTiming | boolean): void {
    this.disclosure.expand(timing);
  }

  hideModal(timing?: AnyTiming | boolean): void {
    this.disclosure.collapse(timing);
  }

  /** The primary button child, or null if absent. */
  get button(): HtmlView | null {
    const childView = this.getChild("button");
    return childView instanceof HtmlView ? childView : null;
  }

  /** Icon shown on the primary button while the stack is expanded. */
  get closeIcon(): Graphics {
    return ButtonStack.closeIcon;
  }

  /** All ButtonItem children, in DOM order. */
  get items(): ReadonlyArray<ButtonItem> {
    const childNodes = this.node.childNodes;
    const children = [];
    for (let i = 0, n = childNodes.length; i < n; i += 1) {
      const childView = (childNodes[i] as ViewNode).view;
      if (childView instanceof ButtonItem) {
        children.push(childView);
      }
    }
    return children;
  }

  /** Inserts an item; index is relative to existing items (defaults to end). */
  insertItem(item: ButtonItem, index?: number, key?: string): void {
    if (index === void 0) {
      index = this.node.childNodes.length - 1;
    }
    // +1 skips the primary button node at position 0.
    this.insertChild(item.node, this.node.childNodes[1 + index] || null, key);
  }

  /** Removes all ButtonItem children (iterates backwards while mutating). */
  removeItems(): void {
    const childNodes = this.node.childNodes;
    for (let i = childNodes.length - 1; i >= 0; i -= 1) {
      const childView = (childNodes[i] as ViewNode).view;
      if (childView instanceof ButtonItem) {
        this.removeChild(childView);
      }
    }
  }

  protected override onMount(): void {
    super.onMount();
    this.on("click", this.onClick);
    this.on("contextmenu", this.onContextMenu);
  }

  protected override onUnmount(): void {
    this.off("click", this.onClick);
    this.off("contextmenu", this.onContextMenu);
    super.onUnmount();
  }

  protected override onLayout(viewContext: ViewContextType<this>): void {
    super.onLayout(viewContext);
    this.layoutStack();
    const modalService = this.modalProvider.service;
    if (modalService !== void 0 && modalService !== null) {
      modalService.updateModality();
    }
  }

  /**
   * Positions each item above the primary button, scaled by the current
   * disclosure phase, and records the total stack height.
   */
  protected layoutStack(): void {
    const phase = this.disclosure.getPhase();
    const childNodes = this.node.childNodes;
    const childCount = childNodes.length;
    const button = this.button;
    // Items are stacked with descending z-index so earlier items sit on top.
    let zIndex = childCount - 1;
    let itemIndex = 0;
    let stackHeight = 0;
    let y: number;
    if (button !== null) {
      button.zIndex.setState(childCount, Affinity.Intrinsic);
      const buttonHeight = button !== null ? button.height.value : void 0;
      y = buttonHeight instanceof Length
        ? buttonHeight.pxValue()
        : button.node.offsetHeight;
    } else {
      y = 0;
    }
    const buttonSpacing = this.buttonSpacing.value;
    const itemSpacing = this.itemSpacing.value;
    for (let i = 0; i < childCount; i += 1) {
      const childView = (childNodes[i] as ViewNode).view;
      if (childView instanceof ButtonItem) {
        if (itemIndex === 0) {
          stackHeight += buttonSpacing;
          y += buttonSpacing;
        } else {
          stackHeight += itemSpacing;
          y += itemSpacing;
        }
        const itemHeight = childView.height.value;
        const dy = itemHeight instanceof Length
          ? itemHeight.pxValue()
          : childView.node.offsetHeight;
        childView.display.setState(phase === 0 ? "none" : "flex", Affinity.Intrinsic);
        // Items slide out proportionally to the disclosure phase.
        childView.bottom.setState(phase * y, Affinity.Intrinsic);
        childView.zIndex.setState(zIndex, Affinity.Intrinsic);
        y += dy;
        stackHeight += dy;
        itemIndex += 1;
        zIndex -= 1;
      }
    }
    (this as Mutable<this>).stackHeight = stackHeight;
  }

  protected override onInsertChild(childView: View, targetView: View | null): void {
    super.onInsertChild(childView, targetView);
    const childKey = childView.key;
    if (childKey === "button" && childView instanceof HtmlView) {
      this.onInsertButton(childView);
    } else if (childView instanceof ButtonItem) {
      this.onInsertItem(childView);
    }
  }

  protected override onRemoveChild(childView: View): void {
    const childKey = childView.key;
    if (childKey === "button" && childView instanceof HtmlView) {
      this.onRemoveButton(childView);
    } else if (childView instanceof ButtonItem) {
      this.onRemoveItem(childView);
    }
    super.onRemoveChild(childView);
  }

  /** Attaches the gesture and syncs the button's icon with disclosure state. */
  protected onInsertButton(button: HtmlView): void {
    this.gesture.setView(button);
    if (button instanceof FloatingButton) {
      button.disclosure.setState(Expansion.expanded(), Affinity.Intrinsic);
      if (this.disclosure.expanded || this.disclosure.expanding) {
        button.pushIcon(this.closeIcon);
      }
    }
    button.zIndex.setState(0, Affinity.Intrinsic);
  }

  protected onRemoveButton(button: HtmlView): void {
    this.gesture.setView(null);
  }

  /** Applies the absolute positioning new items need within the stack. */
  protected onInsertItem(item: ButtonItem): void {
    item.position.setState("absolute", Affinity.Intrinsic);
    item.right.setState(8, Affinity.Intrinsic);
    item.bottom.setState(8, Affinity.Intrinsic);
    item.left.setState(8, Affinity.Intrinsic);
    item.zIndex.setState(0, Affinity.Intrinsic);
  }

  protected onRemoveItem(item: ButtonItem): void {
    // hook
  }

  protected willExpand(): void {
    const observers = this.observers;
    for (let i = 0, n = observers.length; i < n; i += 1) {
      const observer = observers[i]!;
      if (observer.buttonStackWillExpand !== void 0) {
        observer.buttonStackWillExpand(this);
      }
    }
  }

  /** Swaps in the close icon and presents the stack as a modal. */
  protected onExpand(): void {
    const button = this.button;
    if (button instanceof FloatingButton) {
      const timing = this.disclosure.timing;
      button.pushIcon(this.closeIcon, timing !== null ? timing : void 0);
    }
    this.modalProvider.presentModal(this);
  }

  protected didExpand(): void {
    const observers = this.observers;
    for (let i = 0, n = observers.length; i < n; i += 1) {
      const observer = observers[i]!;
      if (observer.buttonStackDidExpand !== void 0) {
        observer.buttonStackDidExpand(this);
      }
    }
  }

  protected willCollapse(): void {
    const observers = this.observers;
    for (let i = 0, n = observers.length; i < n; i += 1) {
      const observer = observers[i]!;
      if (observer.buttonStackWillCollapse !== void 0) {
        observer.buttonStackWillCollapse(this);
      }
    }
  }

  /** Dismisses the modal and restores the primary button's previous icon. */
  protected onCollapse(): void {
    this.modalProvider.dismissModal(this);
    const button = this.button;
    if (button instanceof FloatingButton && button.iconCount > 1) {
      const timing = this.disclosure.timing;
      button.popIcon(timing !== null ? timing : void 0);
    }
  }

  protected didCollapse(): void {
    const observers = this.observers;
    for (let i = 0, n = observers.length; i < n; i += 1) {
      const observer = observers[i]!;
      if (observer.buttonStackDidCollapse !== void 0) {
        observer.buttonStackDidCollapse(this);
      }
    }
  }

  /** Fades the stack in; didShowStack fires when the transition completes. */
  show(timing?: AnyTiming | boolean): void {
    if (this.opacity.state !== 1) {
      if (timing === void 0 || timing === true) {
        timing = this.getLookOr(Look.timing, false);
      } else {
        timing = Timing.fromAny(timing);
      }
      this.willShowStack();
      if (timing !== false) {
        this.opacity.setState(1, timing, Affinity.Intrinsic);
      } else {
        this.opacity.setState(1, Affinity.Intrinsic);
        this.didShowStack();
      }
    }
  }

  protected willShowStack(): void {
    const observers = this.observers;
    for (let i = 0, n = observers.length; i < n; i += 1) {
      const observer = observers[i]!;
      if (observer.buttonStackWillShow !== void 0) {
        observer.buttonStackWillShow(this);
      }
    }
    this.display("block");
  }

  protected didShowStack(): void {
    this.requireUpdate(View.NeedsLayout);
    const observers = this.observers;
    for (let i = 0, n = observers.length; i < n; i += 1) {
      const observer = observers[i]!;
      if (observer.buttonStackDidShow !== void 0) {
        observer.buttonStackDidShow(this);
      }
    }
  }

  /** Fades the stack out; didHideStack fires when the transition completes. */
  hide(timing?: AnyTiming | boolean): void {
    if (this.opacity.state !== 0) {
      if (timing === void 0 || timing === true) {
        timing = this.getLookOr(Look.timing, false);
      } else {
        timing = Timing.fromAny(timing);
      }
      this.willHideStack();
      if (timing !== false) {
        this.opacity.setState(0, timing, Affinity.Intrinsic);
      } else {
        this.opacity.setState(0, Affinity.Intrinsic);
        this.didHideStack();
      }
    }
  }

  protected willHideStack(): void {
    const observers = this.observers;
    for (let i = 0, n = observers.length; i < n; i += 1) {
      const observer = observers[i]!;
      if (observer.buttonStackWillHide !== void 0) {
        observer.buttonStackWillHide(this);
      }
    }
  }

  protected didHideStack(): void {
    this.display("none");
    this.requireUpdate(View.NeedsLayout);
    const observers = this.observers;
    for (let i = 0, n = observers.length; i < n; i += 1) {
      const observer = observers[i]!;
      if (observer.buttonStackDidHide !== void 0) {
        observer.buttonStackDidHide(this);
      }
    }
  }

  /** Keeps clicks on the primary button from bubbling past the stack. */
  protected onClick(event: MouseEvent): void {
    if (event.target === this.button?.node) {
      event.stopPropagation();
    }
  }

  protected onContextMenu(event: MouseEvent): void {
    event.preventDefault();
  }

  /** Lazily-constructed "X" close icon shared by all stacks. */
  @Lazy
  static get closeIcon(): Graphics {
    return VectorIcon.create(24, 24, "M19,6.4L17.6,5L12,10.6L6.4,5L5,6.4L10.6,12L5,17.6L6.4,19L12,13.4L17.6,19L19,17.6L13.4,12Z");
  }
}
import nextTick = require('./nextTick');
/** Generic listener signature used for resolution/progress callback lists. */
type Callback = (...args: any[]) => void;
/**
 * Duck-types a value as a promise: any non-null value with a callable `then`.
 * @param value candidate value
 * @returns true if `value` is thenable
 */
function isPromise(value: any): boolean {
	// Fix: coerce to boolean — the original returned the raw falsy value
	// (null/undefined/0/'') instead of false, violating the declared
	// boolean return type.
	return Boolean(value) && typeof value.then === 'function';
}
/**
 * Invokes each callback in order with the given arguments. Iteration stops at
 * the first falsy entry (including the end of the array), matching the
 * original sparse-scan semantics.
 */
function runCallbacks(callbacks: Array<Callback>, ...args: any[]): void {
	for (var index = 0; index < callbacks.length; ++index) {
		var listener = callbacks[index];
		if (!listener) {
			break;
		}
		listener.apply(null, args);
	}
}
/**
* A Promise represents the result of an asynchronous operation. When writing a function that performs an asynchronous
* operation, instead of writing the function to accept a callback function, you should instead write it to return a
* Promise that is fulfilled once the asynchronous operation is completed. Returning a promise instead of accepting
* a callback provides a standard mechanism for handling asynchronous operations that offers the following benefits
* over a normal callback function:
*
* 1. Multiple callbacks can be added for a single function invocation;
* 2. Other asynchronous operations can be easily chained to the response, avoiding callback pyramids;
* 3. Asynchronous operations can be canceled in flight in a standard way if their results are no longer needed by the
* rest of the application.
*
* The Promise class is a modified, extended version of standard EcmaScript 6 promises. This implementation
* intentionally deviates from the ES6 2014-05-22 draft in the following ways:
*
* 1. `Promise.race` is a worthless API with one use case, so is not implemented.
* 2. `Promise.all` accepts an object in addition to an array.
* 3. Asynchronous operations can transmit partial progress information through a third `progress` method passed to the
* initializer. Progress listeners can be added by passing a third `onProgress` callback to `then`, or through the
* extra `progress` method exposed on promises.
* 4. Promises can be canceled by calling the `cancel` method of a promise.
*/
class Promise<T> {
/**
* Converts an iterable object containing promises into a single promise that resolves to a new iterable object
* containing all the fulfilled properties of the original object. Properties that do not contain promises are
* passed through as-is.
*
* @example
* Promise.all([ Promise.resolve('foo'), 'bar' ]).then(function (value) {
* value[0] === 'foo'; // true
* value[1] === 'bar'; // true
* });
*
* @example
* Promise.all({
* foo: Promise.resolve('foo'),
* bar: 'bar'
* }).then(function (value) {
* value.foo === 'foo'; // true
* value.bar === 'bar'; // true
* });
*/
static all<T>(iterable: { [key: string]: Promise.Thenable<T> | T; }): Promise<{ [key: string]: T; }>;
static all<T>(iterable: Array<Promise.Thenable<T> | T>): Promise<T[]>;
static all(iterable: any): Promise<any> {
return new this(function (resolve, reject, progress, setCanceler) {
setCanceler(function (reason: Error) {
walkIterable(function (key: string, value: any) {
if (value && value.cancel) {
value.cancel(reason);
}
});
throw reason;
});
function fulfill(key: string, value: any) {
values[key] = value;
progress(values);
++complete;
finish();
}
function finish() {
if (populating || complete < total) {
return;
}
resolve(values);
}
function processItem(key: string, value: any) {
++total;
if (isPromise(value)) {
value.then(fulfill.bind(null, key), reject);
}
else {
fulfill(key, value);
}
}
function walkIterable(callback: (key: string, value: any) => void) {
if (Array.isArray(iterable)) {
for (var i = 0, j = iterable.length; i < j; ++i) {
if (i in iterable) {
callback(String(i), iterable[i]);
}
}
}
else {
for (var key in iterable) {
callback(key, iterable[key]);
}
}
}
var values: any = Array.isArray(iterable) ? [] : {};
var complete = 0;
var total = 0;
var populating = true;
walkIterable(processItem);
populating = false;
finish();
});
}
/**
* Creates a new promise that is pre-rejected with the given error.
*/
static reject(error?: Error): Promise<any> {
return new this<any>(function (resolve, reject) {
reject(error);
});
}
/**
* Creates a new promise that is pre-resolved with the given value. If the passed value is already a promise, it
* will be returned as-is.
*/
static resolve<T>(value?: Promise.Thenable<T> | T): Promise<T> {
if (value instanceof Promise) {
return value;
}
return new this<T>(function (resolve) {
resolve(value);
});
}
/**
 * Creates a new Promise.
 *
 * @constructor
 *
 * @param initializer
 * The initializer function is called immediately when the Promise is instantiated. It is responsible for starting
 * the asynchronous operation when it is invoked.
 *
 * The initializer must call either the passed `resolve` function when the asynchronous operation has completed
 * successfully, or the `reject` function when the operation fails, unless the `canceler` is called first.
 *
 * The `progress` function can also be called zero or more times to provide information about the process of the
 * operation to any interested consumers.
 *
 * Finally, the initializer can register a canceler function that cancels the asynchronous operation by passing
 * the canceler function to the `setCanceler` function.
 */
constructor(
initializer: (
resolve?: (value?: Promise.Thenable<T> | T) => void,
reject?: (error?: Error) => void,
progress?: (data?: any) => void,
setCanceler?: (canceler: Promise.Canceler) => void
) => void
) {
/**
 * The current state of this promise.
 */
var state: Promise.State = Promise.State.PENDING;
Object.defineProperty(this, 'state', {
get: function () {
return state;
}
});
/**
 * Whether or not this promise is in a resolved state.
 */
function isResolved(): boolean {
return state !== Promise.State.PENDING || isChained;
}
/**
 * If true, the resolution of this promise is chained to another promise.
 */
var isChained: boolean = false;
/**
 * The resolved value for this promise.
 */
var resolvedValue: T | Error;
/**
 * Callbacks that should be invoked once the asynchronous operation has completed.
 * Note: `whenFinished` is replaced with `enqueue` once the promise settles, so
 * late listeners run on the next tick instead of being queued indefinitely.
 */
var callbacks: Array<() => void> = [];
var whenFinished = function (callback: () => void) {
callbacks.push(callback);
};
/**
 * Callbacks that should be invoked when the asynchronous operation has progressed.
 */
var progressCallbacks: Array<(data?: any) => void> = [];
var whenProgress = function (callback: (data?: any) => void) {
progressCallbacks.push(callback);
};
/**
 * A canceler function that will be used to cancel resolution of this promise.
 */
var canceler: Promise.Canceler;
/**
 * Queues a callback for execution during the next round through the event loop, in a way such that if a
 * new execution is queued for this promise during queue processing, it will execute immediately instead of
 * being forced to wait through another turn through the event loop.
 * TODO: Ensure this is actually necessary for optimal execution and does not break next-turn spec compliance.
 *
 * @method
 * @param callback The callback to execute on the next turn through the event loop.
 */
var enqueue = (function () {
function originalSchedule() {
// While a drain is pending/running, scheduling is a no-op; callbacks pushed
// meanwhile are picked up by the same drain loop below.
schedule = function () {};
nextTick(function run() {
try {
var callback: Callback;
while ((callback = queue.shift())) {
callback();
}
}
finally {
// If someone threw an error, allow it to bubble, then continue queue execution for the
// remaining items
if (queue.length) {
run();
}
else {
schedule = originalSchedule;
}
}
});
}
var queue: Array<Callback> = [];
var schedule = originalSchedule;
return function (callback: Callback) {
queue.push(callback);
schedule();
};
})();
/**
 * Resolves this promise.
 *
 * @param newState The resolved state for this promise.
 * @param {T|Error} value The resolved value for this promise.
 */
var resolve = function (newState: Promise.State, value: any) {
if (isResolved()) {
return;
}
if (isPromise(value)) {
if (value === this) {
settle(Promise.State.REJECTED, new TypeError('Cannot chain a promise to itself'));
return;
}
try {
value.then(
settle.bind(null, Promise.State.FULFILLED),
settle.bind(null, Promise.State.REJECTED)
);
isChained = true;
// If this promise resolution has been chained to another promise A, and then someone calls `then`
// on this promise to create promise B, then calls `cancel` on promise B, it needs to propagate
// back to cancel the chained promise A
canceler = value.cancel;
}
catch (error) {
settle(Promise.State.REJECTED, error);
return;
}
}
else {
settle(newState, value);
}
}.bind(this);
/**
 * Settles this promise.
 *
 * @param newState The resolved state for this promise.
 * @param {T|Error} value The resolved value for this promise.
 */
function settle(newState: Promise.State, value: any) {
if (state !== Promise.State.PENDING) {
return;
}
state = newState;
resolvedValue = value;
whenFinished = enqueue;
// Progress can no longer be reported once the promise has settled.
whenProgress = function () {};
enqueue(function () {
runCallbacks(callbacks);
callbacks = progressCallbacks = null;
});
}
// Cancels a still-pending promise, giving any registered canceler a chance to
// settle it with a fallback value before it is rejected.
this.cancel = function (reason?) {
// Settled promises are 100% finished and cannot be cancelled
if (state !== Promise.State.PENDING) {
return;
}
if (!reason) {
reason = new Error('Cancelled');
reason.name = 'CancelError';
}
// The promise is non-cancellable so just reject it for consistency (this will at least bypass any more
// code that would run in chained success callbacks)
if (!canceler) {
settle(Promise.State.REJECTED, reason);
return Promise.reject(reason);
}
try {
// The canceller has a last opportunity to fulfill the promise with some data (for example something
// from an outdated local cache), or else should throw the reason
resolve(Promise.State.FULFILLED, canceler(reason));
}
catch (error) {
settle(Promise.State.REJECTED, error);
// If a promise was chained so another promise adopted this `this.cancel` as its canceler, we need to
// provide a rejection, otherwise that other promise will believe the cancelation resulted in a
// successful value of `undefined`
// TODO: There should be a better way to do this
// TODO: Test
return Promise.reject(error);
}
};
// Creates a chained promise whose fate is derived from this one through the
// given handlers.
this.then = function <U>(
onFulfilled?: (value?: T) => Promise.Thenable<U> | U,
onRejected?: (error?: Error) => Promise.Thenable<U> | U,
onProgress?: (data?: any) => any
): Promise<U> {
return new Promise<U>(function (resolve, reject, progress, setCanceler) {
setCanceler(function (reason) {
if (canceler) {
resolve(canceler(reason));
}
else {
throw reason;
}
});
whenProgress(function (data) {
try {
if (typeof onProgress === 'function') {
progress(onProgress(data));
}
else {
progress(data);
}
}
catch (error) {
// A handler may throw a 'StopProgressPropagation' error to suppress
// forwarding progress to the chained promise; anything else bubbles.
if (error.name !== 'StopProgressPropagation') {
throw error;
}
}
});
whenFinished(function () {
var callback: (value?: any) => any = state === Promise.State.REJECTED ? onRejected : onFulfilled;
if (typeof callback === 'function') {
try {
resolve(callback(resolvedValue));
}
catch (error) {
reject(error);
}
}
else if (state === Promise.State.REJECTED) {
reject(<Error> resolvedValue);
}
else {
// Assume T and U are compatible types
resolve(<any> resolvedValue);
}
});
});
};
// Run the initializer synchronously; an exception thrown here rejects the promise.
try {
initializer(
resolve.bind(null, Promise.State.FULFILLED),
resolve.bind(null, Promise.State.REJECTED),
function (data) {
if (state === Promise.State.PENDING) {
enqueue(runCallbacks.bind(null, progressCallbacks, data));
}
},
function (value) {
if (!isResolved()) {
canceler = value;
}
}
);
}
catch (error) {
settle(Promise.State.REJECTED, error);
}
}
/**
 * The current state of the promise.
 *
 * @readonly
 */
state: Promise.State;
/**
 * Cancels any pending asynchronous operation of the promise.
 *
 * @method
 * @param reason
 * A specific reason for failing the operation. If no reason is provided, a default `CancelError` error will be
 * used.
 * @param source
 * NOTE(review): `source` is declared here but the implementation assigned in the constructor only accepts
 * `reason` — confirm whether any caller still passes a second argument before removing it.
 */
cancel: (reason?: Error, source?: Promise<any>) => void;
/**
 * Registers a rejection handler; shorthand for `then(null, onRejected)`.
 *
 * @param onRejected Invoked with the error when the promise rejects; its
 * return value (or thrown error) settles the chained promise.
 */
catch<U>(onRejected: (error?: Error) => Promise.Thenable<U> | U): Promise<U> {
    return this.then<U>(null, onRejected);
}
/**
 * Adds a callback to the promise to be invoked regardless of whether or not the asynchronous operation
 * completed successfully. The callback can throw or return a new value to replace the original value, or if
 * it returns undefined, the original value or error will be passed through.
 */
finally<U>(onFulfilledOrRejected: () => Promise.Thenable<U> | U): Promise<U> {
    var computeResult = function (passThrough: () => U): Promise.Thenable<U> | U {
        var produced = onFulfilledOrRejected();
        if (produced === undefined) {
            // No replacement value: propagate the original outcome.
            return passThrough();
        }
        if (produced && (<Promise.Thenable<U>> produced).then) {
            // A thenable replacement: wait for it, falling back to the
            // original outcome if it resolves to undefined.
            return (<Promise.Thenable<U>> produced).then(function (resolved) {
                return resolved === undefined ? passThrough() : resolved;
            });
        }
        return produced;
    };
    return this.then<U>(
        function (value) {
            return computeResult(function (): any {
                return value;
            });
        },
        function (error) {
            return computeResult(function (): any {
                throw error;
            });
        }
    );
}
/**
 * Registers a progress observer; shorthand for `then(null, null, onProgress)`.
 *
 * @param onProgress Invoked with each progress payload emitted before the
 * promise settles.
 */
progress(onProgress: (data?: any) => any): Promise<T> {
    return this.then<T>(null, null, onProgress);
}
/**
 * Adds a callback to the promise to be invoked when the asynchronous operation completes successfully.
 *
 * @param onFulfilled Invoked with the resolved value; its return value (or thenable) settles the chained promise.
 * @param onRejected Invoked with the error instead of `onFulfilled` when the promise rejects.
 * @param onProgress Invoked with progress data emitted before the promise settles.
 * @returns A new promise chained to this one.
 */
then: <U>(
onFulfilled?: (value?: T) => Promise.Thenable<U> | U,
onRejected?: (error?: Error) => Promise.Thenable<U> | U,
onProgress?: (data?: any) => any
) => Promise<U>;
}
module Promise {
/**
 * A function registered via `setCanceler` that is given a chance to either
 * return a fallback fulfillment value or throw the cancellation reason.
 */
export interface Canceler {
(reason: Error): any;
}
/**
 * The Deferred class unwraps a promise in order to expose its internal state management functions.
 */
export class Deferred<T> {
/**
 * The underlying promise for the Deferred.
 */
promise: Promise<T>;
constructor(canceler?: Promise.Canceler) {
// The initializer runs synchronously, so the resolve/reject/progress
// members are populated before the constructor returns.
this.promise = new Promise<T>((resolve, reject, progress, setCanceler) => {
this.progress = progress;
this.reject = reject;
this.resolve = resolve;
canceler && setCanceler(canceler);
});
}
/**
 * Sends progress information for the underlying promise.
 *
 * @method
 * @param data Additional information about the asynchronous operation’s progress.
 */
progress: (data?: any) => void;
/**
 * Rejects the underlying promise with an error.
 *
 * @method
 * @param error The error that should be used as the fulfilled value for the promise.
 */
reject: (error?: Error) => void;
/**
 * Resolves the underlying promise with a value.
 *
 * @method
 * @param value The value that should be used as the fulfilled value for the promise.
 */
resolve: (value?: Promise.Thenable<T> | T) => void;
}
/**
 * The State enum represents the possible states of a promise.
 */
export enum State {
PENDING, // not yet settled
FULFILLED, // settled with a value
REJECTED // settled with an error
}
/**
 * The minimal thenable contract accepted wherever a promise-like value may be chained.
 */
export interface Thenable<T> {
then<U>(
onFulfilled?: (value?: T) => Promise.Thenable<U> | U,
onRejected?: (error?: Error) => Promise.Thenable<U> | U
): Promise.Thenable<U>;
}
}
export = Promise; | the_stack |
import { IEntityConfig, IField, IFieldUpdate } from '@materia/interfaces';
import { Entity } from './entity';
import { MateriaError } from '../error';
import { App } from '../app';
import { Field } from './field';
import { ModelStatic } from '../database/interface';
import { QueryGenerator } from './query-generator';
import { FindAllQuery } from './queries/findAll';
import { FindOneQuery } from './queries/findOne';
import { CreateQuery } from './queries/create';
import { UpdateQuery } from './queries/update';
import { DeleteQuery } from './queries/delete';
import { SQLQuery } from './queries/sql';
import { CustomQuery } from './queries/custom';
import { BelongsToManyOptions, BelongsToOptions, HasManyOptions } from 'sequelize/types';
/**
 * A database-backed entity. Extends Entity with schema management (columns,
 * primary-key and unique constraints) applied through the app's database
 * interface, and keeps a Sequelize model (`this.model`) in sync with the
 * entity definition, including its relations.
 */
export class DBEntity extends Entity {
type: string;
currentDiff: Array<any>;
currentDiffUndo: Array<any>;
public model: ModelStatic;
// Query ids generated automatically for every DB entity; user queries must not reuse them.
reservedQueries = [
'create', 'list', 'get', 'update', 'delete'
];
constructor(app: App) {
super(app, {
findAll: FindAllQuery,
findOne: FindOneQuery,
create: CreateQuery,
update: UpdateQuery,
delete: DeleteQuery,
custom: CustomQuery,
sql: SQLQuery
});
this.type = 'db';
this.currentDiff = [];
this.currentDiffUndo = [];
}
/**
 * Rebuilds the table's primary-key constraint after `field.primary` changes.
 * Drops the existing PK constraint, adds/removes `fieldobj` from the PK field
 * set, recreates the constraint, and (on MySQL) temporarily removes/restores
 * AUTO_INCREMENT around the drop. Mutates `field` in place (clears `primary`
 * and adjusts `unique`) so later pipeline steps do not re-apply them.
 *
 * @returns A promise chaining all the DDL steps.
 */
updatePrimary(field, fieldobj, options): Promise<any> {
let pks = this.getPK();
let p: Promise<any> = Promise.resolve();
let restore_ai: any = false;
// For mysql, it needs to remove auto increment before dropping pk
if (this.app.database.type == 'mysql') {
for (const pk of pks) {
if (pk.autoIncrement) {
const pkcpy: IFieldUpdate = Object.assign({}, pk);
pkcpy.autoIncrement = false;
restore_ai = pk.name;
p = this.updateField(pk.name, pkcpy, options);
}
}
}
// remove PK constraint
// NOTE(review): the first PK field's name is used to identify the constraint
// to drop — confirm this matches the database interface's expectations.
if (pks.length) {
const pk_name = pks[0].name;
p = p.then(() => {
return this.app.database.interface.dropConstraint(this.name, { field: pk_name, type: 'primary' });
});
}
// add or remove field in PK
if (field.primary) {
if (!pks.find(x => x.name == fieldobj.name)) {
pks.push(fieldobj);
}
if (field.unique) {
delete field.unique;
}
} else {
pks = pks.filter(x => x.name != fieldobj.name);
if (field.unique == undefined) {
field.unique = true;
}
if (field.unique == false) {
delete field.unique;
}
}
// reconstruct PK constraint
if (pks.length) {
const pks_names = pks.map(x => x.name);
p = p.then(() => {
return this.app.database.interface.addConstraint(this.name, { fields: pks_names, type: 'primary' });
});
}
// restore auto increment for mysql, if it's still in pk
if (this.app.database.type == 'mysql' && restore_ai) {
if (pks.find(x => x.name == restore_ai)) {
p = p.then(() => {
const pk = this.getField(restore_ai);
const pkcpy: IFieldUpdate = Object.assign({}, pk);
pkcpy.autoIncrement = true;
p = this.updateField(pk.name, pkcpy, options);
});
} else {
if (restore_ai == fieldobj.name) {
p = p.then(() => {
fieldobj.autoIncrement = false;
});
}
}
}
delete field.primary;
return p;
}
/**
 * Rebuilds unique constraints involving `fieldobj` after `field.unique`
 * changes. Drops the field's single-column unique constraint, and when
 * `field.unique` names a constraint group, drops and recreates that group
 * with the field added or removed.
 *
 * @returns A promise chaining the constraint DDL steps.
 */
updateUnique(field, fieldobj) {
let constraint_name;
let uniqueFields = [];
let p;
// remove field from unique constraints
p = this.app.database.interface.dropConstraint(this.name, { field: fieldobj.name, type: 'unique' });
if (typeof field.unique == 'string') {
// remove unique constraint group
uniqueFields = this.getUniqueFields(field.unique);
constraint_name = field.unique;
p = p.then(() => { return this.app.database.interface.dropConstraint(this.name, { name: constraint_name, type: 'unique' }); });
}
// add or remove field in constraint
if (field.unique) {
if (!uniqueFields.find((x) => { return x.name == fieldobj.name; })) {
uniqueFields.push(fieldobj);
}
} else {
if (uniqueFields.length) {
uniqueFields = uniqueFields.filter((x) => { return x.name != fieldobj.name; });
}
}
// reconstruct unique constraint
if (uniqueFields.length) {
const unique_names = uniqueFields.map((x) => { return x.name; });
p = p.then(() => {
return this.app.database.interface.addConstraint(this.name, { fields: unique_names, name: constraint_name, type: 'unique' });
});
}
return p;
}
/**
 * Updates a field's definition and propagates the change to the database:
 * updates the in-memory definition (super), renames the column if needed,
 * rebuilds PK/unique/reference constraints, alters the column, reloads the
 * model/relations/queries, and renames the field in other entities' relation
 * references. `field` is mutated throughout the pipeline (no-op keys removed).
 *
 * @param name The current field name.
 * @param field The partial update to apply.
 * @param options `db: false` skips all DDL; `apply: false` skips model/query reload.
 * @returns Resolves with the updated Field, rejects if the field does not exist or any step fails.
 */
updateField(name: string, field: IFieldUpdate, options?): Promise<Field> {
options = options || {};
const oldfield = this.getField(name);
return new Promise((accept, reject) => {
if ( ! oldfield) {
return reject(new MateriaError('This field does not exist'));
}
let fieldobj: Field;
let differ_done;
options.differ = (done) => {
differ_done = done;
};
const _field: IFieldUpdate = Object.assign({}, oldfield, field);
// autoIncrement is only meaningful for number columns.
if (_field.autoIncrement && _field.type.toLowerCase() != 'number') {
delete _field.autoIncrement;
delete field.autoIncrement;
}
super.updateField(name, _field, options).then((_fieldobj) => {
fieldobj = _fieldobj;
if (options.db == false) {
differ_done();
// Sentinel abort: the catch handler below treats err == null with
// options.db == false as success.
throw null;
}
if (options.apply != false) {
// Strip keys that did not actually change so the DDL below is minimal.
if (field.type == oldfield.type) {
delete field.type;
}
if (field.unique == oldfield.unique) {
delete field.unique;
}
if (field.required == oldfield.required) {
delete field.required;
}
if (field.primary == oldfield.primary) {
delete field.primary;
}
if (field.default == oldfield.default) {
delete field.default;
}
if (field.autoIncrement == oldfield.autoIncrement) {
delete field.autoIncrement;
}
if (field.defaultValue == oldfield.defaultValue) {
delete field.defaultValue;
}
if (field.onUpdate == oldfield.onUpdate) {
delete field.onUpdate;
}
if (field.onDelete == oldfield.onDelete) {
delete field.onDelete;
}
}
if (oldfield.name != fieldobj.name) {
return this.app.database.interface.renameColumn(
this.name, oldfield.name,
fieldobj.name
);
}
}).then(() => {
if (field.primary != undefined) {
return this.updatePrimary(field, fieldobj, options);
}
}).then(() => {
if ( field.unique != undefined && ! fieldobj.primary ) {
return this.updateUnique(field, fieldobj);
}
}).then(() => {
delete field.unique;
if (oldfield.isRelation) {
field.isRelation = oldfield.isRelation;
}
if (field.references === null || field.isRelation) {
return this.app.database.interface.dropConstraint(this.name, { field: fieldobj.name, type: 'references' });
}
}).then(() => {
let dbfield = this.app.database.interface.fieldToColumn(field);
if (Object.keys(dbfield).length == 0) {
return Promise.resolve();
}
// sequelize changeColumn must contain type and nullable
if ( ! dbfield.type ) {
field.type = oldfield.type;
}
if (dbfield.allowNull == undefined) {
field.required = oldfield.required;
}
if (dbfield.default == undefined) {
field.default = oldfield.default;
}
if (field.default && dbfield.defaultValue == undefined) {
field.defaultValue = oldfield.defaultValue;
}
if (dbfield.onUpdate || dbfield.onDelete) {
field.onUpdate = dbfield.onUpdate;
field.onDelete = dbfield.onDelete;
}
let p: Promise<any> = Promise.resolve();
// Becoming NOT NULL without a default: backfill existing NULL rows with
// the default value first, so changeColumn does not fail.
if (field.required && ! field.default && field.defaultValue) {
p = p.then(() => {
field.default = true;
field.required = false;
dbfield = this.app.database.interface.fieldToColumn(field);
field.default = false;
field.required = true;
delete field.defaultValue;
const vals = {};
const where = {};
vals[fieldobj.name] = dbfield.defaultValue;
where[fieldobj.name] = null;
return this.model.update(vals, { where: where });
});
}
return p.then(() => {
return this.app.database.interface.changeColumn(this.name, fieldobj.name, field);
});
}).then(() => {
differ_done();
if (options.apply != false) {
let p = this.loadModel().then(() => {
this.loadRelationsInModel();
this.refreshQueries();
});
// A rename must be propagated to every relation that references this field.
if (field.name != oldfield.name) {
for (const entity of this.app.entities.findAll()) {
let need_save = false;
for (const relation of entity.relations) {
if (relation.reference && relation.reference.entity == this.name && relation.reference.field == oldfield.name) {
need_save = true;
relation.reference.field = field.name;
}
}
if (need_save) {
if (options.save != false) {
p = p.then(() => entity.save());
}
p = p.then(() => {
return entity.loadModel().then(() => {
entity.loadRelationsInModel();
});
});
}
}
}
return p;
}
}).then(() => {
accept(fieldobj);
}).catch((err) => {
// err == null is the deliberate abort thrown above when options.db == false.
if (err == null && options.db == false) {
return accept(fieldobj);
}
reject(err);
});
});
}
/**
 * Adds a field at position `at`, then creates the matching column. For
 * required fields without a default on a non-empty table, a temporary
 * default is used (possibly generated from `field.generateFrom`'s first row)
 * and then removed. On failure the field is rolled back via removeField.
 *
 * @param options `db: false` skips DDL; `apply: false` skips model/query reload.
 * @returns Resolves with the created Field.
 */
addFieldAt(field: IField, at: number, options?): Promise<Field> {
options = options || {};
field = Object.assign({}, field);
if (field.autoIncrement && field.type.toLowerCase() != 'number') {
delete field.autoIncrement;
}
const newOpts = field.generateFrom ? Object.assign({}, options, {
generateQueries: false
}) : options;
// await promise;
return super.addFieldAt(field, at, newOpts).then((fieldobj) => {
if (options.db == false) {
return Promise.resolve(fieldobj);
}
let p = Promise.resolve();
if (options.apply != false) {
p = this.loadModel().then(() => {
this.loadRelationsInModel();
});
}
// The unique constraint is applied last (see updateUnique below), so it is
// temporarily cleared from the column definition.
const isUnique = field.unique;
field.unique = false;
// When field is required and doesn't have default value
// if it has data in the entity, it will make an error because the field can't be null and we don't know what do put into
if (field.required && ! field.default) {
field.default = true;
if (field.generateFrom) {
p = p.then(() => {
const entityFrom = this.app.entities.get(field.generateFrom);
const entityFromPk = entityFrom.getPK()[0];
return entityFrom.getQuery('list').run({ limit: 1 }, {raw: true}).then((list) => {
if ( ! list.data.length) {
const query: any = this.getQuery('list');
query.select = query.select.filter(fieldName => fieldName !== field.name);
return query.run({ limit: 1 }).then((from_list) => {
if (from_list.data.length) {
// The source entity is empty but this one is not: relax the
// field so existing rows remain valid.
field.required = false;
field.default = false;
field.onDelete = 'NO ACTION';
const fieldInstance = this.getField(field.name);
fieldInstance.update(
Object.assign({},
fieldInstance.toJson(),
{
onDelete: 'NO ACTION',
required: false,
default: false,
unique: isUnique,
primary: false,
autoIncrement: false
}
));
} else {
delete field.defaultValue;
}
});
} else {
field.defaultValue = list.data[0][entityFromPk.name];
}
});
});
}
p = p.then(() => {
// Fallback default per type when nothing better is known.
if (field.defaultValue == undefined) {
const defs = {
'number' : 0,
'text': '',
'string' : '',
'float': 0,
'boolean': false,
'date': new Date(0)
};
field.defaultValue = defs[field.type] === undefined ? '' : defs[field.type];
}
return this.app.database.interface.addColumn(this.name, field.name, field);
});
if (field.required) {
// Remove the temporary default now that the column exists and is populated.
p = p.then(() => {
field.default = false;
delete field.defaultValue;
delete field.isRelation;
return this.app.database.interface.changeColumn(this.name, field.name, field);
});
}
} else {
p = p.then(() => {
return this.app.database.interface.addColumn(this.name, field.name, field);
});
}
if (options.apply != false) {
this.generateDefaultQueries();
this.refreshQueries();
}
if (isUnique) {
p = p.then(() => {
field.unique = true;
return this.updateUnique(field, fieldobj);
});
}
return p.then(() => {
return fieldobj;
});
}).catch((e) => {
// Roll back the in-memory field so definition and database stay consistent.
return this.removeField(field.name, options).catch(() => {}).then(() => {
throw e;
});
});
}
/**
 * Removes a field from the entity (super) and drops its unique constraint
 * and column from the database.
 *
 * @param options `db: false` skips DDL; `apply: false` skips model/query reload.
 */
removeField(field, options) {
options = options || {};
return super.removeField(field, options).then(() => {
if (options.db == false) {
return Promise.resolve();
}
let p = Promise.resolve();
if (options.apply != false) {
p = this.loadModel().then(() => {
this.loadRelationsInModel();
this.refreshQueries();
});
}
p = p.then(() => {
const fieldobj = { name: field, unique: false };
return this.updateUnique(fieldobj, fieldobj);
});
p = p.then(() => {
return this.app.database.interface.removeColumn(this.name, field);
});
return p;
});
}
/**
 * Regenerates the reserved CRUD queries (create/list/get/update/delete).
 */
generateDefaultQueries() {
const qg = new QueryGenerator(this);
qg.generateQueries();
}
/**
 * (Re)defines the Sequelize model from the entity definition. If the entity
 * has an `id` field but no declared PK, `id` is temporarily flagged primary
 * so Sequelize does not inject its own; if there is no `id` field, the
 * implicit Sequelize `id` attribute is removed.
 *
 * @returns Resolves when defined; rejects if the definition fails.
 */
loadModel(): Promise<void> {
const idField = this.getField('id');
let idPKforce = false;
if (idField && ! this.getPK().length) {
idPKforce = true;
idField.primary = true;
}
try {
this.model = this.app.database.interface.define(this);
} catch (e) {
return Promise.reject(e);
}
if (idPKforce) {
idField.primary = false;
}
if ( ! idField && this.model.rawAttributes.id) {
this.model.removeAttribute('id');
}
return Promise.resolve();
}
/**
 * Declares the Sequelize associations (belongsTo / hasMany / belongsToMany)
 * matching this entity's relations on both sides' models.
 */
loadRelationsInModel(): void {
this.relations.forEach(relation => {
const entityDest = this.app.entities.get(relation.reference.entity);
if (entityDest && entityDest instanceof DBEntity) {
if ( ! relation.type || relation.type == 'belongsTo') {
let key = entityDest.getPK()[0].name;
if (relation.reference.field) {
key = relation.reference.field;
}
const opts: BelongsToOptions = {
foreignKey: relation.field,
targetKey: key
};
const opts2: HasManyOptions = {
foreignKey: relation.field
};
// NOTE(review): getRelationId() increments its counter on every call, so
// the two calls below give opts.as and opts2.as different suffixes —
// confirm this asymmetry between the association aliases is intended.
if (this.getField(relation.reference.entity)) {
opts.as = relation.field + DBEntity.getRelationId(relation.field);
opts2.as = relation.field + DBEntity.getRelationId(relation.field);
}
this.model.belongsTo(entityDest.model, opts);
entityDest.model.hasMany(this.model, opts2);
} else if (relation.type == 'hasMany') {
let key = this.getPK()[0].name;
if (relation.field) {
key = relation.field;
}
const opts: BelongsToOptions = {
foreignKey: relation.reference.field,
targetKey: key
};
const opts2: HasManyOptions = {
foreignKey: relation.reference.field
};
if (this.getField(relation.reference.entity)) {
opts.as = relation.field + DBEntity.getRelationId(relation.field);
opts2.as = relation.field + DBEntity.getRelationId(relation.field);
}
entityDest.model.belongsTo(this.model, opts);
this.model.hasMany(entityDest.model, opts2);
} else if (relation.type == 'belongsToMany') {
const entityThrough = this.app.entities.get(relation.through) as DBEntity;
if ( ! entityThrough) {
console.error('Through table not found');
} else {
const relationModel: BelongsToManyOptions = {
through: {
model: entityThrough.model
},
foreignKey: relation.as,
otherKey: relation.reference.as
};
// Self-referencing many-to-many needs a distinct alias.
if (this.model.getTableName() === entityDest.model.getTableName()) {
relationModel.as = entityThrough.name + DBEntity.getRelationId(entityThrough.name);
}
this.model.belongsToMany(entityDest.model, relationModel);
}
}
}
});
}
/**
 * Serializes the entity, excluding the reserved CRUD queries (they are
 * regenerated, not persisted).
 */
toJson(): IEntityConfig {
const json = super.toJson();
json.queries = json.queries.filter(query => ['get', 'list', 'update', 'create', 'delete'].indexOf(query.id) == -1);
return json;
}
// Per-name monotonically increasing counters used to build unique association aliases.
static asRelationId: any = {};
/**
 * Returns the next alias suffix for `entityName`, incrementing its counter.
 */
static getRelationId(entityName): string {
if (DBEntity.asRelationId[entityName] == null) {
DBEntity.asRelationId[entityName] = 0;
}
return '' + DBEntity.asRelationId[entityName]++;
}
}
import React from 'react'
import { ListControlMenu, Input, ColorPicker, ListControlCheck } from 'mdui-in-react'
import saveFile from '../../utils/fileSaver'
/** Web App Manifest member reference: https://developer.mozilla.org/zh-CN/docs/Web/Manifest */
/**
 * Renders the generated manifest JSON and a button to download it as
 * `manifest.json`. Converts the builder state in `props.config` into the
 * member names/shapes expected by the Web App Manifest specification.
 */
const Preview = (props: any) => {
  const { config } = props;
  if (config === "") return null;
  // Deep-copy so the transformations below never mutate the builder state.
  const exportConfig = JSON.parse(JSON.stringify(config));
  // The spec member is `related_applications`; the builder stores the list
  // under `relatedApp.data`.
  exportConfig.related_applications = config.relatedApp.data;
  delete exportConfig.relatedApp;
  exportConfig.display = displays[config.display].value;
  // Manifest `sizes` entries are "WxH" strings; the builder keeps one number.
  // @ts-expect-error ts-migrate(7006) FIXME: Parameter 'icon' implicitly has an 'any' type.
  config.icons.forEach((icon, i) => {
    exportConfig.icons[i].sizes = `${icon.sizes}x${icon.sizes}`
  })
  const res = JSON.stringify(exportConfig, null, 4);
  return (
    <>
      <Input
        // @ts-expect-error ts-migrate(2322) FIXME: Type 'string' is not assignable to type 'number | ... Remove this comment to see the full error message
        rows="10"
        header=""
        value={res}
      />
      <button
        onClick={() => {
          const blob = new Blob([res], { type: "application/json;charset=utf-8" });
          // @ts-expect-error ts-migrate(2345) FIXME: Property 'type' is missing in type '{ file: Blob; ... Remove this comment to see the full error message
          saveFile({
            file: blob,
            filename: "manifest.json"
          })
        }}
        className="mdui-btn mdui-btn-raised mdui-color-theme">
        下载manifest.json
      </button>
    </>
  )
}
// The developer's preferred display mode for the web application. `value`
// must be one of the exact tokens defined by the Web App Manifest spec:
// fullscreen | standalone | minimal-ui | browser.
const displays = [{
  name: '全屏显示',
  value: 'fullscreen'
}, {
  name: '独立的应用程序(不含浏览器UI)',
  value: 'standalone' // was 'standalon' — invalid manifest value
}, {
  name: '独立的应用程序(含有浏览器UI)',
  value: 'minimal-ui' // was 'minimal-u' — invalid manifest value
}, {
  name: '传统模式',
  value: 'browser'
},]
type IconsState = any;
/**
 * List editor for the manifest's `icons` entries. Rows open a shared mdui
 * dialog (#icon) pre-filled for editing; the `edit` flag toggles per-row
 * delete buttons. Uses `props.list` / `props.add` / `props.update` /
 * `props.delete` to manipulate the parent-owned array.
 */
class Icons extends React.Component<{
  delete(index: number): void
}, IconsState> {
  constructor(props: Readonly<{ delete(index: number): void; }>) {
    super(props);
    this.state = {
      src: "",
      sizes: "",
      type: "png",
      edit: false,
      update: {
        open: false,
        i: 0
      }
    }
  }
  render() {
    const { src, sizes, type, edit, update } = this.state;
    // @ts-expect-error ts-migrate(2339) FIXME: Property 'list' does not exist on type 'Readonly<{... Remove this comment to see the full error message
    const applist = this.props.list.map((icon, i) => {
      const button = (edit) ?
        <button onClick={() => this.props.delete(i)} className="mdui-btn mdui-list-item-icon mdui-btn-icon">
          <i className="mdui-icon material-icons mdui-text-color-red">delete</i>
        </button>
        :
        null
      return (
        <li
          key={i}
          mdui-dialog="{target:'#icon',history:false}"
          className="mdui-list-item mdui-ripple"
          onClick={() => {
            // Load the clicked row into the dialog; build a fresh `update`
            // object instead of mutating the current state in place.
            this.setState({
              src: icon.src,
              sizes: icon.sizes,
              type: icon.type,
              edit: edit,
              update: { open: true, i: i }
            })
          }}>
          <i className="mdui-list-item-icon mdui-icon material-icons">insert_photo</i>
          <div className="mdui-list-item-content">
            <div className="mdui-list-item-title">{`${icon.sizes}x${icon.sizes}`}</div>
          </div>
          {button}
        </li>
      )
    })
    return (
      <>
        <li className="mdui-subheader">
          图标
          <span mdui-dialog="{target:'#icon',history:false}" className="mdui-text-color-theme">添加</span>
          <span
            onClick={() => {
              this.setState({ edit: !edit })
            }}
            className="mdui-text-color-theme">
            {(!edit) ? '编辑' : '确定'}
          </span>
        </li>
        {applist}
        <div style={{ display: 'inline-block' }} className="mdui-dialog" id="icon">
          <div className="mdui-dialog-content">
            <Input
              onValueChange={newText => {
                this.setState({ src: newText })
              }}
              header="图标路径"
              value={src}
            />
            <Input
              onValueChange={newText => {
                this.setState({ sizes: newText })
              }}
              header="尺寸(只需填写长或宽任意一个)"
              type="number"
              value={sizes}
            />
            <Input
              onValueChange={newText => {
                this.setState({ type: newText })
              }}
              header="图标文件类型"
              value={type}
            />
          </div>
          <div className="mdui-dialog-actions">
            <button
              onClick={() => {
                if (update.open) {
                  // @ts-expect-error ts-migrate(2339) FIXME: Property 'update' does not exist on type 'Readonly... Remove this comment to see the full error message
                  this.props.update(update.i, {
                    src: src,
                    sizes: sizes,
                    type: type
                  })
                  // Close the edit session with a new object; do not mutate state.
                  this.setState({ update: { open: false, i: update.i } })
                } else {
                  // @ts-expect-error ts-migrate(2339) FIXME: Property 'add' does not exist on type 'Readonly<{}... Remove this comment to see the full error message
                  this.props.add({
                    src: src,
                    sizes: sizes,
                    type: type
                  })
                }
              }}
              className="mdui-btn mdui-ripple" mdui-dialog-close="true">
              保存
            </button>
            <button className="mdui-btn mdui-ripple" mdui-dialog-close="true">取消</button>
          </div>
        </div>
      </>
    )
  }
}
type RelatedAppState = any;
/**
 * List editor for the manifest's `related_applications` entries (native apps
 * the site recommends installing). Rows open a shared mdui dialog
 * (#relatedApp) pre-filled for editing; the `edit` flag toggles per-row
 * delete buttons. Uses `props.list` / `props.add` / `props.update` /
 * `props.delete` to manipulate the parent-owned array.
 */
class RelatedApp extends React.Component<{
  delete(index: number): void
}, RelatedAppState> {
  constructor(props: Readonly<{ delete(index: number): void; }>) {
    super(props);
    this.state = {
      store: "",
      url: "",
      id: "com.example.app1",
      edit: false,
      update: {
        open: false,
        i: 0
      }
    }
  }
  render() {
    const { store, url, id, edit, update } = this.state;
    // @ts-expect-error ts-migrate(2339) FIXME: Property 'list' does not exist on type 'Readonly<{... Remove this comment to see the full error message
    const applist = this.props.list.map((app, i) => {
      const button = (edit) ?
        <button onClick={() => this.props.delete(i)} className="mdui-btn mdui-list-item-icon mdui-btn-icon">
          <i className="mdui-icon material-icons mdui-text-color-red">delete</i>
        </button>
        :
        null
      return (
        <li
          key={i}
          mdui-dialog="{target:'#relatedApp',history:false}"
          className="mdui-list-item mdui-ripple"
          onClick={() => {
            // Load the clicked row into the dialog; build a fresh `update`
            // object instead of mutating the current state in place.
            this.setState({
              store: app.store,
              url: app.url,
              id: app.id,
              update: { open: true, i: i }
            })
          }}>
          <i className="mdui-list-item-avatar mdui-icon material-icons">apps</i>
          <div className="mdui-list-item-content">
            <div className="mdui-list-item-title">{app.url}</div>
            <div className="mdui-list-item-text mdui-list-item-one-line">
              <span className="mdui-text-color-theme-text">{app.store}</span>
              {app.id}</div>
          </div>
          {button}
        </li>
      )
    })
    return (
      <>
        <li className="mdui-subheader">
          推荐安装原生APP
          <span mdui-dialog="{target:'#relatedApp',history:false}" className="mdui-text-color-theme">添加</span>
          <span
            onClick={() => {
              this.setState({ edit: !edit })
            }}
            className="mdui-text-color-theme">
            {(!edit) ? '编辑' : '确定'}
          </span>
        </li>
        {applist}
        <li className="mdui-subheader"></li>
        <div className="mdui-dialog" id="relatedApp">
          <div className="mdui-dialog-content">
            <Input
              onValueChange={newText => {
                this.setState({ store: newText })
              }}
              header="可以找到应用程序的平台"
              value={store}
            />
            <Input
              onValueChange={newText => {
                this.setState({ url: newText })
              }}
              header="可以找到应用程序的URL"
              // @ts-expect-error ts-migrate(2322) FIXME: Type '"url"' is not assignable to type '"number" |... Remove this comment to see the full error message
              type="url"
              value={url}
            />
            <Input
              onValueChange={newText => {
                this.setState({ id: newText })
              }}
              header="用于表示指定平台上的应用程序的ID"
              value={id}
            />
          </div>
          <div className="mdui-dialog-actions">
            <button
              onClick={() => {
                if (update.open) {
                  // @ts-expect-error ts-migrate(2339) FIXME: Property 'update' does not exist on type 'Readonly... Remove this comment to see the full error message
                  this.props.update(update.i, {
                    store: store,
                    url: url,
                    id: id
                  })
                  // Close the edit session with a new object; do not mutate state.
                  this.setState({ update: { open: false, i: update.i } })
                } else {
                  // @ts-expect-error ts-migrate(2339) FIXME: Property 'add' does not exist on type 'Readonly<{}... Remove this comment to see the full error message
                  this.props.add({
                    store: store,
                    url: url,
                    id: id
                  })
                }
              }}
              className="mdui-btn mdui-ripple" mdui-dialog-close="true">
              保存
            </button>
            <button className="mdui-btn mdui-ripple" mdui-dialog-close="true">取消</button>
          </div>
        </div>
      </>
    )
  }
}
type CreateState = any;
/**
 * Root form for composing a Web App Manifest: basic text fields, colors,
 * display mode, the icons list and the optional related-applications list.
 * On submit, passes the whole form state to `props.complete` and then calls
 * `props.previewFunc`.
 */
class Create extends React.Component<{}, CreateState> {
constructor(props: {}) {
super(props);
// NOTE(review): background_color and theme_color are read in render() but
// never initialised here — presumably undefined until the user picks a
// colour; confirm the ColorPicker tolerates that.
this.state = {
name: "",
short_name: "",
description: "",
lang: "zh-CN",
display: 3,
icons: [],
relatedApp: { open: false, data: [] },
}
}
render() {
const { icons, display, description, lang, relatedApp, background_color, theme_color, name, short_name } = this.state
// @ts-expect-error ts-migrate(2339) FIXME: Property 'complete' does not exist on type 'Readon... Remove this comment to see the full error message
const { complete, previewFunc } = this.props
// NOTE(review): the floating action button's <i> element near the end of this
// JSX appears to have empty text content — possibly a lost icon ligature
// (e.g. "check"); verify against the original UI.
return (
<>
<Input
onValueChange={newText => {
this.setState({ name: newText })
}}
header="应用名称"
value={name}
/>
<Input
onValueChange={newText => {
this.setState({ short_name: newText })
}}
header="短名称"
value={short_name}
/>
<Input
onValueChange={newText => {
this.setState({ description: newText })
}}
header="应用描述"
value={description}
/>
<Input
onValueChange={newText => {
this.setState({ lang: newText })
}}
header="语言标记"
value={lang}
/>
<div className="mdui-row-xs-2">
<div className="mdui-col">
<ColorPicker
text="预定义背景色"
color={background_color}
onColorChange={newColor => {
this.setState({ background_color: newColor })
}}
/>
</div>
<div className="mdui-col">
<ColorPicker
text="主题颜色"
color={theme_color}
onColorChange={newColor => {
this.setState({ theme_color: newColor })
}}
/>
</div>
</div>
<ul className="mdui-list">
<ListControlMenu
icon="language"
// @ts-expect-error ts-migrate(2322) FIXME: Property 'text' does not exist on type 'IntrinsicA... Remove this comment to see the full error message
text="显示模式"
checked={display}
onCheckedChange={checked => {
this.setState({ display: checked })
}}
items={displays}
/>
<Icons
// @ts-expect-error ts-migrate(2322) FIXME: Property 'list' does not exist on type 'IntrinsicA... Remove this comment to see the full error message
list={icons}
// @ts-expect-error ts-migrate(7006) FIXME: Parameter 'newIcon' implicitly has an 'any' type.
add={newIcon => {
icons.push(newIcon);
this.setState({
icons: icons
})
}}
delete={i => {
icons.splice(i, 1);
this.setState({
icons: icons
})
}}
// @ts-expect-error ts-migrate(7006) FIXME: Parameter 'i' implicitly has an 'any' type.
update={(i, info) => {
icons.splice(i, 1, info);
this.setState({
icons: icons
})
}}
/>
<ListControlCheck
icon="android"
title="推荐安装原生APP"
checked={relatedApp.open}
onCheckedChange={checked => {
this.setState({
relatedApp: {
open: checked, data: relatedApp.data
}
})
}}
/>
<span style={{ display: (relatedApp.open) ? 'block' : 'none' }}>
<RelatedApp
// @ts-expect-error ts-migrate(2322) FIXME: Property 'list' does not exist on type 'IntrinsicA... Remove this comment to see the full error message
list={relatedApp.data}
// @ts-expect-error ts-migrate(7006) FIXME: Parameter 'newApp' implicitly has an 'any' type.
add={newApp => {
relatedApp.data.push(newApp)
this.setState({
relatedApp: relatedApp
})
}}
delete={i => {
relatedApp.data.splice(i, 1);
this.setState({
relatedApp: relatedApp
})
}}
// @ts-expect-error ts-migrate(7006) FIXME: Parameter 'i' implicitly has an 'any' type.
update={(i, info) => {
relatedApp.data.splice(i, 1, info);
this.setState({
relatedApp: relatedApp
})
}}
/>
</span>
</ul>
<button
onClick={() => {
complete(this.state);
previewFunc()
}}
className="mdui-fab mdui-color-theme mdui-fab-fixed">
<i className="mdui-icon material-icons"></i>
</button>
</>
)
}
}
// State for the top-level UI (holds the completed manifest config).
type UiState = any;
/**
 * Top-level two-tab layout ("编辑" / "预览").  The edit tab hosts the
 * <Create> form; when the form completes, the result is stored in state
 * and the preview tab link is clicked programmatically so <Preview>
 * renders the fresh config.
 */
class Ui extends React.Component<{}, UiState> {
  // DOM ref to the preview tab anchor; clicked to switch tabs after editing.
  previewBtn: any
  constructor(props: {}) {
    super(props);
    this.state = {
      result: ""
    }
  }
  render() {
    return (
      <>
        <div className="mdui-tab" mdui-tab="true">
          <a href="#input" className="mdui-ripple">编辑</a>
          <a ref={r => this.previewBtn = r} href="#preview" className="mdui-ripple">预览</a>
        </div>
        <div id="input">
          <Create
            // @ts-expect-error ts-migrate(2322) FIXME: Property 'complete' does not exist on type 'Intrin... Remove this comment to see the full error message
            complete={result => {
              this.setState({ result: result })
            }}
            previewFunc={() => this.previewBtn.click()}
          />
        </div>
        <div id="preview">
          <Preview config={this.state.result} />
        </div>
      </>
    )
  }
}
export default Ui
import * as coreClient from "@azure/core-client";
// ---- Azure Resource Manager model types for the Custom Locations RP. ----
// NOTE(review): this section looks machine-generated (AutoRest style) —
// prefer regenerating from the swagger over hand-editing; confirm before
// changing any shape.  `readonly` members are server-populated and are not
// serialized in requests.
/** Lists of Custom Locations operations. */
export interface CustomLocationOperationsList {
  /** Next page of operations. */
  nextLink?: string;
  /** Array of customLocationOperation */
  value: CustomLocationOperation[];
}
/** Custom Locations operation. */
export interface CustomLocationOperation {
  /**
   * Is this Operation a data plane operation
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly isDataAction?: boolean;
  /**
   * The name of the compute operation.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly name?: string;
  /**
   * The origin of the compute operation.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly origin?: string;
  /**
   * The description of the operation.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly description?: string;
  /**
   * The display name of the compute operation.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly operation?: string;
  /**
   * The resource provider for the operation.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly provider?: string;
  /**
   * The display name of the resource the operation applies to.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly resource?: string;
}
/** Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.). */
export interface ErrorResponse {
  /** The error object. */
  error?: ErrorDetail;
}
/** The error detail. */
export interface ErrorDetail {
  /**
   * The error code.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly code?: string;
  /**
   * The error message.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly message?: string;
  /**
   * The error target.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly target?: string;
  /**
   * The error details.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly details?: ErrorDetail[];
  /**
   * The error additional info.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly additionalInfo?: ErrorAdditionalInfo[];
}
/** The resource management error additional info. */
export interface ErrorAdditionalInfo {
  /**
   * The additional info type.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly type?: string;
  /**
   * The additional info.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly info?: Record<string, unknown>;
}
/** The List Custom Locations operation response. */
export interface CustomLocationListResult {
  /**
   * The URL to use for getting the next set of results.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly nextLink?: string;
  /**
   * The list of Custom Locations.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly value?: CustomLocation[];
}
/** Common fields that are returned in the response for all Azure Resource Manager resources */
export interface Resource {
  /**
   * Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly id?: string;
  /**
   * The name of the resource
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly name?: string;
  /**
   * The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly type?: string;
}
/** Identity for the resource. */
export interface Identity {
  /**
   * The principal ID of resource identity.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly principalId?: string;
  /**
   * The tenant ID of resource.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly tenantId?: string;
  /** The identity type. */
  type?: ResourceIdentityType;
}
/** This is optional input that contains the authentication that should be used to generate the namespace. */
export interface CustomLocationPropertiesAuthentication {
  /** The type of the Custom Locations authentication */
  type?: string;
  /** The kubeconfig value. */
  value?: string;
}
/** Metadata pertaining to creation and last modification of the resource. */
export interface SystemData {
  /** The identity that created the resource. */
  createdBy?: string;
  /** The type of identity that created the resource. */
  createdByType?: CreatedByType;
  /** The timestamp of resource creation (UTC). */
  createdAt?: Date;
  /** The identity that last modified the resource. */
  lastModifiedBy?: string;
  /** The type of identity that last modified the resource. */
  lastModifiedByType?: CreatedByType;
  /** The timestamp of resource last modification (UTC) */
  lastModifiedAt?: Date;
}
/** The Custom Locations patchable resource definition. */
export interface PatchableCustomLocations {
  /** Identity for the resource. */
  identity?: Identity;
  /** Resource tags */
  tags?: { [propertyName: string]: string };
  /** This is optional input that contains the authentication that should be used to generate the namespace. */
  authentication?: CustomLocationPropertiesAuthentication;
  /** Contains the reference to the add-on that contains charts to deploy CRDs and operators. */
  clusterExtensionIds?: string[];
  /** Display name for the Custom Locations location. */
  displayName?: string;
  /** Connected Cluster or AKS Cluster. The Custom Locations RP will perform a checkAccess API for listAdminCredentials permissions. */
  hostResourceId?: string;
  /** Type of host the Custom Locations is referencing (Kubernetes, etc...). */
  hostType?: HostType;
  /** Kubernetes namespace that will be created on the specified cluster. */
  namespace?: string;
  /** Provisioning State for the Custom Location. */
  provisioningState?: string;
}
/** List of EnabledResourceTypes definition. */
export interface EnabledResourceTypesListResult {
  /**
   * The URL to use for getting the next set of results.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly nextLink?: string;
  /**
   * The list of EnabledResourceTypes available for a customLocation.
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly value?: EnabledResourceType[];
}
/** Metadata of the Resource Type. */
export interface EnabledResourceTypePropertiesTypesMetadataItem {
  /** Api Version of Resource Type */
  apiVersion?: string;
  /** Resource Provider Namespace of Resource Type */
  resourceProviderNamespace?: string;
  /** Resource Type */
  resourceType?: string;
}
/** The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location' */
export type TrackedResource = Resource & {
  /** Resource tags. */
  tags?: { [propertyName: string]: string };
  /** The geo-location where the resource lives */
  location: string;
};
/** The resource model definition for a Azure Resource Manager proxy resource. It will not have tags and a location */
// Intentionally adds no members; exists so generated code can distinguish
// proxy resources from tracked resources by alias name.
export type ProxyResource = Resource & {};
/** Custom Locations definition. */
export type CustomLocation = TrackedResource & {
  /** Identity for the resource. */
  identity?: Identity;
  /**
   * Metadata pertaining to creation and last modification of the resource
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly systemData?: SystemData;
  /** This is optional input that contains the authentication that should be used to generate the namespace. */
  authentication?: CustomLocationPropertiesAuthentication;
  /** Contains the reference to the add-on that contains charts to deploy CRDs and operators. */
  clusterExtensionIds?: string[];
  /** Display name for the Custom Locations location. */
  displayName?: string;
  /** Connected Cluster or AKS Cluster. The Custom Locations RP will perform a checkAccess API for listAdminCredentials permissions. */
  hostResourceId?: string;
  /** Type of host the Custom Locations is referencing (Kubernetes, etc...). */
  hostType?: HostType;
  /** Kubernetes namespace that will be created on the specified cluster. */
  namespace?: string;
  /** Provisioning State for the Custom Location. */
  provisioningState?: string;
};
/** EnabledResourceType definition. */
export type EnabledResourceType = ProxyResource & {
  /**
   * Metadata pertaining to creation and last modification of the resource
   * NOTE: This property will not be serialized. It can only be populated by the server.
   */
  readonly systemData?: SystemData;
  /** Cluster Extension ID */
  clusterExtensionId?: string;
  /** Cluster Extension Type */
  extensionType?: string;
  /** Metadata of the Resource Type */
  typesMetadata?: EnabledResourceTypePropertiesTypesMetadataItem[];
};
// "Extensible enum" pattern: each wire type stays plain `string` so unknown
// future service values still deserialize, while the Known* enums list the
// values documented at generation time.
/** Known values of {@link ResourceIdentityType} that the service accepts. */
export enum KnownResourceIdentityType {
  SystemAssigned = "SystemAssigned",
  None = "None"
}
/**
 * Defines values for ResourceIdentityType. \
 * {@link KnownResourceIdentityType} can be used interchangeably with ResourceIdentityType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **SystemAssigned** \
 * **None**
 */
export type ResourceIdentityType = string;
/** Known values of {@link HostType} that the service accepts. */
export enum KnownHostType {
  Kubernetes = "Kubernetes"
}
/**
 * Defines values for HostType. \
 * {@link KnownHostType} can be used interchangeably with HostType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **Kubernetes**
 */
export type HostType = string;
/** Known values of {@link CreatedByType} that the service accepts. */
export enum KnownCreatedByType {
  User = "User",
  Application = "Application",
  ManagedIdentity = "ManagedIdentity",
  Key = "Key"
}
/**
 * Defines values for CreatedByType. \
 * {@link KnownCreatedByType} can be used interchangeably with CreatedByType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
 * **User** \
 * **Application** \
 * **ManagedIdentity** \
 * **Key**
 */
export type CreatedByType = string;
// Per-operation optional-parameter bags and response type aliases.  The
// long-running operations (createOrUpdate, delete) additionally expose the
// standard poller knobs `updateIntervalInMs` / `resumeFrom`.
/** Optional parameters. */
export interface CustomLocationsListOperationsOptionalParams
  extends coreClient.OperationOptions {}
/** Contains response data for the listOperations operation. */
export type CustomLocationsListOperationsResponse = CustomLocationOperationsList;
/** Optional parameters. */
export interface CustomLocationsListBySubscriptionOptionalParams
  extends coreClient.OperationOptions {}
/** Contains response data for the listBySubscription operation. */
export type CustomLocationsListBySubscriptionResponse = CustomLocationListResult;
/** Optional parameters. */
export interface CustomLocationsListByResourceGroupOptionalParams
  extends coreClient.OperationOptions {}
/** Contains response data for the listByResourceGroup operation. */
export type CustomLocationsListByResourceGroupResponse = CustomLocationListResult;
/** Optional parameters. */
export interface CustomLocationsGetOptionalParams
  extends coreClient.OperationOptions {}
/** Contains response data for the get operation. */
export type CustomLocationsGetResponse = CustomLocation;
/** Optional parameters. */
export interface CustomLocationsCreateOrUpdateOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}
/** Contains response data for the createOrUpdate operation. */
export type CustomLocationsCreateOrUpdateResponse = CustomLocation;
/** Optional parameters. */
export interface CustomLocationsDeleteOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}
/** Optional parameters. */
// NOTE: inlines the same fields as PatchableCustomLocations so the update
// payload can be passed through the options bag (generated layout).
export interface CustomLocationsUpdateOptionalParams
  extends coreClient.OperationOptions {
  /** Identity for the resource. */
  identity?: Identity;
  /** Resource tags */
  tags?: { [propertyName: string]: string };
  /** This is optional input that contains the authentication that should be used to generate the namespace. */
  authentication?: CustomLocationPropertiesAuthentication;
  /** Contains the reference to the add-on that contains charts to deploy CRDs and operators. */
  clusterExtensionIds?: string[];
  /** Display name for the Custom Locations location. */
  displayName?: string;
  /** Connected Cluster or AKS Cluster. The Custom Locations RP will perform a checkAccess API for listAdminCredentials permissions. */
  hostResourceId?: string;
  /** Type of host the Custom Locations is referencing (Kubernetes, etc...). */
  hostType?: HostType;
  /** Kubernetes namespace that will be created on the specified cluster. */
  namespace?: string;
  /** Provisioning State for the Custom Location. */
  provisioningState?: string;
}
/** Contains response data for the update operation. */
export type CustomLocationsUpdateResponse = CustomLocation;
/** Optional parameters. */
export interface CustomLocationsListEnabledResourceTypesOptionalParams
  extends coreClient.OperationOptions {}
/** Contains response data for the listEnabledResourceTypes operation. */
export type CustomLocationsListEnabledResourceTypesResponse = EnabledResourceTypesListResult;
/** Optional parameters. */
export interface CustomLocationsListOperationsNextOptionalParams
  extends coreClient.OperationOptions {}
/** Contains response data for the listOperationsNext operation. */
export type CustomLocationsListOperationsNextResponse = CustomLocationOperationsList;
/** Optional parameters. */
export interface CustomLocationsListBySubscriptionNextOptionalParams
  extends coreClient.OperationOptions {}
/** Contains response data for the listBySubscriptionNext operation. */
export type CustomLocationsListBySubscriptionNextResponse = CustomLocationListResult;
/** Optional parameters. */
export interface CustomLocationsListByResourceGroupNextOptionalParams
  extends coreClient.OperationOptions {}
/** Contains response data for the listByResourceGroupNext operation. */
export type CustomLocationsListByResourceGroupNextResponse = CustomLocationListResult;
/** Optional parameters. */
export interface CustomLocationsListEnabledResourceTypesNextOptionalParams
  extends coreClient.OperationOptions {}
/** Contains response data for the listEnabledResourceTypesNext operation. */
export type CustomLocationsListEnabledResourceTypesNextResponse = EnabledResourceTypesListResult;
/** Optional parameters. */
export interface CustomLocationsManagementClientOptionalParams
  extends coreClient.ServiceClientOptions {
  /** server parameter */
  $host?: string;
  /** Api Version */
  apiVersion?: string;
  /** Overrides client endpoint. */
  endpoint?: string;
}
import {
autorun,
onBecomeObserved,
observable,
computed,
action,
makeObservable,
onBecomeUnobserved,
runInAction,
makeAutoObservable
} from "../../../src/mobx"
describe("become-observed", () => {
    it("work on map with number as key", () => {
        // Register the hook under a numeric key to make sure hook
        // registration is not limited to string keys.
        const numericKey = 1
        const entries = observable.map()
        entries.set(numericKey, observable.box("value"))
        const observedHook = jest.fn()
        onBecomeObserved(entries, numericKey, observedHook)
        // Reading the entry inside a reaction makes the key observed,
        // which must invoke the hook.
        autorun(() => entries.get(numericKey))
        expect(observedHook).toBeCalled()
    })
})
// Regression test for mobx#2309: reading an observable or a computed from
// inside an action is an untracked read and must NOT fire the
// onBecomeObserved / onBecomeUnobserved hooks.
test("#2309 don't trigger oBO for computeds that aren't subscribed to", () => {
    const events: string[] = []
    class Asd {
        @observable prop = 42
        @computed
        get computed() {
            return this.prop
        }
        @action
        actionProp() {
            // Untracked read of the observable.
            const foo = this.prop
        }
        @action
        actionComputed() {
            // Untracked read through the computed.
            const bar = this.computed
        }
        constructor() {
            makeObservable(this)
        }
    }
    const asd = new Asd()
    onBecomeObserved(asd, "prop", () => {
        events.push("onBecomeObserved")
    })
    onBecomeUnobserved(asd, "prop", () => {
        events.push("onBecomeUnobserved")
    })
    asd.actionProp()
    events.push("--")
    asd.actionComputed()
    // Only the separator may appear: neither hook fired for either read.
    expect(events).toEqual(["--"])
})
// Regression suite for mobx#2309: documents exactly when onBecomeObserved /
// onBecomeUnobserved fire for chains of computeds with and without
// keepAlive.  The expected event arrays below are order-sensitive and
// encode the intended (sometimes subtle) semantics — do not reorder.
describe("#2309 onBecomeObserved inconsistencies", () => {
    let events: string[] = []
    beforeEach(() => {
        events = []
    })
    test("caseA", () => {
        // Computed {keepAlive: false} -> Observable
        const o = observable.box(1)
        const ca = computed(
            () => {
                return o.get()
            },
            { keepAlive: false }
        )
        onBecomeObserved(o, () => events.push(`o observed`))
        onBecomeUnobserved(o, () => events.push(`o unobserved`))
        onBecomeObserved(ca, () => events.push(`ca observed`))
        onBecomeUnobserved(ca, () => events.push(`ca unobserved`))
        expect(events).toEqual([])
        // An unobserved, non-kept-alive computed evaluates untracked:
        // nothing becomes observed.
        ca.get()
        expect(events).toEqual([])
    })
    test("caseB", () => {
        // Computed {keepAlive: false} -> Computed {keepAlive: false} -> Observable
        const o = observable.box(1)
        const ca = computed(
            () => {
                return o.get()
            },
            { keepAlive: false }
        )
        const cb = computed(
            () => {
                return ca.get() * 2
            },
            { keepAlive: false }
        )
        onBecomeObserved(o, () => events.push(`o observed`))
        onBecomeUnobserved(o, () => events.push(`o unobserved`))
        onBecomeObserved(ca, () => events.push(`ca observed`))
        onBecomeUnobserved(ca, () => events.push(`ca unobserved`))
        onBecomeObserved(cb, () => events.push(`cb observed`))
        onBecomeUnobserved(cb, () => events.push(`cb unobserved`))
        expect(events).toEqual([])
        // Same as caseA through a two-level chain: still fully untracked.
        cb.get()
        expect(events).toEqual([])
    })
    test("caseC", () => {
        // Computed {keepAlive: true} -> Observable
        const o = observable.box(1)
        const ca = computed(
            () => {
                return o.get()
            },
            { keepAlive: true }
        )
        onBecomeObserved(o, () => events.push(`o observed`))
        onBecomeUnobserved(o, () => events.push(`o unobserved`))
        onBecomeObserved(ca, () => events.push(`ca observed`))
        onBecomeUnobserved(ca, () => events.push(`ca unobserved`))
        expect(events).toEqual([])
        ca.get()
        expect(events).toEqual(["o observed"]) // everything is hot, and 'o' is really observed so that the keptAlive computed knows about its state
    })
    test("caseD", () => {
        // Computed {keepAlive: true} -> Computed {keepAlive: false} -> Observable
        // logs: `o observed`
        // potential issue: why are the callbacks not called on `ca` ?
        const o = observable.box(1)
        const ca = computed(
            () => {
                return o.get()
            },
            { keepAlive: false }
        )
        const cb = computed(
            () => {
                return ca.get() * 2
            },
            { keepAlive: true }
        )
        onBecomeObserved(o, () => events.push(`o observed`))
        onBecomeUnobserved(o, () => events.push(`o unobserved`))
        onBecomeObserved(ca, () => events.push(`ca observed`))
        onBecomeUnobserved(ca, () => events.push(`ca unobserved`))
        onBecomeObserved(cb, () => events.push(`cb observed`))
        onBecomeUnobserved(cb, () => events.push(`cb unobserved`))
        expect(events).toEqual([])
        cb.get()
        expect(events).toEqual(["ca observed", "o observed"]) // see above
    })
    test("caseE - base", () => {
        // Baseline: no keepAlive anywhere; observe via autorun then dispose.
        // Hooks fire symmetrically down and back up the chain.
        const o = observable.box(1)
        const ca = computed(
            () => {
                return o.get()
            },
            { keepAlive: false }
        )
        const cb = computed(
            () => {
                return ca.get() * 2
            },
            { keepAlive: false }
        )
        onBecomeObserved(o, () => events.push(`o observed`))
        onBecomeUnobserved(o, () => events.push(`o unobserved`))
        onBecomeObserved(ca, () => events.push(`ca observed`))
        onBecomeUnobserved(ca, () => events.push(`ca unobserved`))
        onBecomeObserved(cb, () => events.push(`cb observed`))
        onBecomeUnobserved(cb, () => events.push(`cb unobserved`))
        const u = autorun(() => cb.get())
        u()
        expect(events).toEqual([
            "cb observed",
            "ca observed",
            "o observed",
            "cb unobserved",
            "ca unobserved",
            "o unobserved"
        ])
    })
    test("caseE", () => {
        const o = observable.box(1)
        const ca = computed(
            () => {
                return o.get()
            },
            { keepAlive: false }
        )
        const cb = computed(
            () => {
                return ca.get() * 2
            },
            { keepAlive: true }
        )
        onBecomeObserved(o, () => events.push(`o observed`))
        onBecomeUnobserved(o, () => events.push(`o unobserved`))
        onBecomeObserved(ca, () => events.push(`ca observed`))
        onBecomeUnobserved(ca, () => events.push(`ca unobserved`))
        onBecomeObserved(cb, () => events.push(`cb observed`))
        onBecomeUnobserved(cb, () => events.push(`cb unobserved`))
        const u = autorun(() => cb.get())
        u()
        // Note that at this point the observables never become unobserved anymore!
        // That is correct, because if doing our kept-alive computed doesn't recompute until reobserved,
        // itself it is still observing all the values of its own deps to figure whether it is still
        // up to date or not
        expect(events).toEqual(["cb observed", "ca observed", "o observed", "cb unobserved"])
        events.splice(0)
        const u2 = autorun(() => cb.get())
        u2()
        // Re-observing only toggles the top: the deps stayed observed.
        expect(events).toEqual(["cb observed", "cb unobserved"])
    })
    test("caseF", () => {
        // Computed {keepAlive: true} -> Computed {keepAlive: false} -> Observable
        // cb.get() first then autorun() then unsub()
        const o = observable.box(1)
        const ca = computed(
            () => {
                return o.get()
            },
            { keepAlive: false }
        )
        const cb = computed(
            () => {
                return ca.get() * 2
            },
            { keepAlive: true }
        )
        onBecomeObserved(o, () => events.push(`o observed`))
        onBecomeUnobserved(o, () => events.push(`o unobserved`))
        onBecomeObserved(ca, () => events.push(`ca observed`))
        onBecomeUnobserved(ca, () => events.push(`ca unobserved`))
        onBecomeObserved(cb, () => events.push(`cb observed`))
        onBecomeUnobserved(cb, () => events.push(`cb unobserved`))
        // Priming read keeps ca/o observed (keepAlive on cb), as in caseD.
        cb.get()
        expect(events).toEqual(["ca observed", "o observed"])
        events.splice(0)
        const u = autorun(() => cb.get())
        u()
        expect(events).toEqual(["cb observed", "cb unobserved"])
    })
})
// Regression for mobx#2686: an observable reached only through a computed
// must still fire its onBecomeObserved / onBecomeUnobserved hooks when the
// computed becomes (un)observed.
// NOTE(review): the assertions at the bottom run directly in the describe
// callback rather than inside a test()/it() block, so they execute during
// suite collection; consider wrapping them in a test() for clearer
// reporting — confirm intent before changing.
describe("nested computes don't trigger hooks #2686", () => {
    let events: string[] = []
    class Lower {
        public lowerValue$ = -1
        // Flipped by the hooks below so the test can assert observation state.
        public isObserved = false
        constructor() {
            makeObservable(this, {
                lowerValue$: observable
            })
            onBecomeObserved(
                this,
                "lowerValue$",
                action(() => {
                    events.push("onBecomeObserved")
                    this.isObserved = true
                })
            )
            onBecomeUnobserved(
                this,
                "lowerValue$",
                action(() => {
                    events.push("onBecomeUnobserved")
                    this.isObserved = false
                })
            )
        }
    }
    class UpperComputed {
        constructor() {
            makeObservable(this, {
                upperValue$: computed,
                lower$: observable.ref
            })
        }
        public lower$: Lower | undefined
        // Reads through to the lower observable once one is assigned.
        public get upperValue$() {
            events.push("upperValue$")
            const lower = this.lower$
            return lower ? lower.lowerValue$ : -Infinity
        }
    }
    const upperComputed = new UpperComputed()
    const lowerForComputed = new Lower()
    // Set up observers
    const d = autorun(() => {
        events.push("value read through computed: " + upperComputed.upperValue$)
    })
    // Provide the 'lower' values
    runInAction(() => {
        upperComputed.lower$ = lowerForComputed
    })
    // Check if the lower values are being observed.
    expect(lowerForComputed.isObserved).toBe(true)
    d()
    // Disposing the autorun must propagate unobservation down the chain.
    expect(lowerForComputed.isObserved).toBe(false)
    expect(events).toEqual([
        "upperValue$",
        "value read through computed: -Infinity",
        "upperValue$",
        "onBecomeObserved",
        "value read through computed: -1",
        "onBecomeUnobserved"
    ])
})
// Regression for mobx#2686 (part 2): hooks registered on a computed that is
// created lazily *inside* another computed must still fire when that inner
// computed becomes observed / unobserved.
test("#2686 - 2", () => {
    const events: string[] = []
    const options = { useColors: false }
    makeAutoObservable(options)
    const selection = { color: "red" }
    makeAutoObservable(selection)
    const blue = computed(() => {
        let val
        if (options.useColors) {
            // Inner computed is only constructed once useColors flips on.
            const isSelected = computed(() => selection.color === "blue")
            onBecomeObserved(isSelected, () => events.push("observing"))
            onBecomeUnobserved(isSelected, () => events.push("unobserving"))
            val = isSelected.get()
        }
        return { isSelected: val }
    })
    const d = autorun(() => events.push(blue.get().isSelected ? "selected" : "unselected"))
    runInAction(() => {
        options.useColors = true
        selection.color = "blue"
    })
    d()
    expect(events).toEqual(["unselected", "observing", "selected", "unobserving"])
})
// Regression for mobx#2686 (part 3): a computed created lazily inside
// another computed must fire its onBecomeObserved hook and keep
// recomputing as its dependencies change.
test("#2686 - 3", () => {
    const events: string[] = []
    // half first element of array
    // Typed the parameter explicitly (was an implicit `any`): callers pass
    // the observable `network.model` object created below.
    function halfFirst(data: { suffix: string; elements: number[] }) {
        const first = computed(() => {
            events.push("recalculating")
            return Math.round(data.elements[0] / 2) + data.suffix
        })
        onBecomeObserved(first, () => {
            events.push("observing first")
        })
        return first
    }
    // APP
    const network = observable({ model: null as any })
    // load: yields the lazily-built computed once the model has "arrived".
    const load = computed(() => {
        // wait to load it
        if (network.model) {
            return halfFirst(network.model)
        }
        return undefined
    })
    // display
    const result = computed(() => (load.get() ? load.get()!.get() : "loading"))
    autorun(() => {
        events.push("Current result: " + result.get())
    })
    runInAction(() => (network.model = observable({ suffix: "$", elements: [2, 4, 5] })))
    runInAction(() => (network.model.elements[0] = 3))
    // 3 -> 2 and 4 -> 2 both round to "2$", so only one result line appears
    // for the two middle mutations (but each still recalculates).
    runInAction(() => (network.model.elements[0] = 4))
    runInAction(() => (network.model.elements[0] = 5))
    expect(events).toEqual([
        "Current result: loading",
        "observing first",
        "recalculating",
        "Current result: 1$",
        "recalculating",
        "Current result: 2$",
        "recalculating",
        "recalculating",
        "Current result: 3$"
    ])
})
// Regression for mobx#2667: a lazily-initialized observable list populates
// its items in onBecomeObserved and clears them in onBecomeUnobserved;
// swapping the list instance must fire the new list's hooks and, once
// nothing observes either list, both must be torn down.
test("#2667", () => {
    const events: any[] = []
    class LazyInitializedList {
        @observable
        public items: string[] | undefined
        @observable
        public listName
        public constructor(listName: string, lazyItems: string[]) {
            makeObservable(this)
            this.listName = listName
            onBecomeObserved(
                this,
                "items",
                action(() => {
                    // Materialize the items only while someone is watching.
                    this.items = lazyItems
                    events.push("onBecomeObserved" + listName)
                })
            )
            onBecomeUnobserved(
                this,
                "items",
                action(() => {
                    this.items = undefined
                    events.push("onBecomeUnobserved" + listName)
                })
            )
        }
    }
    class ItemsStore {
        @observable
        private list: LazyInitializedList
        public constructor() {
            this.list = new LazyInitializedList("initial", ["a, b, c"])
            makeObservable(this)
        }
        @action
        public changeList = () => {
            this.list = new LazyInitializedList("new", ["b, c, a"])
        }
        @computed
        public get items(): string[] | undefined {
            return this.list.items
        }
        @computed
        public get activeListName(): string {
            return this.list.listName
        }
    }
    const store = new ItemsStore()
    const d = autorun(() => {
        events.push(store.items?.length ?? "-")
        events.push(store.activeListName)
    })
    store.changeList()
    d()
    expect(events).toEqual([
        "onBecomeObservedinitial",
        1,
        "initial",
        "onBecomeObservednew",
        1,
        "new",
        "onBecomeUnobservedinitial",
        "onBecomeUnobservednew"
    ])
})
import chai from 'chai';
import chaiHTTP from 'chai-http';
import sinon from 'sinon';
import utils from './data-generator.utils';
import { IServiceRecordAttributes } from '../src/db/services.schema';
import * as servicesModel from '../src/db/services.schema';
import cronJobs from '../src/controllers/cron-jobs';
import controllerUtils from '../src/controllers/utils';
chai.use(chaiHTTP);
describe('/api/service', async () => {
// HTTP server under test, shared by every case in this suite.
let server: any = null;
before(async () => {
  // Point the app at the test index file and disable auth so requests
  // need no credentials.
  utils.indexFile(true);
  utils.enableAuth(false);
  server = await utils.startServer();
});
after(async () => {
  utils.indexFile(false);
  await utils.stopServer(server);
});
beforeEach(async () => {
  // Recreate an empty Services table and a clean config file per test.
  await servicesModel.Services.sync({ force: true });
  utils.cleanUpConfigFile();
});
afterEach(async () => {
  await servicesModel.Services.drop();
  // Undo any sinon stubs installed by the individual test.
  sinon.restore();
  utils.cleanUpConfigFile();
});
it('/service POST should create a new service', async () => {
  // A freshly generated record must be accepted by the create endpoint.
  const payload = utils.randomServiceRecord();
  const res = await chai.request(server)
    .post('/api/service')
    .send(payload);
  res.status.should.to.be.equal(200);
  res.body.status.should.be.equal('success');
});
// Creating the same record twice: second POST must be rejected.
it('/service POST should not create a duplicate service', async () => {
  const service = utils.randomServiceRecord();
  let response = await chai.request(server)
    .post('/api/service')
    .send(service);
  response.status.should.to.be.equal(200);
  response.body.status.should.be.equal('success');
  response = await chai.request(server)
    .post('/api/service')
    .send(service);
  response.status.should.to.be.equal(500);
  response.body.status.should.be.equal('error');
});
// Empty body -> 400; a model-layer failure (stubbed) -> 500.
it('/service POST should return error for invalid service', async () => {
  const service = utils.randomServiceRecord();
  let response = await chai.request(server)
    .post('/api/service')
    .send({});
  response.status.should.to.be.equal(400);
  response.body.status.should.be.equal('error');
  sinon.stub(servicesModel, 'addService').returns(new Promise(resolve => resolve({ success: false })));
  response = await chai.request(server)
    .post('/api/service')
    .send(service);
  response.status.should.be.equal(500);
  response.body.status.should.be.equal('error');
});
// Round-trip: POST a record, then GET it back by shortName.
it('/service GET should get an existing service', async () => {
  const service = utils.randomServiceRecord();
  let response = await chai.request(server)
    .post('/api/service')
    .send(service);
  response.status.should.to.be.equal(200);
  response.body.status.should.be.equal('success');
  response = await chai.request(server)
    .get('/api/service')
    .send({
      shortName: service.shortName
    });
  response.status.should.to.be.equal(200);
  response.body.status.should.be.equal('success');
  utils.compareServices(response.body.data.service, service).should.be.true;
});
// Model-layer lookup failure (stubbed) must surface as a 500.
it('/service GET should return error in case of exception', async () => {
  const service = utils.randomServiceRecord();
  let response = await chai.request(server)
    .post('/api/service')
    .send(service);
  response.status.should.to.be.equal(200);
  response.body.status.should.be.equal('success');
  sinon.stub(servicesModel, 'findService').returns(new Promise(resolve => resolve({ success: false })));
  response = await chai.request(server)
    .get('/api/service')
    .send({
      shortName: service.shortName
    });
  response.status.should.to.be.equal(500);
  response.body.status.should.be.equal('error');
});
// GET with no shortName in the body -> 400 validation error.
it('/service GET should return error for invalid data', async () => {
  const service = utils.randomServiceRecord();
  let response = await chai.request(server)
    .post('/api/service')
    .send(service);
  response.status.should.to.be.equal(200);
  response.body.status.should.be.equal('success');
  response = await chai.request(server)
    .get('/api/service')
    .send({});
  response.status.should.to.be.equal(400);
  response.body.status.should.be.equal('error');
});
// PUT /api/service — update flows, including the immutable shortName rule.
it('/service PUT should update an existing service', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server).post('/api/service').send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  record.name = 'changed';
  const updated = await chai.request(server)
    .put('/api/service')
    .send({ shortName: record.shortName, service: record });
  updated.status.should.equal(200);
  updated.body.status.should.equal('success');
  // Read it back and verify the change was persisted.
  const fetched = await chai.request(server)
    .get('/api/service')
    .send({ shortName: record.shortName });
  fetched.status.should.equal(200);
  fetched.body.status.should.equal('success');
  utils.compareServices(fetched.body.data.service, record).should.be.true;
});
it('/service PUT should not update a non-existing service', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server).post('/api/service').send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  record.shortName = 'non-existent';
  record.name = 'changed';
  const res = await chai.request(server)
    .put('/api/service')
    .send({ shortName: record.shortName, service: record });
  res.status.should.equal(200);
  res.body.status.should.equal('success');
  // Nothing matched, so no rows were touched.
  res.body.data.affectedRows.should.equal(0);
});
it('/service PUT should return error for invalid data', async () => {
  const record = utils.randomServiceRecord();
  record.url = 'changed';
  const created = await chai.request(server).post('/api/service').send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  // An empty payload fails request validation.
  let res = await chai.request(server).put('/api/service').send({});
  res.status.should.equal(400);
  res.body.status.should.equal('error');
  // A model-level failure surfaces as an internal server error.
  sinon.stub(servicesModel, 'updateService').resolves({ success: false });
  res = await chai.request(server)
    .put('/api/service')
    .send({ shortName: record.shortName, service: record });
  res.status.should.equal(500);
  res.body.status.should.equal('error');
});
it('/service PUT should return error for changing shortName', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server).post('/api/service').send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  // shortName is the record key; attempting to change it is forbidden.
  const res = await chai.request(server)
    .put('/api/service')
    .send({ shortName: 'changed', service: record });
  res.status.should.equal(403);
  res.body.status.should.equal('error');
  res.body.message.should.equal('shortName cannot be changed.');
});
// GET /api/service/all — list retrieval and internal failure handling.
it('/service/all GET should return list of services', async () => {
  const services = utils.randomServiceRecords();
  // FIX: the original used services.forEach(async ...), which never awaits the
  // async callback — the POSTs could still be in flight when the list was
  // requested, and failed assertions inside the callback escaped the test.
  // A sequential for..of guarantees every record is created first.
  for (const s of services) {
    const response = await chai.request(server)
      .post('/api/service')
      .send(s);
    response.status.should.to.be.equal(200);
    response.body.status.should.be.equal('success');
  }
  const response = await chai.request(server)
    .get('/api/service/all');
  response.status.should.to.be.equal(200);
  response.body.status.should.be.equal('success');
  response.body.data.services.forEach((s: IServiceRecordAttributes, i: number) => {
    utils.compareServices(s, services[i]).should.be.true;
  });
});
it('/service/all GET should return error in case of internal exception', async () => {
  const services = utils.randomServiceRecords();
  // Same fix as above: await each creation before proceeding.
  for (const s of services) {
    const response = await chai.request(server)
      .post('/api/service')
      .send(s);
    response.status.should.to.be.equal(200);
    response.body.status.should.be.equal('success');
  }
  // Force the model to fail so the controller reports a 500.
  sinon.stub(servicesModel, 'getAllServices').returns(new Promise(resolve => resolve({ success: false })));
  const response = await chai.request(server)
    .get('/api/service/all');
  response.status.should.to.be.equal(500);
  response.body.status.should.be.equal('error');
});
// GET /api/service/status — online/offline reporting after a cron sweep.
it('/service/status GET should return online status of service with shortName', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server).post('/api/service').send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  // Pretend the service endpoint is reachable, then let the cron job record it.
  sinon.stub(controllerUtils, 'isReachable').resolves(true);
  await cronJobs.job();
  const actual = await chai.request(server)
    .get('/api/service/status')
    .query({ shortName: record.shortName });
  actual.status.should.equal(200);
  actual.body.status.should.equal('success');
  actual.body.data.online.should.be.true;
});
it('/service/status GET should return offline status of service with shortName', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server).post('/api/service').send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  // Pretend the service endpoint is down, then let the cron job record it.
  sinon.stub(controllerUtils, 'isReachable').resolves(false);
  await cronJobs.job();
  const actual = await chai.request(server)
    .get('/api/service/status')
    .query({ shortName: record.shortName });
  actual.status.should.equal(200);
  actual.body.status.should.equal('success');
  actual.body.data.online.should.be.false;
});
it('/service/status GET should return error for missing shortName', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server).post('/api/service').send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  const actual = await chai.request(server).get('/api/service/status').query({});
  actual.status.should.equal(403);
  actual.body.status.should.equal('error');
  actual.body.message.should.equal('Missing or incorrect data.');
});
it('/service/status GET should return error for internal exception', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server).post('/api/service').send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  // A model-level failure surfaces as a 500 carrying its error message.
  sinon.stub(servicesModel, 'getStatus').resolves({ success: false, errorMessage: 'Error occurred.' });
  const actual = await chai.request(server)
    .get('/api/service/status')
    .query({ shortName: record.shortName });
  actual.status.should.equal(500);
  actual.body.status.should.equal('error');
  actual.body.message.should.equal('Error occurred.');
});
// DELETE /api/service — removal, idempotent delete, and validation failures.
it('/service DELETE should delete an existing service', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server).post('/api/service').send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  const deleted = await chai.request(server)
    .delete('/api/service')
    .send({ shortName: record.shortName });
  deleted.status.should.equal(200);
  deleted.body.status.should.equal('success');
  // The record must be gone afterwards.
  const fetched = await chai.request(server)
    .get('/api/service')
    .send({ shortName: record.shortName });
  fetched.status.should.equal(404);
  fetched.body.status.should.equal('error');
});
it('/service DELETE should not delete a non-existing service', async () => {
  const record = utils.randomServiceRecord();
  // Deleting a record that was never created succeeds but touches no rows.
  const res = await chai.request(server)
    .delete('/api/service')
    .send({ shortName: record.shortName });
  res.status.should.equal(200);
  res.body.status.should.equal('success');
  res.body.data.deletedRows.should.equal(0);
});
it('/service DELETE should return error for invalid data', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server).post('/api/service').send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  // An empty payload fails request validation.
  let res = await chai.request(server).delete('/api/service').send({});
  res.status.should.equal(400);
  res.body.status.should.equal('error');
  // A model-level failure surfaces as an internal server error.
  sinon.stub(servicesModel, 'deleteService').resolves({ success: false });
  res = await chai.request(server)
    .delete('/api/service')
    .send({ shortName: record.shortName });
  res.status.should.equal(500);
  res.body.status.should.equal('error');
});
});
/*
MIT License
Copyright (c) 2021 Mandar Patil (mandarons@pm.me)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
describe('/api/service - authenticated', async () => {
let server: any = null;
let cookieString: string = '';
// Boot the server with authentication turned on and log in as admin once,
// keeping the session cookie for the authenticated requests below.
before(async () => {
  utils.indexFile(true);
  utils.enableAuth(true);
  server = await utils.startServer();
  const login = await chai.request(server)
    .post('/api/auth/login')
    .send({ username: 'admin', password: 'admin' });
  cookieString = login.header['set-cookie'][0];
});
after(async () => {
  utils.indexFile(false);
  await utils.stopServer(server);
});
// Fresh services table and clean config before each test...
beforeEach(async () => {
  await servicesModel.Services.sync({ force: true });
  utils.cleanUpConfigFile();
});
// ...then drop the table and undo all sinon stubs afterwards.
afterEach(async () => {
  await servicesModel.Services.drop();
  sinon.restore();
  utils.cleanUpConfigFile();
});
// POST /api/service (authenticated) — creation, auth enforcement, duplicates,
// and validation failures.
it('/service POST should create a new service', async () => {
  const record = utils.randomServiceRecord();
  const res = await chai.request(server)
    .post('/api/service')
    .set('Cookie', cookieString)
    .send(record);
  res.status.should.equal(200);
  res.body.status.should.equal('success');
});
it('/service POST should not create a new service if unauthenticated', async () => {
  const record = utils.randomServiceRecord();
  // No cookie: the request must be rejected before reaching the controller.
  const res = await chai.request(server).post('/api/service').send(record);
  res.status.should.equal(401);
  res.text.should.equal('Unauthorized');
});
it('/service POST should not create a duplicate service', async () => {
  const record = utils.randomServiceRecord();
  const first = await chai.request(server)
    .post('/api/service')
    .set('Cookie', cookieString)
    .send(record);
  first.status.should.equal(200);
  first.body.status.should.equal('success');
  // Re-posting the same record must be rejected.
  const second = await chai.request(server)
    .post('/api/service')
    .set('Cookie', cookieString)
    .send(record);
  second.status.should.equal(500);
  second.body.status.should.equal('error');
});
it('/service POST should return error for invalid service', async () => {
  const record = utils.randomServiceRecord();
  // An empty payload fails request validation.
  let res = await chai.request(server)
    .post('/api/service')
    .set('Cookie', cookieString)
    .send({});
  res.status.should.equal(400);
  res.body.status.should.equal('error');
  // A model-level failure surfaces as an internal server error.
  sinon.stub(servicesModel, 'addService').resolves({ success: false });
  res = await chai.request(server)
    .post('/api/service')
    .set('Cookie', cookieString)
    .send(record);
  res.status.should.equal(500);
  res.body.status.should.equal('error');
});
// GET /api/service (authenticated) — lookup, auth enforcement, and failures.
it('/service GET should get an existing service', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server)
    .post('/api/service')
    .set('Cookie', cookieString)
    .send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  const res = await chai.request(server)
    .get('/api/service')
    .set('Cookie', cookieString)
    .send({ shortName: record.shortName });
  res.status.should.equal(200);
  res.body.status.should.equal('success');
  utils.compareServices(res.body.data.service, record).should.be.true;
});
it('/service GET should return 401 for unauthorized request', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server)
    .post('/api/service')
    .set('Cookie', cookieString)
    .send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  // No cookie on the read: must be rejected.
  const res = await chai.request(server)
    .get('/api/service')
    .send({ shortName: record.shortName });
  res.status.should.equal(401);
  res.text.should.equal('Unauthorized');
});
it('/service GET should return error in case of exception', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server)
    .post('/api/service')
    .set('Cookie', cookieString)
    .send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  // Force the model lookup to fail so the controller reports a 500.
  sinon.stub(servicesModel, 'findService').resolves({ success: false });
  const res = await chai.request(server)
    .get('/api/service')
    .set('Cookie', cookieString)
    .send({ shortName: record.shortName });
  res.status.should.equal(500);
  res.body.status.should.equal('error');
});
it('/service GET should return error for invalid data', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server)
    .post('/api/service')
    .set('Cookie', cookieString)
    .send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  // A payload without shortName fails request validation.
  const res = await chai.request(server)
    .get('/api/service')
    .set('Cookie', cookieString)
    .send({});
  res.status.should.equal(400);
  res.body.status.should.equal('error');
});
// PUT /api/service (authenticated) — updates, auth enforcement, and the
// immutable shortName rule.
it('/service PUT should update an existing service', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server)
    .post('/api/service')
    .set('Cookie', cookieString)
    .send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  record.name = 'changed';
  const updated = await chai.request(server)
    .put('/api/service')
    .set('Cookie', cookieString)
    .send({ shortName: record.shortName, service: record });
  updated.status.should.equal(200);
  updated.body.status.should.equal('success');
  // Read it back and verify the change was persisted.
  const fetched = await chai.request(server)
    .get('/api/service')
    .set('Cookie', cookieString)
    .send({ shortName: record.shortName });
  fetched.status.should.equal(200);
  fetched.body.status.should.equal('success');
  utils.compareServices(fetched.body.data.service, record).should.be.true;
});
it('/service PUT should return 401 for unauthorized', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server)
    .post('/api/service')
    .set('Cookie', cookieString)
    .send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  record.name = 'changed';
  // No cookie on the update: must be rejected.
  const res = await chai.request(server)
    .put('/api/service')
    .send({ shortName: record.shortName, service: record });
  res.status.should.equal(401);
  res.text.should.equal('Unauthorized');
});
it('/service PUT should not update a non-existing service', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server)
    .post('/api/service')
    .set('Cookie', cookieString)
    .send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  record.shortName = 'non-existent';
  record.name = 'changed';
  const res = await chai.request(server)
    .put('/api/service')
    .set('Cookie', cookieString)
    .send({ shortName: record.shortName, service: record });
  res.status.should.equal(200);
  res.body.status.should.equal('success');
  // Nothing matched, so no rows were touched.
  res.body.data.affectedRows.should.equal(0);
});
it('/service PUT should return error for invalid data', async () => {
  const record = utils.randomServiceRecord();
  record.url = 'changed';
  const created = await chai.request(server)
    .post('/api/service')
    .set('Cookie', cookieString)
    .send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  // An empty payload fails request validation.
  let res = await chai.request(server)
    .put('/api/service')
    .set('Cookie', cookieString)
    .send({});
  res.status.should.equal(400);
  res.body.status.should.equal('error');
  // A model-level failure surfaces as an internal server error.
  sinon.stub(servicesModel, 'updateService').resolves({ success: false });
  res = await chai.request(server)
    .put('/api/service')
    .set('Cookie', cookieString)
    .send({ shortName: record.shortName, service: record });
  res.status.should.equal(500);
  res.body.status.should.equal('error');
});
it('/service PUT should return error for changing shortName', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server)
    .post('/api/service')
    .set('Cookie', cookieString)
    .send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  // shortName is the record key; attempting to change it is forbidden.
  const res = await chai.request(server)
    .put('/api/service')
    .set('Cookie', cookieString)
    .send({ shortName: 'changed', service: record });
  res.status.should.equal(403);
  res.body.status.should.equal('error');
  res.body.message.should.equal('shortName cannot be changed.');
});
// GET /api/service/all (authenticated) — list retrieval, auth enforcement,
// and internal failure handling.
it('/service/all GET should return list of services', async () => {
  const services = utils.randomServiceRecords();
  // FIX: the original used services.forEach(async ...), which never awaits the
  // async callback — the POSTs could still be in flight when the list was
  // requested, and failed assertions inside the callback escaped the test.
  // A sequential for..of guarantees every record is created first.
  for (const s of services) {
    const response = await chai.request(server)
      .post('/api/service')
      .set('Cookie', cookieString)
      .send(s);
    response.status.should.to.be.equal(200);
    response.body.status.should.be.equal('success');
  }
  const response = await chai.request(server)
    .get('/api/service/all')
    .set('Cookie', cookieString);
  response.status.should.to.be.equal(200);
  response.body.status.should.be.equal('success');
  response.body.data.services.forEach((s: IServiceRecordAttributes, i: number) => {
    utils.compareServices(s, services[i]).should.be.true;
  });
});
it('/service/all GET should return 401 for unauthorized', async () => {
  const services = utils.randomServiceRecords();
  // Same fix as above: await each creation before proceeding.
  for (const s of services) {
    const response = await chai.request(server)
      .post('/api/service')
      .set('Cookie', cookieString)
      .send(s);
    response.status.should.to.be.equal(200);
    response.body.status.should.be.equal('success');
  }
  // No cookie: listing must be rejected.
  const response = await chai.request(server)
    .get('/api/service/all');
  response.status.should.to.be.equal(401);
  response.text.should.be.equal('Unauthorized');
});
it('/service/all GET should return error in case of internal exception', async () => {
  const services = utils.randomServiceRecords();
  // Same fix as above: await each creation before proceeding.
  for (const s of services) {
    const response = await chai.request(server)
      .post('/api/service')
      .set('Cookie', cookieString)
      .send(s);
    response.status.should.to.be.equal(200);
    response.body.status.should.be.equal('success');
  }
  // Force the model to fail so the controller reports a 500.
  sinon.stub(servicesModel, 'getAllServices').returns(new Promise(resolve => resolve({ success: false })));
  const response = await chai.request(server)
    .get('/api/service/all')
    .set('Cookie', cookieString);
  response.status.should.to.be.equal(500);
  response.body.status.should.be.equal('error');
});
// GET /api/service/status (authenticated) — online/offline reporting after a
// cron sweep, plus auth and failure handling.
it('/service/status GET should return online status of service with shortName', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server)
    .post('/api/service')
    .set('Cookie', cookieString)
    .send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  // Pretend the service endpoint is reachable, then let the cron job record it.
  sinon.stub(controllerUtils, 'isReachable').resolves(true);
  await cronJobs.job();
  const actual = await chai.request(server)
    .get('/api/service/status')
    .query({ shortName: record.shortName })
    .set('Cookie', cookieString);
  actual.status.should.equal(200);
  actual.body.status.should.equal('success');
  actual.body.data.online.should.be.true;
});
it('/service/status GET should return 401 for unauthorized', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server)
    .post('/api/service')
    .set('Cookie', cookieString)
    .send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  sinon.stub(controllerUtils, 'isReachable').resolves(true);
  await cronJobs.job();
  // No cookie: the status query must be rejected.
  const actual = await chai.request(server)
    .get('/api/service/status')
    .query({ shortName: record.shortName });
  actual.status.should.equal(401);
  actual.text.should.equal('Unauthorized');
});
it('/service/status GET should return offline status of service with shortName', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server)
    .post('/api/service')
    .set('Cookie', cookieString)
    .send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  // Pretend the service endpoint is down, then let the cron job record it.
  sinon.stub(controllerUtils, 'isReachable').resolves(false);
  await cronJobs.job();
  const actual = await chai.request(server)
    .get('/api/service/status')
    .query({ shortName: record.shortName })
    .set('Cookie', cookieString);
  actual.status.should.equal(200);
  actual.body.status.should.equal('success');
  actual.body.data.online.should.be.false;
});
it('/service/status GET should return error for missing shortName', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server)
    .post('/api/service')
    .set('Cookie', cookieString)
    .send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  const actual = await chai.request(server)
    .get('/api/service/status')
    .query({})
    .set('Cookie', cookieString);
  actual.status.should.equal(403);
  actual.body.status.should.equal('error');
  actual.body.message.should.equal('Missing or incorrect data.');
});
it('/service/status GET should return error for internal exception', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server)
    .post('/api/service')
    .set('Cookie', cookieString)
    .send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  // A model-level failure surfaces as a 500 carrying its error message.
  sinon.stub(servicesModel, 'getStatus').resolves({ success: false, errorMessage: 'Error occurred.' });
  const actual = await chai.request(server)
    .get('/api/service/status')
    .query({ shortName: record.shortName })
    .set('Cookie', cookieString);
  actual.status.should.equal(500);
  actual.body.status.should.equal('error');
  actual.body.message.should.equal('Error occurred.');
});
// DELETE /api/service (authenticated) — removal, auth enforcement, idempotent
// delete, and validation failures.
it('/service DELETE should delete an existing service', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server)
    .post('/api/service')
    .set('Cookie', cookieString)
    .send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  const deleted = await chai.request(server)
    .delete('/api/service')
    .set('Cookie', cookieString)
    .send({ shortName: record.shortName });
  deleted.status.should.equal(200);
  deleted.body.status.should.equal('success');
  // The record must be gone afterwards.
  const fetched = await chai.request(server)
    .get('/api/service')
    .set('Cookie', cookieString)
    .send({ shortName: record.shortName });
  fetched.status.should.equal(404);
  fetched.body.status.should.equal('error');
});
it('/service DELETE should return 401 for unauthorized', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server)
    .post('/api/service')
    .set('Cookie', cookieString)
    .send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  // No cookie on the delete: must be rejected.
  const res = await chai.request(server)
    .delete('/api/service')
    .send({ shortName: record.shortName });
  res.status.should.equal(401);
  res.text.should.equal('Unauthorized');
});
it('/service DELETE should not delete a non-existing service', async () => {
  const record = utils.randomServiceRecord();
  // Deleting a record that was never created succeeds but touches no rows.
  const res = await chai.request(server)
    .delete('/api/service')
    .set('Cookie', cookieString)
    .send({ shortName: record.shortName });
  res.status.should.equal(200);
  res.body.status.should.equal('success');
  res.body.data.deletedRows.should.equal(0);
});
it('/service DELETE should return error for invalid data', async () => {
  const record = utils.randomServiceRecord();
  const created = await chai.request(server)
    .post('/api/service')
    .set('Cookie', cookieString)
    .send(record);
  created.status.should.equal(200);
  created.body.status.should.equal('success');
  // An empty payload fails request validation.
  let res = await chai.request(server)
    .delete('/api/service')
    .set('Cookie', cookieString)
    .send({});
  res.status.should.equal(400);
  res.body.status.should.equal('error');
  // A model-level failure surfaces as an internal server error.
  sinon.stub(servicesModel, 'deleteService').resolves({ success: false });
  res = await chai.request(server)
    .delete('/api/service')
    .set('Cookie', cookieString)
    .send({ shortName: record.shortName });
  res.status.should.equal(500);
  res.body.status.should.equal('error');
});
});
import config from '../../../config';
import { Item } from '../../../services/database/types';
import { ErrorCode } from '../../../utils/errors';
import { createUserAndSession, db, makeNoteSerializedBody, models } from '../../../utils/testing/testUtils';
import { Config, StorageDriverConfig, StorageDriverMode } from '../../../utils/types';
import newModelFactory from '../../factory';
import loadStorageDriver from './loadStorageDriver';
import { Context } from './StorageDriverBase';
// Builds a model factory whose item storage uses the given driver — plus an
// optional fallback driver — on top of the shared test database.
const newTestModels = (driverConfig: StorageDriverConfig, driverConfigFallback: StorageDriverConfig = null) => {
  const testConfig: Config = {
    ...config(),
    storageDriver: driverConfig,
    storageDriverFallback: driverConfigFallback,
  };
  return newModelFactory(db(), testConfig);
};
// Saves a note through the item model, then verifies the content can be read
// back both via the model and directly from the storage driver.
export function shouldWriteToContentAndReadItBack(driverConfig: StorageDriverConfig) {
  test('should write to content and read it back', async function() {
    const noteName = '00000000000000000000000000000001.md';
    const { user } = await createUserAndSession(1);
    const noteBody = makeNoteSerializedBody({
      id: '00000000000000000000000000000001',
      title: 'testing driver',
    });
    const testModels = newTestModels(driverConfig);
    const driver = await testModels.item().storageDriver();
    const output = await testModels.item().saveFromRawContent(user, [{
      name: noteName,
      body: Buffer.from(noteBody),
    }]);
    const result = output[noteName];
    expect(result.error).toBeFalsy();
    // Load through the model and confirm size and storage-driver bookkeeping.
    const item = await testModels.item().loadWithContent(result.item.id);
    expect(item.content.byteLength).toBe(item.content_size);
    expect(item.content_storage_id).toBe(driver.storageId);
    // Reading straight from the driver must return the same byte count.
    const rawContent = await driver.read(item.id, { models: models() });
    expect(rawContent.byteLength).toBe(item.content_size);
    // The stored bytes must still deserialize into the original Joplin note.
    const jopItem = testModels.item().itemToJoplinItem(item);
    expect(jopItem.id).toBe('00000000000000000000000000000001');
    expect(jopItem.title).toBe('testing driver');
  });
}
// Saves one item then deletes it, checking the item count goes 1 -> 0.
export function shouldDeleteContent(driverConfig: StorageDriverConfig) {
  test('should delete the content', async function() {
    const { user } = await createUserAndSession(1);
    const serialized = makeNoteSerializedBody({
      id: '00000000000000000000000000000001',
      title: 'testing driver',
    });
    const testModels = newTestModels(driverConfig);
    const saved = await testModels.item().saveFromRawContent(user, [{
      name: '00000000000000000000000000000001.md',
      body: Buffer.from(serialized),
    }]);
    const item: Item = saved['00000000000000000000000000000001.md'].item;
    expect((await testModels.item().all()).length).toBe(1);
    await testModels.item().delete(item.id);
    expect((await testModels.item().all()).length).toBe(0);
  });
}
// Makes the driver's write() throw and verifies the save reports the error
// and no item row is created.
export function shouldNotCreateItemIfContentNotSaved(driverConfig: StorageDriverConfig) {
  test('should not create the item if the content cannot be saved', async function() {
    const testModels = newTestModels(driverConfig);
    const driver = await testModels.item().storageDriver();
    const originalWrite = driver.write;
    // Sabotage the driver so any content write fails.
    driver.write = () => { throw new Error('not working!'); };
    try {
      const { user } = await createUserAndSession(1);
      const serialized = makeNoteSerializedBody({
        id: '00000000000000000000000000000001',
        title: 'testing driver',
      });
      const output = await testModels.item().saveFromRawContent(user, [{
        name: '00000000000000000000000000000001.md',
        body: Buffer.from(serialized),
      }]);
      expect(output['00000000000000000000000000000001.md'].error.message).toBe('not working!');
      expect((await testModels.item().all()).length).toBe(0);
    } finally {
      // Always undo the sabotage so other tests get a working driver.
      driver.write = originalWrite;
    }
  });
}
// Saves a note, updates it once, then makes driver.write() throw and verifies
// a failing second update neither succeeds nor clobbers the stored title.
export function shouldNotUpdateItemIfContentNotSaved(driverConfig: StorageDriverConfig) {
  test('should not update the item if the content cannot be saved', async function() {
    const { user } = await createUserAndSession(1);
    const testModels = newTestModels(driverConfig);
    const driver = await testModels.item().storageDriver();
    // Save the note with the given title under the fixed item name.
    const save = async (title: string) => {
      return testModels.item().saveFromRawContent(user, [{
        name: '00000000000000000000000000000001.md',
        body: Buffer.from(makeNoteSerializedBody({
          id: '00000000000000000000000000000001',
          title,
        })),
      }]);
    };
    // Reload the note (with content) and return its current title.
    const loadTitle = async () => {
      const raw = await testModels.item().loadByJopId(user.id, '00000000000000000000000000000001', { withContent: true });
      return testModels.item().itemToJoplinItem(raw).title;
    };
    await save('testing driver');
    await save('updated 1');
    expect(await loadTitle()).toBe('updated 1');
    const originalWrite = driver.write;
    driver.write = () => { throw new Error('not working!'); };
    try {
      const output = await save('updated 2');
      expect(output['00000000000000000000000000000001.md'].error.message).toBe('not working!');
      expect(await loadTitle()).toBe('updated 1'); // Check it has not been updated
    } finally {
      driver.write = originalWrite;
    }
  });
}
// Verifies fallback-driver behaviour: reads fall back to the secondary driver
// when the content is missing from the main one, and writes go to the main
// driver while the fallback copy is cleared.
export function shouldSupportFallbackDriver(driverConfig: StorageDriverConfig, fallbackDriverConfig: StorageDriverConfig) {
	test('should support fallback content drivers', async function() {
		const { user } = await createUserAndSession(1);
		// Save an item using only the main driver (no fallback configured yet).
		const testModels = newTestModels(driverConfig);
		const driver = await testModels.item().storageDriver();
		const output = await testModels.item().saveFromRawContent(user, [{
			name: '00000000000000000000000000000001.md',
			body: Buffer.from(makeNoteSerializedBody({
				id: '00000000000000000000000000000001',
				title: 'testing',
			})),
		}]);
		const itemId = output['00000000000000000000000000000001.md'].item.id;
		// Remember the stored content size so later reads can be compared to it.
		let previousByteLength = 0;
		{
			const content = await driver.read(itemId, { models: models() });
			expect(content.byteLength).toBeGreaterThan(10);
			previousByteLength = content.byteLength;
		}
		const testModelWithFallback = newTestModels(driverConfig, fallbackDriverConfig);
		// If the item content is not on the main content driver, it should get
		// it from the fallback one.
		const itemFromDb = await testModelWithFallback.item().loadWithContent(itemId);
		expect(itemFromDb.content.byteLength).toBe(previousByteLength);
		// When writing content, it should use the main content driver, and set
		// the content for the fallback one to "".
		await testModelWithFallback.item().saveFromRawContent(user, [{
			name: '00000000000000000000000000000001.md',
			body: Buffer.from(makeNoteSerializedBody({
				id: '00000000000000000000000000000001',
				title: 'testing1234',
			})),
		}]);
		{
			const fallbackDriver = await testModelWithFallback.item().storageDriverFallback();
			// Check that it has cleared the fallback driver content
			const context: Context = { models: models() };
			const fallbackContent = await fallbackDriver.read(itemId, context);
			expect(fallbackContent.byteLength).toBe(0);
			// Check that it has written to the main driver content.
			// The new title 'testing1234' is 4 bytes longer than 'testing',
			// hence the +4 below.
			const mainContent = await driver.read(itemId, context);
			expect(mainContent.byteLength).toBe(previousByteLength + 4);
		}
	});
}
// Verifies that when the fallback driver is configured in read/write mode,
// saving an item writes identical content to both the main driver and the
// fallback driver.
export function shouldSupportFallbackDriverInReadWriteMode(driverConfig: StorageDriverConfig, fallbackDriverConfig: StorageDriverConfig) {
	test('should support fallback content drivers in rw mode', async function() {
		// This scenario only makes sense when the fallback driver accepts writes.
		if (fallbackDriverConfig.mode !== StorageDriverMode.ReadAndWrite) throw new Error('Content driver must be configured in RW mode for this test');

		const { user } = await createUserAndSession(1);
		const fallbackModels = newTestModels(driverConfig, fallbackDriverConfig);

		const noteName = '00000000000000000000000000000001.md';
		const saveResult = await fallbackModels.item().saveFromRawContent(user, [{
			name: noteName,
			body: Buffer.from(makeNoteSerializedBody({
				id: '00000000000000000000000000000001',
				title: 'testing',
			})),
		}]);
		const savedItemId = saveResult[noteName].item.id;

		{
			const mainDriver = await fallbackModels.item().storageDriver();
			const secondaryDriver = await fallbackModels.item().storageDriverFallback();

			// The content must have been written to both drivers, and be identical.
			const context: Context = { models: models() };
			const fromFallback = await secondaryDriver.read(savedItemId, context);
			expect(fromFallback.byteLength).toBeGreaterThan(10);
			const fromMain = await mainDriver.read(savedItemId, context);
			expect(fromMain.toString()).toBe(fromFallback.toString());
		}
	});
}
// Verifies that re-saving an item through a model configured with a different
// storage driver re-points the item's content_storage_id to the new driver.
export function shouldUpdateContentStorageIdAfterSwitchingDriver(oldDriverConfig: StorageDriverConfig, newDriverConfig: StorageDriverConfig) {
	test('should update content storage ID after switching driver', async function() {
		// The test is meaningless when both configs use the same driver type.
		if (oldDriverConfig.type === newDriverConfig.type) throw new Error('Drivers must be different for this test');
		const { user } = await createUserAndSession(1);
		const oldDriverModel = newTestModels(oldDriverConfig);
		const newDriverModel = newTestModels(newDriverConfig);
		const oldDriver = await oldDriverModel.item().storageDriver();
		const newDriver = await newDriverModel.item().storageDriver();
		// First save goes through the old driver...
		const output = await oldDriverModel.item().saveFromRawContent(user, [{
			name: '00000000000000000000000000000001.md',
			body: Buffer.from(makeNoteSerializedBody({
				id: '00000000000000000000000000000001',
				title: 'testing',
			})),
		}]);
		const itemId = output['00000000000000000000000000000001.md'].item.id;
		expect((await oldDriverModel.item().load(itemId)).content_storage_id).toBe(oldDriver.storageId);
		// ...then re-saving the same item through the new driver should keep a
		// single item row but switch its content storage ID to the new driver.
		await newDriverModel.item().saveFromRawContent(user, [{
			name: '00000000000000000000000000000001.md',
			body: Buffer.from(makeNoteSerializedBody({
				id: '00000000000000000000000000000001',
				title: 'testing',
			})),
		}]);
		expect(await newDriverModel.item().count()).toBe(1);
		expect((await oldDriverModel.item().load(itemId)).content_storage_id).toBe(newDriver.storageId);
	});
}
export function shouldThrowNotFoundIfNotExist(driverConfig: StorageDriverConfig) {
test('should throw not found if item does not exist', async function() {
const driver = await loadStorageDriver(driverConfig, db());
let error: any = null;
try {
await driver.read('doesntexist', { models: models() });
} catch (e) {
error = e;
}
expect(error).toBeTruthy();
expect(error.code).toBe(ErrorCode.NotFound);
});
} | the_stack |
import * as React from 'react';
import * as ScrollArea from './ScrollArea';
import { css } from '../../../../stitches.config';
export default { title: 'Components/ScrollArea' };
// Interactive playground: a small control form drives the props passed to the
// scroll area story below it.
export const Basic = () => {
  const [storyProps, setStoryProps] = React.useState({} as any);

  // Collect the current form values into a props object. Empty strings are
  // mapped to undefined so they fall back to the component defaults.
  const onFormChange = (event: React.FormEvent<HTMLFormElement>) => {
    const formData = new FormData(event.currentTarget);
    const entries = (formData as any).entries();
    const pairs = [...entries].map(([key, value]: any) => [key, value || undefined]);
    setStoryProps(Object.fromEntries(pairs));
  };

  return (
    <>
      <div style={{ margin: '20px auto', width: 'max-content', textAlign: 'center' }}>
        <form onChange={onFormChange}>
          <label>
            type:{' '}
            <select name="type">
              <option></option>
              <option>always</option>
              <option>auto</option>
              <option>scroll</option>
              <option>hover</option>
            </select>
          </label>{' '}
          <label>
            dir:{' '}
            <select name="dir">
              <option></option>
              <option>ltr</option>
              <option>rtl</option>
            </select>
          </label>{' '}
          <label>
            scrollHideDelay: <input type="number" name="scrollHideDelay" />
          </label>
        </form>
      </div>

      <ScrollAreaStory
        {...storyProps}
        key={storyProps.type}
        style={{ width: 800, height: 800, margin: '30px auto' }}
      >
        {Array.from({ length: 30 }).map((_, index) => (
          <Copy key={index} />
        ))}
      </ScrollAreaStory>
    </>
  );
};
// A user-resizable container, used to check that the scroll area tracks the
// size of its parent.
export const Resizable = () => {
  const containerStyle: React.CSSProperties = {
    width: 800,
    height: 800,
    padding: 20,
    resize: 'both',
    border: '1px solid gray',
    overflow: 'hidden',
  };
  return (
    <div style={containerStyle}>
      <ScrollAreaStory style={{ width: '100%', height: '100%' }}>
        {Array.from({ length: 30 }).map((_, index) => (
          <Copy key={index} />
        ))}
      </ScrollAreaStory>
    </div>
  );
};
// Lets the user grow the content at runtime: one button adds paragraphs
// (vertical growth), the other widens them (horizontal growth), to check the
// scrollbars update when content changes.
export const ContentChange = () => {
  const [paragraphCount, setParagraphCount] = React.useState(1);
  const [widthFactor, setWidthFactor] = React.useState(1);

  return (
    <>
      <button onClick={() => setParagraphCount((count) => count + 1)}>Add vertical content</button>
      <button onClick={() => setWidthFactor((count) => count + 1)}>
        Increase horizontal size
      </button>
      <ScrollAreaStory type="always" style={{ width: 800, height: 800 }}>
        {Array.from({ length: paragraphCount }).map((_, index) => (
          <Copy key={index} style={{ width: `${300 * widthFactor}px` }} />
        ))}
      </ScrollAreaStory>
    </>
  );
};
// Visual-regression matrix for Chromatic: renders every scrollbar `type`
// (auto/always/scroll/hover) crossed with every orientation (vertical only,
// horizontal only, both), with and without overflowing content, plus minimum
// thumb size and RTL cases.
export const Chromatic = () => (
  <>
    <h1>Vertical</h1>
    <h2>Auto with overflow</h2>
    <ScrollAreaStory type="auto" vertical horizontal={false}>
      {Array.from({ length: 10 }).map((_, index) => (
        <Copy key={index} />
      ))}
    </ScrollAreaStory>
    <h2>Auto without overflow</h2>
    <ScrollAreaStory type="auto" vertical horizontal={false}>
      <Copy style={{ height: 50, overflow: 'hidden' }} />
    </ScrollAreaStory>
    <h2>Always with overflow</h2>
    <ScrollAreaStory type="always" vertical horizontal={false}>
      {Array.from({ length: 10 }).map((_, index) => (
        <Copy key={index} />
      ))}
    </ScrollAreaStory>
    <h2>Always without overflow</h2>
    <ScrollAreaStory type="always" vertical horizontal={false}>
      <Copy style={{ height: 50, overflow: 'hidden' }} />
    </ScrollAreaStory>
    <h2>Scroll with overflow</h2>
    <ScrollAreaStory type="scroll" vertical horizontal={false}>
      {Array.from({ length: 10 }).map((_, index) => (
        <Copy key={index} />
      ))}
    </ScrollAreaStory>
    <h2>Scroll without overflow</h2>
    <ScrollAreaStory type="scroll" vertical horizontal={false}>
      <Copy style={{ height: 50, overflow: 'hidden' }} />
    </ScrollAreaStory>
    <h2>Hover with overflow</h2>
    <ScrollAreaStory type="hover" vertical horizontal={false}>
      {Array.from({ length: 10 }).map((_, index) => (
        <Copy key={index} />
      ))}
    </ScrollAreaStory>
    <h2>Hover without overflow</h2>
    <ScrollAreaStory type="hover" vertical horizontal={false}>
      <Copy style={{ height: 50, overflow: 'hidden' }} />
    </ScrollAreaStory>
    <h1>Horizontal</h1>
    <h2>Auto with overflow</h2>
    <ScrollAreaStory type="auto" vertical={false} horizontal>
      {Array.from({ length: 10 }).map((_, index) => (
        <Copy key={index} />
      ))}
    </ScrollAreaStory>
    <h2>Auto without overflow</h2>
    <ScrollAreaStory type="auto" vertical={false} horizontal>
      <Copy style={{ width: 50, overflow: 'hidden' }} />
    </ScrollAreaStory>
    <h2>Always with overflow</h2>
    <ScrollAreaStory type="always" vertical={false} horizontal>
      {Array.from({ length: 10 }).map((_, index) => (
        <Copy key={index} />
      ))}
    </ScrollAreaStory>
    <h2>Always without overflow</h2>
    <ScrollAreaStory type="always" vertical={false} horizontal>
      <Copy style={{ width: 50, overflow: 'hidden' }} />
    </ScrollAreaStory>
    <h2>Scroll with overflow</h2>
    <ScrollAreaStory type="scroll" vertical={false} horizontal>
      {Array.from({ length: 10 }).map((_, index) => (
        <Copy key={index} />
      ))}
    </ScrollAreaStory>
    <h2>Scroll without overflow</h2>
    <ScrollAreaStory type="scroll" vertical={false} horizontal>
      <Copy style={{ width: 50, overflow: 'hidden' }} />
    </ScrollAreaStory>
    <h2>Hover with overflow</h2>
    <ScrollAreaStory type="hover" vertical={false} horizontal>
      {Array.from({ length: 10 }).map((_, index) => (
        <Copy key={index} />
      ))}
    </ScrollAreaStory>
    <h2>Hover without overflow</h2>
    <ScrollAreaStory type="hover" vertical={false} horizontal>
      <Copy style={{ width: 50, overflow: 'hidden' }} />
    </ScrollAreaStory>
    <h1>Both</h1>
    <h2>Auto with overflow</h2>
    <ScrollAreaStory type="auto" vertical horizontal>
      {Array.from({ length: 10 }).map((_, index) => (
        <Copy key={index} />
      ))}
    </ScrollAreaStory>
    <h2>Auto with horizontal overflow</h2>
    <ScrollAreaStory type="auto" vertical horizontal>
      {Array.from({ length: 1 }).map((_, index) => (
        <Copy key={index} />
      ))}
    </ScrollAreaStory>
    <h2>Auto with vertical overflow</h2>
    <ScrollAreaStory type="auto" vertical horizontal>
      {Array.from({ length: 10 }).map((_, index) => (
        <Copy key={index} style={{ width: 50, overflow: 'hidden' }} />
      ))}
    </ScrollAreaStory>
    <h2>Auto without overflow</h2>
    <ScrollAreaStory type="auto" vertical horizontal>
      <Copy style={{ width: 50, height: 50, overflow: 'hidden' }} />
    </ScrollAreaStory>
    <h2>Always with overflow</h2>
    <ScrollAreaStory type="always" vertical horizontal>
      {Array.from({ length: 10 }).map((_, index) => (
        <Copy key={index} />
      ))}
    </ScrollAreaStory>
    <h2>Always without overflow</h2>
    <ScrollAreaStory type="always" vertical horizontal>
      <Copy style={{ width: 50, height: 50, overflow: 'hidden' }} />
    </ScrollAreaStory>
    <h2>Scroll with overflow</h2>
    <ScrollAreaStory type="scroll" vertical horizontal>
      {Array.from({ length: 10 }).map((_, index) => (
        <Copy key={index} />
      ))}
    </ScrollAreaStory>
    <h2>Scroll without overflow</h2>
    <ScrollAreaStory type="scroll" vertical horizontal>
      <Copy style={{ width: 50, height: 50, overflow: 'hidden' }} />
    </ScrollAreaStory>
    <h2>Hover with overflow</h2>
    <ScrollAreaStory type="hover" vertical horizontal>
      {Array.from({ length: 10 }).map((_, index) => (
        <Copy key={index} />
      ))}
    </ScrollAreaStory>
    <h2>Hover without overflow</h2>
    <ScrollAreaStory type="hover" vertical horizontal>
      <Copy style={{ width: 50, height: 50, overflow: 'hidden' }} />
    </ScrollAreaStory>
    <h2>Hover with horizontal overflow</h2>
    <ScrollAreaStory type="hover" vertical horizontal>
      {Array.from({ length: 1 }).map((_, index) => (
        <Copy key={index} />
      ))}
    </ScrollAreaStory>
    <h2>Hover with vertical overflow</h2>
    <ScrollAreaStory type="hover" vertical horizontal>
      {Array.from({ length: 10 }).map((_, index) => (
        <Copy key={index} style={{ width: 50, overflow: 'hidden' }} />
      ))}
    </ScrollAreaStory>
    <h1>Min thumb size</h1>
    <ScrollAreaStory type="always" vertical horizontal>
      {Array.from({ length: 100 }).map((_, index) => (
        <Copy key={index} style={{ width: 10000 }} />
      ))}
    </ScrollAreaStory>
    <h1>RTL</h1>
    <h2>Prop</h2>
    <ScrollAreaStory type="always" dir="rtl" vertical horizontal>
      {Array.from({ length: 10 }).map((_, index) => (
        <Copy key={index} />
      ))}
    </ScrollAreaStory>
    <h2>Inherited</h2>
    <div dir="rtl">
      <ScrollAreaStory type="always" vertical horizontal>
        {Array.from({ length: 10 }).map((_, index) => (
          <Copy key={index} />
        ))}
      </ScrollAreaStory>
    </div>
  </>
);
// Chromatic snapshots are disabled by default in this file; enable them only
// for this story.
Chromatic.parameters = { chromatic: { disable: false } };
const DYNAMIC_CONTENT_DELAY = 2000;

// Shows each scrollbar type with short placeholder content first, then swaps
// in overflowing content after DYNAMIC_CONTENT_DELAY, so snapshots can capture
// how the scrollbars react to content that appears late.
export const ChromaticDynamicContentBeforeLoaded = () => {
  const [showContent, setShowContent] = React.useState(false);

  React.useEffect(() => {
    // Keep a handle on the timer so it can be cleared if the story unmounts
    // before the delay elapses (avoids setState on an unmounted component).
    const timer = setTimeout(() => {
      setShowContent(true);
    }, DYNAMIC_CONTENT_DELAY);
    return () => clearTimeout(timer);
  }, []);

  return (
    <>
      <h1>Always</h1>
      <ScrollAreaStory type="always" style={{ width: 500, height: 250 }}>
        {showContent ? (
          Array.from({ length: 30 }).map((_, index) => <Copy key={index} />)
        ) : (
          <h1>Loading...</h1>
        )}
      </ScrollAreaStory>
      <h1>Hover</h1>
      <ScrollAreaStory type="hover" style={{ width: 500, height: 250 }}>
        {showContent ? (
          Array.from({ length: 30 }).map((_, index) => <Copy key={index} />)
        ) : (
          <h1>Loading...</h1>
        )}
      </ScrollAreaStory>
      <h1>Scroll</h1>
      <ScrollAreaStory type="scroll" style={{ width: 500, height: 250 }}>
        {showContent ? (
          Array.from({ length: 30 }).map((_, index) => <Copy key={index} />)
        ) : (
          <h1>Loading...</h1>
        )}
      </ScrollAreaStory>
      <h1>Auto</h1>
      <ScrollAreaStory type="auto" style={{ width: 500, height: 250 }}>
        {showContent ? (
          Array.from({ length: 30 }).map((_, index) => <Copy key={index} />)
        ) : (
          <h1>Loading...</h1>
        )}
      </ScrollAreaStory>
    </>
  );
};
ChromaticDynamicContentBeforeLoaded.parameters = { chromatic: { disable: false } };
// Same story as ChromaticDynamicContentBeforeLoaded; the only difference is
// the Chromatic snapshot delay below, so the snapshot is taken after the
// dynamic content has loaded.
export const ChromaticDynamicContentAfterLoaded = () => <ChromaticDynamicContentBeforeLoaded />;
ChromaticDynamicContentAfterLoaded.parameters = {
  chromatic: { disable: false, delay: DYNAMIC_CONTENT_DELAY },
};
const ScrollAreaStory = ({ children, vertical = true, horizontal = true, ...props }: any) => (
<ScrollArea.Root
{...props}
className={scrollAreaClass}
style={{ width: 200, height: 200, ...props.style }}
>
<ScrollArea.Viewport className={scrollAreaViewportClass}>{children}</ScrollArea.Viewport>
{vertical && (
<ScrollArea.Scrollbar className={scrollbarClass} orientation="vertical">
<ScrollArea.Thumb className={thumbClass} />
</ScrollArea.Scrollbar>
)}
{horizontal && (
<ScrollArea.Scrollbar className={scrollbarClass} orientation="horizontal">
<ScrollArea.Thumb className={thumbClass} />
</ScrollArea.Scrollbar>
)}
<ScrollArea.Corner className={cornerClass} />
</ScrollArea.Root>
);
// A deliberately over-wide (4000px) filler paragraph used by all stories to
// force horizontal and vertical overflow; `props.style` can shrink it to
// suppress overflow in specific cases.
const Copy = (props: any) => (
  <p style={{ width: 4000, marginTop: 0, ...props.style }}>
    Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce sit amet eros iaculis, bibendum
    tellus ac, lobortis odio. Aliquam bibendum elit est, in iaculis est commodo id. Donec pulvinar
    est libero. Proin consectetur pellentesque molestie. Fusce mi ante, ullamcorper eu ante finibus,
    finibus pellentesque turpis. Mauris convallis, leo in vulputate varius, sapien lectus suscipit
    eros, ac semper odio sapien sit amet magna. Sed mattis turpis et lacinia ultrices. Nulla a
    commodo mauris. Orci varius natoque penatibus et magnis dis parturient montes, nascetur
    ridiculus mus. Pellentesque id tempor metus. Pellentesque faucibus tortor non nisi maximus
    dignissim. Etiam leo nisi, molestie a porttitor at, euismod a libero. Nullam placerat tristique
    enim nec pulvinar. Sed eleifend dictum nulla a aliquam. Sed tempus ipsum eget urna posuere
    aliquam. Nulla maximus tortor dui, sed laoreet odio aliquet ac. Vestibulum dolor orci, lacinia
    finibus vehicula eget, posuere ac lectus. Quisque non felis at ipsum scelerisque condimentum. In
    pharetra semper arcu, ut hendrerit sem auctor vel. Aliquam non lacinia elit, a facilisis ante.
    Praesent eget eros augue. Praesent nunc orci, ullamcorper non pulvinar eu, elementum id nibh.
    Nam id lorem euismod, sodales augue quis, porttitor magna. Vivamus ut nisl velit. Nam ultrices
    maximus felis, quis ullamcorper quam luctus et.
  </p>
);
// Width/height of the scrollbar track (and corner square), in pixels.
const SCROLLBAR_SIZE = 8;

// "RECOMMENDED_CSS__*" objects mirror the minimal CSS the component expects;
// the css() classes below layer story-specific visuals on top of them.
const RECOMMENDED_CSS__SCROLLAREA__ROOT: any = {
  width: '100%',
  height: '100%',
};

const scrollAreaClass = css({
  ...RECOMMENDED_CSS__SCROLLAREA__ROOT,
  border: '1px solid black',
});

const RECOMMENDED_CSS__SCROLLAREA__VIEWPORT: any = {
  width: '100%',
  height: '100%',
};

const scrollAreaViewportClass = css({
  ...RECOMMENDED_CSS__SCROLLAREA__VIEWPORT,
});

const RECOMMENDED_CSS__SCROLLBAR__ROOT: any = {
  display: 'flex',
  // ensures no selection
  userSelect: 'none',
  // disable browser handling of all panning and zooming gestures on touch devices
  touchAction: 'none',
};

// Scrollbar track: darkens on hover; orientation decides which dimension gets
// the fixed SCROLLBAR_SIZE.
const scrollbarClass = css({
  ...RECOMMENDED_CSS__SCROLLBAR__ROOT,
  transition: 'background 160ms ease-out',
  padding: 2,
  background: 'rgba(0, 0, 0, 0.3)',
  '&:hover': {
    background: 'rgba(0, 0, 0, 0.5)',
  },
  '&[data-orientation="vertical"]': {
    width: SCROLLBAR_SIZE,
  },
  '&[data-orientation="horizontal"]': {
    flexDirection: 'column',
    height: SCROLLBAR_SIZE,
  },
});

const RECOMMENDED_CSS__SCROLLBAR__THUMB: any = {
  flex: 1,
  // increase target size for touch devices https://www.w3.org/WAI/WCAG21/Understanding/target-size.html
  position: 'relative',
  '&::before': {
    content: '""',
    position: 'absolute',
    top: '50%',
    left: '50%',
    transform: 'translate(-50%, -50%)',
    width: '100%',
    height: '100%',
    minWidth: 44,
    minHeight: 44,
  },
};

const thumbClass = css({
  ...RECOMMENDED_CSS__SCROLLBAR__THUMB,
  background: 'black',
  borderRadius: SCROLLBAR_SIZE,
});

// Corner square shown where the two scrollbars meet.
const cornerClass = css({
  background: 'rgba(0, 0, 0, 0.3)',
  position: 'relative',
  '&::after': {
    content: '""',
    position: 'absolute',
    top: '50%',
    left: '50%',
    transform: 'translate(-50%, -50%)',
    background: 'black',
    width: SCROLLBAR_SIZE,
    height: SCROLLBAR_SIZE,
    borderRadius: SCROLLBAR_SIZE,
  },
});
import { editor } from "./editor"
import { menu } from "./menu"
import { scriptsData } from "./script-data"
import { isMac, dlog } from "./util"
import * as warningMessage from "./warning-message"
import savedScripts from "./saved-scripts"
import { EL } from "./dom"
// Top toolbar of the editor: run/stop, clear, menu, save, history navigation
// and the editable script title. A single instance is exported at the bottom
// of this file; init() must be called once the DOM is ready.
class ToolbarUI {
  el :HTMLElement            // toolbar root, looked up by id in init()
  runButton :HTMLElement
  clearButton :HTMLElement
  menuButton :HTMLElement
  saveButton :HTMLElement
  backButton :HTMLElement
  fwdButton :HTMLElement

  // callbacks to invoke when the user presses "stop"; non-empty set means a
  // script is currently running
  stopCallbacks = new Set<()=>void>()
  // NOTE(review): `bool` is not a built-in TS type — presumably a project-wide
  // alias for boolean; confirm it is declared globally.
  isRunning :bool = false
  isFadedOut :bool = false

  pointerInsideAt :number = 0 // when non-null, Date.now() when pointer entered toolbar

  // counter + timer backing the delayed "wait" spinner on the run button
  waitCount :number = 0
  waitTimer :any = null
  waitSpinnerEl :HTMLDivElement|null = null

  // Registers a stop callback; the first registration flips the toolbar into
  // its "running" state.
  addStopCallback(stopCallback :()=>void) {
    this.stopCallbacks.add(stopCallback)
    if (!this.isRunning) {
      this.isRunning = true
      this.updateUI()
    }
    // this.runButton.classList.add("flash")
    // setTimeout(() => this.runButton.classList.remove("flash"), 300)
  }

  // Unregisters a stop callback; when the last one is removed the toolbar
  // returns to its idle state.
  removeStopCallback(stopCallback :()=>void) {
    this.stopCallbacks.delete(stopCallback)
    if (this.stopCallbacks.size == 0) {
      this.isRunning = false
      this.updateUI()
    }
  }

  // these are called by editor while its waiting for compilation, while run has been requested
  // but the app is waiting for e.g. typescript worker to reply.
  incrWaitCount() {
    this.waitCount++
    if (this.waitCount == 1) {
      this.scheduleWaitIndicator()
    }
  }

  decrWaitCount() {
    this.waitCount--
    if (this.waitCount == 0) {
      this.cancelWaitIndicator()
    }
  }

  // Shows a spinner on the run button, but only after a 100ms grace period so
  // fast compilations don't flash it.
  scheduleWaitIndicator() {
    if (this.waitTimer !== null) {
      return console.warn(`race error in toolbar/scheduleWaitIndicator`)
    }
    this.waitTimer = setTimeout(() => {
      // NOTE(review): classList.toggle(token, true) always returns true, so
      // this guard is always taken — confirm whether it was meant to detect a
      // pre-existing "hide-icon" class instead.
      if (this.runButton.classList.toggle("hide-icon", true)) {
        this.runButton.appendChild(EL("div", { className: "progress-spinner on" }))
      }
    }, 100)
  }

  // Removes the spinner (if shown) and cancels a pending scheduleWaitIndicator.
  cancelWaitIndicator() {
    if (this.runButton.firstChild) {
      this.runButton.classList.remove("hide-icon")
      // NOTE(review): assumes the spinner is the run button's only child
      // element — verify the button icon is CSS-based, not a child node.
      this.runButton.removeChild(this.runButton.firstChild)
    }
    clearTimeout(this.waitTimer)
    this.waitTimer = null
  }

  // Syncs the run button's appearance and tooltip with the running state.
  updateUI() {
    this.runButton.classList.toggle("running", this.isRunning)
    this.runButton.title = (
      this.isRunning ? (
        isMac ? "Stop ⇧⌘⏎"
             : "Stop (Ctrl+Shift+Return)"
      ) : (
        isMac ? "Run ⌘⏎"
             : "Run (Ctrl+Return)"
      )
    )
  }

  fadeOut() {
    // pointerInsideTimeout defines how old a pointerenter event can be while we consider
    // the pointer as being inside the toolbar. This is needed because event handling on the web
    // platform is a mess and thus we can't actually know if the pointer is inside.
    const pointerInsideMaxAge = 30000
    if (!this.isFadedOut) {
      if (menu.isVisible ||
          (this.pointerInsideAt > 0 && Date.now() - this.pointerInsideAt < pointerInsideMaxAge)
      ) {
        // don't fade out while the menu is open or the pointer is inside the toolbar
        return
      }
      this.isFadedOut = true
      this.el.classList.add("faded")
    }
  }

  fadeIn() {
    if (this.isFadedOut) {
      this.isFadedOut = false
      this.el.classList.remove("faded")
    }
  }

  // Wires up all toolbar buttons and their observers. Must be called once,
  // after the #toolbar element exists in the DOM.
  init() {
    this.el = document.getElementById('toolbar') as HTMLElement

    this.el.addEventListener("pointerenter", () => {
      this.pointerInsideAt = Date.now()
      this.fadeIn()
    })
    this.el.addEventListener("pointerleave", () => {
      this.pointerInsideAt = 0
    })

    // menu button
    this.menuButton = this.el.querySelector('.button.menu') as HTMLElement
    let updateMenuButtonTitle = () => {
      let verb = menu.isVisible ? "Hide" : "Show"
      let shortcut = isMac ? "⌃M" : "(Ctrl+M)"
      this.menuButton.title = `${verb} menu ${shortcut}`
    }
    updateMenuButtonTitle()
    this.menuButton.addEventListener("click", ev => {
      menu.toggle()
      updateMenuButtonTitle()
      ev.preventDefault()
      ev.stopPropagation()
    }, {passive:false,capture:true})
    const updateMenuVisible = () => {
      this.el.classList.toggle("menuVisible", menu.isVisible)
      this.menuButton.classList.toggle("on", menu.isVisible)
    }
    menu.on("visibility", updateMenuVisible)

    // new button
    let newButton = this.el.querySelector('.button.new') as HTMLElement
    newButton.addEventListener("click", ev => {
      editor.newScript({ name: scriptsData.nextNewScriptName() })
      ev.preventDefault()
      ev.stopPropagation()
    }, {passive:false,capture:true})

    // save button
    this.saveButton = this.el.querySelector('.button.save') as HTMLElement
    this.saveButton.addEventListener("click", ev => {
      editor.saveScriptToFigma()
      ev.preventDefault()
      ev.stopPropagation()
    }, {passive:false,capture:true})
    const updateSaveButton = () => {
      let saveButtonAvailable = false
      if (editor.currentScript) {
        if (editor.currentScript.id >= 0) {
          // Note: Example scripts (id<0) can't be saved since they have no GUID and
          // they can not be edited (i.e. given a guid.) Plus, it makes no sense to save
          // an example script as it's always available in Scripter.
          saveButtonAvailable = !savedScripts.hasGUID(editor.currentScript.guid)
        }
      }
      this.saveButton.classList.toggle("hidden", !saveButtonAvailable)
    }
    updateSaveButton()
    savedScripts.on("change", updateSaveButton)

    // history back and forward buttons
    this.backButton = this.el.querySelector('.button.history-back') as HTMLElement
    this.backButton.addEventListener("click", ev => {
      editor.historyBack()
      editor.focus()
      ev.preventDefault()
      ev.stopPropagation()
    }, {capture:true})
    this.fwdButton = this.el.querySelector('.button.history-forward') as HTMLElement
    this.fwdButton.addEventListener("click", ev => {
      editor.historyForward()
      editor.focus()
      ev.preventDefault()
      ev.stopPropagation()
    }, {capture:true})
    const updateHistoryButtons = () => {
      this.backButton.classList.toggle("unavailable", !editor.navigationHistory.canGoBack())
      this.fwdButton.classList.toggle("unavailable", !editor.navigationHistory.canGoForward())
    }
    editor.navigationHistory.on("change", updateHistoryButtons)
    updateHistoryButtons()

    // clear button
    this.clearButton = this.el.querySelector('.button.clear') as HTMLElement
    this.clearButton.title = "Clear messages " + (
      isMac ? "⌘K"
            : "(Ctrl+L)"
    )
    handleClick(this.clearButton, () => {
      warningMessage.hide()
      editor.clearMessages()
    })
    const updateClearButton = () => {
      let cl = this.clearButton.classList
      let isROLib = !editor.currentScript || editor.currentScript.isROLib
      cl.toggle("hidden", isROLib)
      if (!isROLib) {
        // only useful when there are messages (view zones or decorations) to clear
        cl.toggle(
          "unavailable",
          editor.viewZones.count == 0 && editor.editorDecorationIds.length == 0
        )
      }
    }
    updateClearButton()
    editor.viewZones.on("update", updateClearButton)
    editor.on("decorationchange", updateClearButton)

    // run button
    this.runButton = this.el.querySelector('.button.run') as HTMLElement
    handleClick(this.runButton, () => {
      if (this.isRunning) {
        // stop: notify every registered stop callback, isolating failures
        for (let f of this.stopCallbacks) {
          try {
            f()
          } catch (e) {
            console.error(`[toolbar] error in stopCallback: ${e.stack||e}`)
          }
        }
      } else {
        editor.runCurrentScript()
      }
    })
    const updateRunButton = () => {
      let isROLib = !editor.currentScript || editor.currentScript.isROLib
      this.runButton.classList.toggle("hidden", isROLib)
    }
    updateRunButton()

    // update when script changes
    scriptsData.on("change", () => {
      updateSaveButton()
      updateClearButton()
      updateRunButton()
    })

    // title
    initTitle(this.el)

    this.updateUI()
  }
}
// Wires up the editable script title inside the toolbar: clicking focuses the
// input, RETURN commits the new name, ESC restores the previous one, and
// external renames (e.g. from another tab) are reflected live.
// `host` is the toolbar root element containing a ".title" with an <input>.
function initTitle(host :HTMLElement) {
  let container = host.querySelector('.title') as HTMLElement
  let input = container.querySelector('input') as HTMLInputElement

  const editSession = new class {
    active = false
    restoreValue = "" // value at beginning of session, used to restore on cancel
    scriptID: number = 0 // ID of script being edited, for race detection

    // possible state flows:
    //   A = begin -> end           : input loses focus before ESC or RETURN is pressed
    //   B = begin -> cancel -> end : user presses ESC
    //   C = begin -> commit -> end : user presses RETURN
    begin() {
      dlog("title/editSession/begin")
      this.active = true
      this.restoreValue = input.value
      this.scriptID = editor.currentScript.id
      input.select()
      // re-select on next frame; some browsers clear the selection after focus
      requestAnimationFrame(() => input.select())
    }
    cancel() {
      dlog("title/editSession/cancel")
      input.value = this.restoreValue
      this.active = false
      editor.focus()
    }
    commit() {
      dlog("title/editSession/commit")
      this._commit()
      editor.focus()
    }
    end() {
      dlog("title/editSession/end")
      if (this.active) {
        this._commit()
      }
      this.restoreValue = ""
    }
    _commit() {
      // called by commit() when user presses RETURN, or by end() when user relinquish focus
      this.active = false
      if (this.scriptID == editor.currentScript.id) {
        // good, the same script is still active
        editor.currentScript.name = input.value
        editor.currentScript.save() // save immediately
      }
    }
  }

  input.onfocus = () => {
    container.classList.add("focus")
    if (!input.readOnly) {
      editSession.begin()
    }
  }
  input.onblur = () => {
    container.classList.remove("focus")
    editSession.end()
  }
  input.addEventListener("keydown", ev => {
    switch (ev.key) {
    case "Escape": editSession.cancel() ; break
    case "Enter":  editSession.commit() ; break
    default: return // let event pass through
    }
    ev.stopPropagation()
    ev.preventDefault()
  }, {capture:true,passive:false})

  // clicks on the input itself must not bubble to the container handler below
  input.addEventListener("pointerdown", ev => {
    ev.stopPropagation()
  }, {capture:true,passive:false})
  // click on outer container focuses input
  container.onpointerdown = () => {
    if (!editSession.active) {
      input.focus()
    }
  }

  // update title when it was changed elsewhere, like in a different tab
  let updateTitle = () => {
    if (!editor.currentScript) {
      return
    }
    let newTitle = editor.currentScript.name
    if (editSession.active) {
      // when title is actively edited, don't change the value of the input but instead
      // change the "restore" value of editSession
      editSession.restoreValue = newTitle
    } else {
      input.value = newTitle
    }
    // example/read-only library scripts can't be renamed
    input.readOnly = !editor.currentScript.isUserScript
  }
  scriptsData.on("change", updateTitle)
}
// Registers a capture-phase click handler on el that swallows the event
// (preventDefault + stopPropagation) before invoking the callback f.
function handleClick(el :HTMLElement, f :()=>void) {
  const onClick = (ev :Event) => {
    ev.preventDefault()
    ev.stopPropagation()
    f()
  }
  el.addEventListener("click", onClick, {passive:false,capture:true})
}
export default new ToolbarUI() | the_stack |
import { Injectable } from '@angular/core';
import {from } from 'rxjs/observable/from';
// firebase
import * as firebase from 'firebase/app';
import 'firebase/database';
import { BehaviorSubject } from 'rxjs/BehaviorSubject';
// services
import { Globals } from '../utils/globals';
// models
import { ConversationModel } from '../../models/conversation';
// utils
// tslint:disable-next-line:max-line-length
import { isJustRecived, getUrlImgProfile, getUnique, avatarPlaceholder, setColorFromString, getFromNow, compareValues } from '../utils/utils';
// import { ConsoleReporter } from 'jasmine';
import { SettingsSaverService } from '../providers/settings-saver.service';
import { ConversationComponent } from '../components/conversation-detail/conversation/conversation.component';
@Injectable()
export class ConversationsService {
  // openConversations: ConversationModel[];
  archivedConversations: Array<ConversationModel>; // initialized empty in the constructor
  // allConversations: ConversationModel[];
  listConversations: Array<ConversationModel>; // conversations shown in the widget list
  // obsAllConversations: any;
  // BehaviorSubjects that publish list/conversation changes to subscribers
  obsArchivedConversations: any;
  obsListConversations: any;
  obsChangeConversation: any;
  tenant: string;   // app/tenant id, set in initialize()
  senderId: string; // current user id, set in initialize()
  // Firebase paths built in initialize() from tenant + senderId
  urlConversation: string;
  urlArchivedConversation: string;
  conversationRef: any; // Firebase query ref for the conversation list
  audio: any;
  badgeConversations: number; // unread-conversation badge counter
  setTimeoutSound: NodeJS.Timer;
  // helpers re-exported as instance members (used from templates/other code)
  avatarPlaceholder = avatarPlaceholder;
  setColorFromString = setColorFromString;
  getFromNow = getFromNow;
  getUrlImgProfile = getUrlImgProfile;
constructor(
public g: Globals,
public settingsSaverService: SettingsSaverService
) {
// this.allConversations = new Array<ConversationModel>();
// this.openConversations = new Array<ConversationModel>();
this.archivedConversations = new Array<ConversationModel>();
this.listConversations = new Array<ConversationModel>();
// this.obsAllConversations = new BehaviorSubject<[ConversationModel]>(null);
this.obsArchivedConversations = new BehaviorSubject<[ConversationModel]>(null);
this.obsListConversations = new BehaviorSubject<[ConversationModel]>(null);
this.obsChangeConversation = new BehaviorSubject<ConversationModel>(null);
}
  /**
   * Initializes the service for the given user and tenant: builds the
   * Firebase paths for the open and archived conversation lists and
   * subscribes to the widget open/close state.
   */
  public initialize(senderId, tenant) {
    // this.openConversations = [];
    // this.archivedConversations = [];
    // this.listConversations = [];
    this.tenant = tenant;
    this.senderId = senderId;
    this.urlConversation = '/apps/' + this.tenant + '/users/' + this.senderId + '/conversations/';
    this.urlArchivedConversation = '/apps/' + this.tenant + '/users/' + this.senderId + '/archived_conversations/';
    const that = this;
    /**
     * When the widget has been opened:
     * if there is an active conversation and it is one of those in
     * listConversations, mark it with 'is_new' = false;
     * in that case also refresh the conversation badge.
     */
    this.g.obsIsOpen.subscribe((valIsOpen) => {
      if ( valIsOpen === true ) {
        that.listConversations.forEach(element => {
          // if ( that.g.activeConversation === element.uid ) {
          if ( that.g.activeConversation && that.g.activeConversation.uid === element.uid ) {
            that.updateIsNew(element);
            that.updateConversationBadge();
          }
        });
      }
    });
  }
loadConversationDetail(tenant: string, senderId: string, idConversation: string): any {
const urlConversationDetail = '/apps/' + tenant + '/users/' + senderId + '/conversations/' + idConversation;
console.log('urlConversationDetail:: ', urlConversationDetail);
const firebaseConversationDetail = firebase.database().ref(urlConversationDetail);
return firebaseConversationDetail.once('value');
}
// // ============== begin:: subscribe to conversations ================//
// public checkListConversationsLimit(limit?) {
// if (!limit || limit <= 0) { limit = 100; }
// const that = this;
// const firebaseConversations = firebase.database().ref(this.urlConversation);
// const refListConvLimit = firebaseConversations.orderByChild('timestamp').limitToLast(limit);
// that.g.wdLog(['checkListConversationsLimit *****', this.urlConversation]);
// //// SUBSCRIBE ADDED ////
// refListConvLimit.on('child_added', function (childSnapshot) {
// that.g.wdLog(['111 childSnapshot.val() *****', childSnapshot.val(), that.g.filterByRequester]);
// const conversation = that.setConversation(childSnapshot, false);
// // tslint:disable-next-line:max-line-length
// tslint:disable-next-line:max-line-length
// if ( that.g.filterByRequester === false || (that.g.filterByRequester === true && conversation.attributes.requester_id === that.g.senderId ) ) {
// that.checkIsNew(conversation);
// that.checkIsSound(conversation);
// that.listConversations.unshift(conversation); // insert item to top array
// that.g.wdLog(['222 listConversations *****', that.listConversations.length, limit]);
// if (that.listConversations && that.listConversations.length > limit) {
// that.listConversations.slice(0, (limit - 1));
// }
// that.listConversations.sort(compareValues('timestamp', 'desc'));
// that.g.wdLog(['333 listConversations *****']);
// console.log(that.listConversations);
// that.updateConversationBadge();
// that.obsListConversations.next(that.listConversations);
// }
// });
// //// SUBSCRIBE CHANGED ////
// refListConvLimit.on('child_changed', function (childSnapshot) {
// const index = that.searchIndexInArrayForUid(that.listConversations, childSnapshot.key);
// const conversation = that.setConversation(childSnapshot, false);
// //if (index > 0 ) {
// that.listConversations.splice(index, 1, conversation);
// if (that.listConversations && that.listConversations.length > limit) {
// that.listConversations.slice(0, (limit - 1));
// }
// that.listConversations.sort(compareValues('timestamp', 'desc'));
// that.checkIsNew(conversation);
// that.checkIsSound(conversation);
// that.g.wdLog(['checkListConversationsLimit child_changed *****', that.listConversations.length, index]);
// that.updateConversationBadge();
// that.obsListConversations.next(that.listConversations);
// //}
// });
// //// SUBSCRIBE REMOVED ////
// refListConvLimit.on('child_removed', function (childSnapshot) {
// const index = that.searchIndexInArrayForUid(that.listConversations, childSnapshot.key);
// if (index > -1) {
// that.listConversations.splice(index, 1);
// that.obsListConversations.next(that.listConversations);
// }
// that.updateConversationBadge();
// });
// }
// // ============== end:: subscribe to conversations ================//
// ============== begin:: subscribe to conversations ================//
public checkListConversations() {
const limit = 10;
const that = this;
const firebaseConversations = firebase.database().ref(this.urlConversation);
this.conversationRef = firebaseConversations.orderByChild('timestamp').limitToLast(limit);
that.g.wdLog(['checkListConversations *****', this.urlConversation]);
//// SUBSCRIBE ADDED ////
this.conversationRef.on('child_added', function (childSnapshot) {
const conversation = that.setConversation(childSnapshot, false);
that.g.wdLog(['child_added val *****', childSnapshot.val()]);
if (that.ifCanAddConversation(conversation)) {
const index = that.searchIndexInArrayForUid(that.listConversations, childSnapshot.key);
if (index === -1) {
setTimeout(function () {
// console.log('***** NEXT *****');
that.listConversations.unshift(conversation); // insert item top array
that.checkIsNew(conversation);
that.checkIsSound(conversation);
that.updateConversationBadge();
that.listConversations.sort(compareValues('timestamp', 'desc'));
that.listConversations = getUnique(that.listConversations, 'uid');
that.g.wdLog(['checkListConversations - child_added: ', that.listConversations.length]);
that.obsListConversations.next(that.listConversations);
that.triggerOnConversationUpdated(conversation);
}, 0);
}
}
});
// SUBSCRIBE CHANGED ////
this.conversationRef.on('child_changed', function (childSnapshot) {
that.g.wdLog(['child_changed val *****', childSnapshot.val()]);
const conversation = that.setConversation(childSnapshot, false);
if (that.ifCanAddConversation(conversation)) {
const index = that.searchIndexInArrayForUid(that.listConversations, childSnapshot.key);
if (index > -1) {
setTimeout(function () {
that.listConversations.splice(index, 1, conversation);
that.checkIsNew(conversation);
that.checkIsSound(conversation);
that.updateConversationBadge();
that.listConversations.sort(compareValues('timestamp', 'desc'));
that.listConversations = getUnique(that.listConversations, 'uid');
that.g.wdLog(['checkListConversations child_changed *****', that.listConversations, index]);
that.obsListConversations.next(that.listConversations);
that.obsChangeConversation.next(conversation);
that.triggerOnConversationUpdated(conversation);
}, 0);
}
}
});
//// SUBSCRIBE REMOVED ////
this.conversationRef.on('child_removed', function (childSnapshot) {
that.g.wdLog(['child_removed val *****', childSnapshot.val()]);
const index = that.searchIndexInArrayForUid(that.listConversations, childSnapshot.key);
if (index > -1) {
setTimeout(function () {
that.listConversations.splice(index, 1);
that.listConversations.sort(compareValues('timestamp', 'desc'));
that.listConversations = getUnique(that.listConversations, 'uid');
that.obsListConversations.next(that.listConversations);
}, 0);
}
that.updateConversationBadge();
});
}
// ============== end:: subscribe to conversations ================//
// ========= begin:: subscribe to archived conversations ============//
/**
 * Subscribes to the archived-conversations node on Firebase and mirrors it
 * into `archivedConversations`, publishing every change on
 * `obsArchivedConversations`.
 * @param limit max number of conversations to load (invalid/omitted values fall back to 100)
 */
public checkListArchivedConversations(limit?) {
  if (!limit || limit <= 0) { limit = 100; }
  const that = this;
  const firebaseConversations = firebase.database().ref(this.urlArchivedConversation);
  // only the `limit` most recent conversations, ordered by timestamp
  const ref = firebaseConversations.orderByChild('timestamp').limitToLast(limit);
  that.g.wdLog(['checkListArchivedConversations *****', this.urlArchivedConversation]);
  //// SUBSCRIBE ADDED ////
  ref.on('child_added', function (childSnapshot) {
    that.g.wdLog(['childSnapshot.val() *****', childSnapshot.val()]);
    // second argument true: mark the conversation as archived
    const conversation = that.setConversation(childSnapshot, true);
    if (that.ifCanAddConversation(conversation)) {
      const index = that.searchIndexInArrayForUid(that.archivedConversations, childSnapshot.key);
      if (index === -1) {
        that.archivedConversations.unshift(conversation); // insert item top array
        that.checkIsNew(conversation);
        // keep the list sorted (newest first) and free of duplicate uids
        that.archivedConversations.sort(compareValues('timestamp', 'desc'));
        that.archivedConversations = getUnique(that.archivedConversations, 'uid');
        that.obsArchivedConversations.next(that.archivedConversations);
        that.triggerOnConversationUpdated(conversation);
      }
    }
  });
  //// SUBSCRIBE CHANGED ////
  ref.on('child_changed', function (childSnapshot) {
    that.g.wdLog(['child_changed.val() *****', childSnapshot.val()]);
    const conversation = that.setConversation(childSnapshot, true);
    if (that.ifCanAddConversation(conversation)) {
      const index = that.searchIndexInArrayForUid(that.archivedConversations, childSnapshot.key);
      if (index > -1) {
        // replace the existing entry in place, then re-sort and dedupe
        that.archivedConversations.splice(index, 1, conversation);
        that.checkIsNew(conversation);
        that.archivedConversations.sort(compareValues('timestamp', 'desc'));
        that.archivedConversations = getUnique(that.archivedConversations, 'uid');
        that.obsArchivedConversations.next(that.archivedConversations);
        that.g.wdLog([' checkListArchivedConversations child_changed *****', that.archivedConversations, index]);
        that.triggerOnConversationUpdated(conversation);
      }
    }
  });
  //// SUBSCRIBE REMOVED ////
  ref.on('child_removed', function (childSnapshot) {
    that.g.wdLog(['child_removed.val() *****', childSnapshot.val()]);
    const index = that.searchIndexInArrayForUid(that.archivedConversations, childSnapshot.key);
    if (index > -1) {
      that.archivedConversations.splice(index, 1);
      that.archivedConversations.sort(compareValues('timestamp', 'desc'));
      that.archivedConversations = getUnique(that.archivedConversations, 'uid');
      that.obsArchivedConversations.next(that.archivedConversations);
      // that.updateConversations();
    }
    // that.updateConversationBadge();
  });
}
// ========= end:: subscribe to archived conversations ============//
/**
 * Decides whether a conversation may enter the visible lists.
 * It is rejected when its uid is the literal string 'undefined', or when
 * requester filtering is enabled and the conversation was not requested by
 * the current sender.
 * NOTE(review): the uid check compares against the *string* 'undefined' —
 * presumably guarding against a serialized undefined key; confirm.
 * @param conversation candidate conversation
 */
private ifCanAddConversation(conversation: ConversationModel) {
  this.g.wdLog(['***** filterByRequester *****', this.g.filterByRequester]);
  this.g.wdLog(['***** that.g.senderId *****', this.g.senderId]);
  if (conversation.uid === 'undefined') {
    return false;
  }
  const requestedByMe = this.g.filterByRequester === true
    && conversation.attributes
    && conversation.attributes.requester_id === this.g.senderId;
  if (this.g.filterByRequester === false || requestedByMe) {
    return true;
  }
  this.g.wdLog(['***** ifCanAddConversation *****', conversation, false]);
  return false;
}
/**
 * 1 - concatenate the conversation arrays
 * 2 - sort the resulting array
 * 3 - update the conversation state
 */
// public updateConversations() {
// const TEMP = this.openConversations.concat(this.archivedConversations);
// const result = [];
// const map = new Map();
// for (const item of TEMP) {
// if (!map.has(item.uid)) {
// map.set(item.uid, true); // set any value to Map
// result.push(item);
// }
// }
// this.allConversations = result;
// // this.allConversations.map(item => item.uid).filter((value, index, self) => self.indexOf(value) === index);
// this.allConversations.sort(compareValues('timestamp', 'desc'));
// that.g.wdLog([' updateConversations:::: ', this.allConversations.length]);
// this.obsAllConversations.next(this.allConversations);
// }
/**
 * Decides whether an incoming conversation update should trigger the
 * notification sound: not the active conversation, not sent by me, and
 * flagged as new. The actual sound/badge calls are commented out, so this
 * method currently only logs — effectively a no-op apart from console output.
 * NOTE(review): dead code — confirm whether soundMessage() should be
 * re-enabled here or the method removed.
 */
public checkIsSound(conversation) {
  console.log('1 -- checkIsSound conversation', conversation);
  console.log('1 -- checkIsSound this.g.activeConversation', this.g.activeConversation);
  // console.log('1 -- checkIsSound this.g.activeConversation.recipient', this.g.activeConversation.recipient);
  // console.log('1 -- checkIsSound conversation.recipient', conversation.recipient);
  console.log('1 -- checkIsSound conversation.sender', conversation.sender);
  console.log('1 -- checkIsSound this.senderId', this.senderId);
  console.log('1 -- checkIsSound conversation.is_new', conversation.is_new);
  // sound condition: no active conversation, or a new message from someone
  // else in a conversation other than the active one
  if (!this.g.activeConversation ||
    (this.g.activeConversation.recipient !== conversation.recipient
      && conversation.sender !== this.senderId
      && conversation.is_new === true)) {
    console.log('sound-massege: ', conversation.timestamp);
    // const badge = (conversation.is_new) ? 1 : 0;
    // that.updateBadge(conversation, badge);
    // this.soundMessage(conversation.timestamp);
  }
}
// ========= begin:: isNew value in conversation ============//
/**
* check if new or modify conversation is on active conversation
*/
/**
 * Clears the "new" flag of a conversation when it is the one currently
 * open in the widget (active conversation and widget window both open).
 */
public checkIsNew(conversation) {
  const active = this.g.activeConversation;
  if (!active || this.g.isOpen !== true) {
    return;
  }
  if (active.uid === conversation.uid) {
    this.updateIsNew(conversation);
  }
}
/**
 * Clears the `is_new` flag of a conversation on Firebase, picking the
 * archived or open node depending on the conversation's archived state.
 * @returns the Firebase update promise
 */
public updateIsNew(conversation) {
  const baseUrl = conversation.archived === true
    ? this.urlArchivedConversation
    : this.urlConversation;
  const urlUpdate = baseUrl + conversation.recipient;
  this.g.wdLog(['**** updateIsNew::' + urlUpdate]);
  const update = { '/is_new': false };
  return firebase.database().ref(urlUpdate).update(update);
}
// ========= end:: isNew value in conversation ============//
/**
 * Recomputes the number of unread (is_new) non-archived conversations and
 * publishes it through the globals under the 'conversationsBadge' parameter.
 */
updateConversationBadge() {
  const conversationsBadge = this.listConversations.filter(
    element => element.is_new === true && element.archived === false
  ).length;
  this.g.wdLog(['updateConversationBadge', conversationsBadge]);
  this.g.conversationsBadge = conversationsBadge;
  this.g.setParameter('conversationsBadge', conversationsBadge);
  // this.settingsSaverService.setVariable('conversationsBadge', conversationsBadge);
}
// updateBadge(conversation, badge) {
// const urlUpdate = this.urlConversation + conversation.recipient + '/badge/';
// const update = {};
// update [urlUpdate] = badge;
// that.g.wdLog(['**** updateBadge::' + urlUpdate);
// return firebase.database().ref().update(update);
// }
/**
 * Builds a ConversationModel from a Firebase snapshot, deriving display
 * fields (relative time, avatar placeholder, color, profile image).
 * @param childSnapshot Firebase DataSnapshot of the conversation node
 * @param archived whether the snapshot comes from the archived node
 * @returns the enriched conversation, or undefined when the snapshot is empty
 */
private setConversation(childSnapshot, archived) {
  const that = this;
  that.g.wdLog(['setConversation -> snapshot.val() *****', that.senderId, childSnapshot.val()]);
  if (childSnapshot.val()) {
    const conversation: ConversationModel = childSnapshot.val();
    conversation.uid = childSnapshot.key;
    // (removed a no-op self-assignment of last_message_text here)
    const timestampNumber = conversation.timestamp / 1000;
    conversation.time_last_message = that.getFromNow(this.g.windowContext, timestampNumber);
    conversation.archived = archived;
    if (conversation.sender === that.senderId) {
      // conversation sent by me: label it as "you" and show the recipient's identity
      conversation.sender_fullname = this.g.YOU;
      conversation.avatar = that.avatarPlaceholder(conversation.recipient_fullname);
      conversation.color = that.setColorFromString(conversation.recipient_fullname);
      conversation.image = that.getUrlImgProfile(conversation.recipient);
    } else {
      // conversation sent by someone else: show the sender's identity
      conversation.avatar = that.avatarPlaceholder(conversation.sender_fullname);
      conversation.color = that.setColorFromString(conversation.sender_fullname);
      conversation.image = that.getUrlImgProfile(conversation.sender);
    }
    // that.checkFirebaseUrl(conversation.image);
    return conversation;
  }
  return;
}
/**
 * Would verify that an image URL exists in Firebase storage before using it.
 * The entire body is commented out, so calling this is currently a no-op.
 * NOTE(review): dead code kept for reference — its only call site in
 * setConversation is also commented out; confirm whether both can be deleted.
 */
private checkFirebaseUrl(imageurl: string) {
  // tslint:disable-next-line:max-line-length
  // console.log('checkFirebaseUrl::: ', imageurl);
  // const storageRef = firebase.storage().refFromURL(imageurl);
  // storageRef.getDownloadURL()
  // .then((response) => {
  // // Found it. Do whatever
  // console.log(':::ESISTE::: ', response);
  // return imageurl;
  // })
  // .catch((err) => {
  // // Didn't exist... or some other error
  // //console.log(':::ERRORE::: ', err);
  // //return null;
  // });
}
/**
 * Resolves the avatar image for a conversation: defaults to the support
 * placeholder, then tries to read the sender's imageurl from the contacts
 * node and overwrites the placeholder when a non-blank URL is stored.
 * @param conv conversation whose image is resolved (mutated in place)
 * @param conversation_with currently unused; kept for interface compatibility
 */
setImageConversation(conv, conversation_with) {
  const IMG_PROFILE_SUPPORT = 'https://user-images.githubusercontent.com/32448495/39111365-214552a0-46d5-11e8-9878-e5c804adfe6a.png';
  conv.image = IMG_PROFILE_SUPPORT;
  // if (conv.channel_type === TYPE_DIRECT) {
  const urlNodeConcacts = '/apps/' + this.tenant + '/contacts/' + conv.sender + '/imageurl/';
  this.g.wdLog(['setImageConversation *****', urlNodeConcacts]);
  firebase.database().ref(urlNodeConcacts).once('value')
    .then(function (snapshot) {
      // FIX: snapshot.val() is null when the node does not exist; guard
      // before calling trim() (previously this threw a TypeError that was
      // silently swallowed by the catch below).
      const imageUrl = snapshot.val();
      if (imageUrl && imageUrl.trim()) {
        conv.image = imageUrl;
      }
    })
    .catch(function (err) {
      console.log(err);
    });
  // }
}
/**
 * Sound message rules:
 * - message sent by me                                -> NO SOUND
 * - I am not in the conversation                      -> SOUND
 * - I am in the conversation, at the bottom of the page -> NO SOUND
 * - otherwise                                         -> SOUND
 * Only plays when sound is enabled and the message arrived after the
 * widget started.
 */
soundMessage(timestamp?) {
  this.g.wdLog(['****** soundMessage startedAt *****', this.g.startedAt.getTime()]);
  this.g.wdLog(['****** soundMessage timestamp *****', timestamp]);
  // ignore messages that predate the widget start
  if (!isJustRecived(this.g.startedAt.getTime(), timestamp)) {
    return;
  }
  if (this.g.soundEnabled !== true) {
    return;
  }
  this.audio = new Audio();
  this.audio.src = this.g.baseLocation + '/assets/sounds/justsaying.mp3';
  this.audio.load();
  // restart the 1s timer on every call so a burst of messages plays only once
  clearTimeout(this.setTimeoutSound);
  this.setTimeoutSound = setTimeout(() => {
    this.audio.play();
    this.g.wdLog(['****** soundMessage 2 *****']);
  }, 1000);
}
/**
 * Returns the index of the first item whose `recipient` equals `key`,
 * or -1 when no item matches.
 * NOTE(review): despite the name, matching is done on `recipient`, not
 * `uid` — callers pass childSnapshot.key; confirm those are the same value.
 */
searchIndexInArrayForUid(items, key) {
  for (let i = 0; i < items.length; i++) {
    if (items[i].recipient === key) {
      return i;
    }
  }
  return -1;
}
// ========= START:: TRIGGER FUNCTIONS ============//
/** */
/**
 * Dispatches an 'onConversationUpdated' CustomEvent carrying the
 * conversation on the tiledesk root element of the hosting window,
 * when that element is available.
 */
private triggerOnConversationUpdated( conversation: ConversationModel) {
  this.g.wdLog([' ---------------- triggerOnConversationUpdated ---------------- ', conversation]);
  const onConversationUpdated = new CustomEvent('onConversationUpdated', { detail: { conversation: conversation } });
  const windowContext = this.g.windowContext;
  const root = windowContext.tiledesk && windowContext.tiledesk.tiledeskroot;
  if (root) {
    root.dispatchEvent(onConversationUpdated);
    this.g.windowContext = windowContext;
  }
  // else: no tiledesk root element available — nothing to dispatch on
  // (this.el.nativeElement.dispatchEvent(...) was a commented-out fallback)
}
// ========= END:: TRIGGER FUNCTIONS ============//
} | the_stack |
import 'jest';
import { createHandlerMap } from '.';
import { INITIAL_STATE_KEY } from '../constants';
import { resolveTypes, setOptions } from 'resolve-types';
import { Diagnostic } from 'typescript';
// Keep full type names in diagnostics; the assertions below match the
// complete message text verbatim.
setOptions({ noErrorTruncation: true });

/**
 * Extracts the top-level message text from each diagnostic. Nested
 * DiagnosticMessageChain entries are not descended into — only the chain's
 * own messageText is returned.
 */
const getDiagnosticMessages = (diagnostics: ReadonlyArray<Diagnostic>) =>
  diagnostics.map(({ messageText }) =>
    typeof messageText === 'string' ? messageText : messageText.messageText
  );
describe('Handler Map', () => {
  // Runtime behavior: createHandlerMap should stash the initial state under
  // a well-known key and otherwise return the handler map unchanged.
  describe('createHandlerMap runtime', () => {
    it(`adds the initial state under the ${INITIAL_STATE_KEY} field`, () => {
      const initial = { a: 3, b: 'foo' };
      const hm = createHandlerMap(initial, {});
      expect(hm[INITIAL_STATE_KEY]).toEqual(initial);
    });
    it('returns the handler map unchanged otherwise', () => {
      const map = { a: 3, b: 'foo' };
      const hm = createHandlerMap({}, map);
      for (const key in map) {
        expect(hm[key]).toEqual(map[key]);
      }
    });
  });
  // Type-level fixtures. Each entry drives one generated describe block:
  // `type` is the state type under test, `expectedIn`/`expectedOut` are the
  // state types expected in resolved handler signatures, and foo/bar/quox
  // are source-code literals of the state type spliced into the snippet.
  const testParameters = [
    {
      type: "'foo' | 'bar' | 'quox'",
      expectedIn: 'Foo',
      expectedOut: 'Foo',
      returnTypeAnnotation: ' as Foo',
      foo: "'foo'",
      bar: "'bar'",
      quox: "'quox'"
    },
    {
      type: 'number[]',
      expectedIn: 'number[]',
      expectedOut: 'number[]',
      foo: '[]',
      bar: '[1,2,3]',
      quox: '[]'
    },
    {
      type: '{ a: number; b: string; c: { d: number; }; }',
      expectedIn: 'Foo',
      expectedOut: 'Partial<Foo>',
      foo: '{ a: 3 }',
      bar: '{ a: 5, b: "bar" }',
      quox: '{ b: "quox", c: { d: 5 } }'
    }
  ];
  // Generate one describe block per fixture, resolving the types of handler
  // maps built with value handlers and nullary/unary/binary setters.
  for (let {
    type,
    expectedIn,
    expectedOut,
    returnTypeAnnotation,
    foo,
    bar,
    quox
  } of testParameters) {
    // optional fixture field: default to no annotation
    returnTypeAnnotation = returnTypeAnnotation || '';
    describe(`createHandlerMap return type for type ${type}`, () => {
      // Snippet compiled by resolve-types; __-prefixed type aliases are the
      // probes whose resolved text is asserted below.
      const code = `
import { createHandlerMap, HandlerType, HandlerMap } from './src/handler-map';
type Foo = ${type};
const initialState = ${foo} as Foo;
const emptyHandlerMap = createHandlerMap(initialState, {});
type __emptyHandlerMapType = typeof emptyHandlerMap;
const stringLiteralHandlerMap = createHandlerMap(initialState, {
SET_FOO: ${foo}${returnTypeAnnotation},
});
type __stringLiteralHandlerMapType = typeof stringLiteralHandlerMap;
const nullarySetterHandlerMap = createHandlerMap(initialState, {
SET_BAR: () => (${bar}${returnTypeAnnotation}),
});
type __nullarySetterHandlerMapType = typeof nullarySetterHandlerMap;
const unarySetterHandlerMap = createHandlerMap(initialState, {
SET_QUOX: _s => (${foo}${returnTypeAnnotation})
});
type __unarySetterHandlerMapType = typeof unarySetterHandlerMap;
const binarySetterHandlerMap = createHandlerMap(initialState, {
SET_BAAZ: (s, x: number) => (x > 0 ? ${bar} : ${quox}),
});
type __binarySetterHandlerMapType = typeof binarySetterHandlerMap;
const allTogetherHandlerMap = createHandlerMap(initialState, {
SET_FOO: ${foo}${returnTypeAnnotation} ,
SET_BAR: () => (${bar}${returnTypeAnnotation}),
SET_QUOX: s => (s === ${foo} ? ${bar} : ${quox}),
SET_BAAZ: (s, x: number) => (x > 0 ? ${bar} : ${quox}),
});
type __allTogetherHandlerMapType = typeof allTogetherHandlerMap;
`;
      const { types, diagnostics } = resolveTypes(code);
      it('works without type errors', () => {
        expect(getDiagnosticMessages(diagnostics)).toEqual([]);
      });
      it('is an empty object for an empty handler map', () => {
        expect(types['__emptyHandlerMapType']).toEqual('{}');
      });
      it('has correct literal types for handlers directly providing values', () => {
        expect(types['__stringLiteralHandlerMapType']).toEqual(
          `{ SET_FOO: ${expectedOut}; }`
        );
      });
      it('has correct types for a nullary setter function', () => {
        expect(types['__nullarySetterHandlerMapType']).toEqual(
          `{ SET_BAR: () => ${expectedOut}; }`
        );
      });
      it('has correct types for a unary setter function', () => {
        expect(types['__unarySetterHandlerMapType']).toEqual(
          `{ SET_QUOX: (state: ${expectedIn}) => ${expectedOut}; }`
        );
      });
      it('has correct types for a binary setter function', () => {
        expect(types['__binarySetterHandlerMapType']).toEqual(
          `{ SET_BAAZ: (state: ${expectedIn}, payload: number) => ${expectedOut}; }`
        );
      });
      it('has correct types for all handlers together', () => {
        expect(types['__allTogetherHandlerMapType']).toEqual(
          `{ SET_FOO: ${expectedOut}; SET_BAR: () => ${expectedOut}; SET_QUOX: (state: ${expectedIn}) => ${expectedOut}; SET_BAAZ: (state: ${expectedIn}, payload: number) => ${expectedOut}; }`
        );
      });
    });
  }
  // Negative cases: invalid handler maps must produce an assignability
  // diagnostic against HandlerMapConstraint<State>.
  describe('createHandlerMap creates type errors when', () => {
    // `typeSynonyms` lists the alternative spellings the compiler may use
    // for the same type in its error message (alias vs structural form).
    const testParameters = [
      {
        type: "'foo' | 'bar' | 'quox'",
        values: { valid: "'foo'", invalid: "'baaz'" },
        typeSynonyms: {
          state: ['Foo', "'foo' | 'bar' | 'quox'"],
          invalid: ['"baaz"', 'string', '"foo"']
        }
      },
      {
        type: 'number[]',
        values: { valid: '[1, 2]', invalid: "[1, 2, 'bar']" },
        typeSynonyms: {
          state: ['number[]'],
          invalid: ['(string | number)[]', 'number[]']
        }
      },
      {
        type: '{ a: number; b: string; c: { d: number; }; }',
        values: {
          valid: "{ a: 3, b: 'foo', c: { d: 5 } }",
          invalid: '{ a: "foo" }'
        },
        typeSynonyms: {
          state: ['Foo'],
          invalid: [
            '{ a: string; }',
            '{ a: number; b: string; c: { d: number; }; }'
          ]
        }
      }
    ];
    /**
     * Computes all combinations of strings. [['a', 'b'], ['c'], ['x', 'y']]
     * gives [['a', 'c', 'x'], ['b', 'c', 'x'], ['a', 'c', 'y'], ['b', 'c', 'y']]
     */
    const allCombinations = (xs: string[][]): string[][] => {
      if (xs.length <= 0) {
        return [];
      }
      if (xs.length === 1) {
        return xs[0].map(x => [x]);
      }
      const result: string[][] = [];
      // recurse on the tail, then prefix each head element onto every sub-result
      const subResults = allCombinations(xs.slice(1));
      for (const subResult of subResults) {
        for (const x of xs[0]) {
          result.push([x, ...subResult]);
        }
      }
      return result;
    };
    // Builds every message variant reachable from the synonym lists.
    const createMessages = (
      msgCreator: (...xs: string[]) => string,
      synonyms: string[][]
    ) =>
      allCombinations(synonyms).map(combination =>
        msgCreator(...combination)
      );
    // Asserts that `msg` matches at least one synonym-expanded variant.
    const expectToEqualWithSynonyms = (
      msg: string,
      msgProvider: (...xs: string[]) => string,
      synonyms: string[][]
    ): void => {
      const msgs = createMessages(msgProvider, synonyms);
      expect(msgs).toContain(msg);
    };
    for (const { type, values, typeSynonyms } of testParameters) {
      // Pairs of [handler-map source snippet, expected-type factory]; the
      // factory receives the compiler's spelling of the invalid type.
      const handlerPairs = [
        [`{ SETTER: ${values.invalid} }`, it => `{ SETTER: ${it}; }`],
        [
          `{ NULLARY: () => (${values.invalid}) }`,
          it => `{ NULLARY: () => ${it}; }`
        ],
        [
          `{ UNARY: (payload: number) => (${values.invalid}) }`,
          it => `{ UNARY: (payload: number) => ${it}; }`
        ],
        [
          `{ BINARY: (state: ${
            typeSynonyms.state[0]
          }, payload: number) => (${values.invalid}) }`,
          it =>
            `{ BINARY: (state: ${
              typeSynonyms.state[0]
            }, payload: number) => ${it}; }`
        ],
        // Invalid state parameter
        [
          `{ BINARY: (state: number, payload: number) => (${
            values.valid
          }) }`,
          it =>
            `{ BINARY: (state: number, payload: number) => ${it}; }`
        ]
      ].map(
        ([handlerMap, typesCreator]: [string, (x: string) => string]) =>
          [
            handlerMap,
            createMessages(typesCreator, [typeSynonyms.invalid])
          ] as [string, string[]]
      );
      for (const [handlerMap, handlerTypes] of handlerPairs) {
        it(`invalid handler map ${handlerMap} for state ${type} is provided`, () => {
          const code = `
import { createHandlerMap, HandlerType } from './src/handler-map';
type Foo = ${type};
const initialState = ${values.valid} as Foo;
const stringLiteralHandlerMap = createHandlerMap(initialState, ${handlerMap});
type __wrongHandlerType = typeof stringLiteralHandlerMap;
`;
          // only the first diagnostic is asserted against
          const message = getDiagnosticMessages(
            resolveTypes(code).diagnostics
          )[0];
          const synonyms = [handlerTypes, [type, 'Foo']];
          const messageCreator = (
            handlerType: string,
            stateType: string
          ) =>
            `Argument of type '${handlerType}' is not assignable to parameter of type 'HandlerMapConstraint<${stateType}>'.`;
          expectToEqualWithSynonyms(
            message,
            messageCreator,
            synonyms
          );
        });
      }
    }
  });
  // Type operators extracting state/action types back out of a handler map.
  describe('type operators', () => {
    const { types, diagnostics } = resolveTypes`
import { createHandlerMap, StateFromHandlerMap, ActionsFromHandlerMap } from './src/handler-map';
const initial = 3 as string | number;
const map = {
FOO: () => 3,
BAR: "hi there",
SET: (payload: number) => payload,
QUOX: (state, payload: number) => payload > 3 ? payload : state
};
const constructedHm = createHandlerMap(initial, map);
type __StateConstructedHm = StateFromHandlerMap<typeof constructedHm>;
type __StateInferredHm = StateFromHandlerMap<typeof map>;
type __Actions = ActionsFromHandlerMap<typeof constructedHm>;
`;
    describe('StateFromHandlerMap', () => {
      it('correctly extracts the type from a given handler map', () => {
        expect(getDiagnosticMessages(diagnostics)).toEqual([]);
        expect(types['__StateConstructedHm']).toEqual(
          'string | number'
        );
        // a bare map (not built via createHandlerMap) carries no state info
        expect(types['__StateInferredHm']).toEqual('any');
      });
    });
    describe('ActionsFromHandlerMap', () => {
      it('extracts the right actions from a given handler map', () => {
        expect(types['__Actions']).toEqual("{ type: \"FOO\"; } | { type: \"BAR\"; } | { type: \"SET\"; } | { type: \"QUOX\"; payload: number; }");
      });
    });
  });
});
import { debounce } from "debounce";
import * as fs from "fs";
import * as vscode from "vscode";
import { setsAreEqual } from "./sets";
import { FileNode, fileNodeSort } from "./tag-tree/file-node";
import { TagNode, tagNodeSort } from "./tag-tree/tag-node";
import { TagTree } from "./tag-tree/tag-tree";
import * as grayMatter from "gray-matter";
import { Uri } from "vscode";
import * as path from "path";
/**
 * Aggregated tag information for a single file.
 */
interface IFileInfo {
  // unique tags found in the file (front matter plus @nested-tags lines)
  tags: Set<string>;
  // filesystem path the tags were read from
  filePath: string;
}
class TagTreeDataProvider
implements vscode.TreeDataProvider<TagNode | FileNode> {
private tagTree: TagTree;
// Responsible for notifying the TreeDataProvider to update for the specified element in tagTree
private _onDidChangeTreeData: vscode.EventEmitter<
TagNode | FileNode | null
> = new vscode.EventEmitter<TagNode | FileNode | null>();
/*
* An optional event to signal that an element or root has changed.
* This will trigger the view to update the changed element/root and its children recursively (if shown).
* To signal that root has changed, do not pass any argument or pass undefined or null.
*/
readonly onDidChangeTreeData: vscode.Event<TagNode | FileNode | null> = this
._onDidChangeTreeData.event;
constructor() {
/* Register the extension to events of interest
* Debounce to improve performance. Otherwise a file read would occur during each of the user's changes to the document.
*/
vscode.workspace.onDidChangeTextDocument(
debounce(
(e: vscode.TextDocumentChangeEvent) => this.onDocumentChanged(e),
500
)
);
vscode.workspace.onWillSaveTextDocument(e => {
this.onWillSaveTextDocument(e);
});
// @ts-ignore
const additionalFileTypes: string[] = vscode.workspace
.getConfiguration()
.get("vscode-nested-tags.additionalFileTypes");
const customGlobPattern =
additionalFileTypes.length > 0 ? `,${additionalFileTypes.join(",")}` : "";
const globPattern = `{md${customGlobPattern}}`;
const fileWatcher = vscode.workspace.createFileSystemWatcher("**/*");
fileWatcher.onDidDelete((...args) => this.onDidDelete(...args));
fileWatcher.onDidCreate((...args) => this.onDidCreate(...args));
this.tagTree = new TagTree();
/*
* Add all files in the current workspace folder to the tag tree
*/
(async () => {
const uris = await vscode.workspace.findFiles(`**/*.${globPattern}`);
const infos = await Promise.all(
uris.map(uri => this.getTagsFromFileOnFileSystem(uri.fsPath))
);
infos
.filter(info => info.tags.size > 0)
.forEach(info => {
const displayName = this.getPathRelativeToWorkspaceFolder(
Uri.file(info.filePath)
);
this.tagTree.addFile(info.filePath, [...info.tags], displayName);
});
this._onDidChangeTreeData.fire();
})();
}
/**
* Required for implementing TreeDataProvider interface.
*
* @param {(TagNode | FileNode)} element
* @returns
* @memberof TagTreeDataProvider
*/
public getChildren(element: TagNode | FileNode) {
if (element instanceof FileNode) {
return [];
} else if (element === undefined) {
// Convert the tags and files sets to arrays, then sort the arrays add tags first, then files
const children = [
...[...this.tagTree.root.tags.values()].sort(tagNodeSort),
...[...this.tagTree.root.files.values()].sort(fileNodeSort)
];
return children;
} else {
const children = [
...[...element.tags.values()].sort(tagNodeSort),
...[...element.files.values()].sort(fileNodeSort)
];
return children;
}
}
/**
* Required for implementing TreeDataProvider interface.
*
* @param {(TagNode | FileNode)} element
* @returns {vscode.TreeItem}
* @memberof TagTreeDataProvider
*/
public getTreeItem(element: TagNode | FileNode): vscode.TreeItem {
const tagTreeNode = this.tagTree.getNode(element.pathToNode);
const { displayName } = tagTreeNode;
const isFile = tagTreeNode instanceof FileNode;
const collapsibleState = isFile
? vscode.TreeItemCollapsibleState.None
: vscode.TreeItemCollapsibleState.Collapsed;
const result = new vscode.TreeItem(displayName, collapsibleState);
if (isFile) {
result.command = {
arguments: [vscode.Uri.file(tagTreeNode.filePath)],
command: "vscode.open",
title: "Jump to tag reference"
};
}
return result;
}
/**
* Update the ui view if the document that is about to be saved has a different set of tags than
* what is located in the currentState of the tag tree. This keeps the tree view in sync with
* any changes to tags for a document before saving.
* @param changeEvent
*/
private async onWillSaveTextDocument(
changeEvent: vscode.TextDocumentWillSaveEvent
): Promise<void> {
if (
changeEvent.document.isDirty &&
this.matchesWatchedFileExtensions(changeEvent.document.uri)
) {
const filePath = changeEvent.document.fileName;
const fileInfo = await this.getTagsFromFileOnFileSystem(filePath);
const tagsInTreeForFile = this.tagTree.getTagsForFile(filePath);
// @ts-ignore
this.updateTreeForFile(filePath, tagsInTreeForFile, fileInfo.tags);
}
}
/**
* Updates the tagTree and the ui tree view upon _every_ _single_ _change_ (saved or unsaved)
* to a document. This method helps to keep the tag contents of the document in sync with the
* tag tree view in the UI. This method fires for documents that have already been written to
* the file system or are still in memory.
*
* @param changeEvent
*/
private onDocumentChanged(changeEvent: vscode.TextDocumentChangeEvent): void {
const filePath = changeEvent.document.fileName;
// If the file has been saved and the file is a watched file type allow for making changes to the tag tree
if (
filePath !== undefined &&
this.matchesWatchedFileExtensions(changeEvent.document.uri)
) {
const fileInfo = this.getTagsFromFileText(
changeEvent.document.getText(),
filePath
);
const tagsInTreeForFile = this.tagTree.getTagsForFile(filePath);
const isUpdateNeeded = !setsAreEqual(fileInfo.tags, tagsInTreeForFile);
/*
* This could be potentially performance intensive due to the number of changes that could
* be made to a document and how large the document is. There will definitely need to be some
* work done around TagTree to make sure that the code is performant.
*/
if (isUpdateNeeded) {
this.tagTree.deleteFile(filePath);
const displayName = this.getPathRelativeToWorkspaceFolder(
Uri.file(filePath)
);
this.tagTree.addFile(
filePath,
[...fileInfo.tags.values()],
displayName
);
// TODO: (bdietz) - this._onDidChangeTreeData.fire(specificNode?)
this._onDidChangeTreeData.fire();
}
}
}
private async onDidDelete(fileUri: vscode.Uri) {
// I'm not sure if it matters whether or not the item is attempted to be deleted
this.tagTree.deleteFile(fileUri.fsPath);
this._onDidChangeTreeData.fire();
}
private async onDidCreate(fileUri: vscode.Uri) {
if (!fs.lstatSync(fileUri.fsPath).isDirectory()) {
const displayName = this.getPathRelativeToWorkspaceFolder(fileUri);
const fileInfo = await this.getTagsFromFileOnFileSystem(fileUri.fsPath);
this.tagTree.addFile(
fileUri.fsPath,
[...fileInfo.tags.values()],
displayName
);
this._onDidChangeTreeData.fire();
}
}
/**
*
* @param filePath The uri path to the file
* @param tagsBefore The tags before a change to the document
* @param tagsAfter The tags after a change to the document
*/
private updateTreeForFile(
filePath: string,
tagsBefore: Set<string>,
tagsAfter: Set<string>
) {
const isUpdateNeeded = !setsAreEqual(tagsBefore, tagsAfter);
if (isUpdateNeeded) {
this.tagTree.deleteFile(filePath);
const displayName = this.getPathRelativeToWorkspaceFolder(
Uri.file(filePath)
);
this.tagTree.addFile(filePath, [...tagsAfter.values()], displayName);
/*
* TODO (bdietz) - this._onDidChangeTreeData.fire(specificNode?)
* specifying the specific node would help to improve the efficiency of the tree refresh.
* Right now null/undefined being passed in fires off a refresh for the root of the tag tree.
* I wonder if all the parents that have been impacted should be returned from the tag tree
* for a fileDelete.
*/
this._onDidChangeTreeData.fire();
}
}
// TODO: (bdietz) - the method names of getTagsFrom* are kind of misleading because they return a FileInfo object.
/**
 * Retrieves tags for a file's text content without accessing the file system.
 *
 * Tags come from two places: a `tags` entry in the yaml frontmatter and any
 * line containing an `@nested-tags:` marker (comma-separated list, with a
 * best-effort strip of `-->` / `*​/` comment terminators).
 *
 * @param fileContents The document text
 * @param filePath The local filesystem path
 * @returns An IFileInfo holding the file path and the union of all tags found
 */
private getTagsFromFileText(
  fileContents: string,
  filePath: string
): IFileInfo {
  // Parse any yaml frontmatter and check for tags within that frontmatter.
  const { data } = grayMatter(fileContents);
  // Frontmatter `tags` may be a single string or an array of strings.
  // Normalize to an array here: previously a lone string ended up being
  // spread into the Set constructor, splitting it into single characters.
  let yamlTags: string[] = [];
  if (data.tags) {
    yamlTags = Array.isArray(data.tags) ? data.tags : [data.tags];
  }
  return fileContents.split("\n").reduce(
    (accumulator, currentLine) => {
      if (!currentLine.includes("@nested-tags:")) {
        return accumulator;
      }
      // `.pop()` can only be undefined for an empty array; `split` never
      // returns one, but guard anyway instead of suppressing the checker.
      const tagsToAdd = (currentLine.split("@nested-tags:").pop() ?? "")
        // Do some best effort cleanup on common multi-line comment closing syntax
        .replace("-->", "")
        .replace("*/", "")
        .split(",");
      return {
        ...accumulator,
        tags: new Set([...accumulator.tags, ...tagsToAdd])
      };
    },
    { tags: new Set<string>(yamlTags), filePath }
  );
}
/**
 * Retrieves tags for a file on the file system by reading its contents
 * and delegating to the pure text parser.
 *
 * @param filePath The local filesystem path
 */
private async getTagsFromFileOnFileSystem(
  filePath: string
): Promise<IFileInfo> {
  const contents = (await fs.promises.readFile(filePath)).toString();
  return this.getTagsFromFileText(contents, filePath);
}
/**
 * Converts an absolute uri into a path relative to its workspace folder.
 * Falls back to the raw uri path when the file lives outside any open
 * workspace folder.
 *
 * @param uri The file uri to relativize
 */
private getPathRelativeToWorkspaceFolder(uri: Uri): string {
  const workspaceFolder = vscode.workspace.getWorkspaceFolder(uri);
  if (typeof workspaceFolder === "undefined") {
    return uri.path;
  }
  return path.relative(workspaceFolder.uri.path, uri.path);
}
/**
 * Checks to see if a given file uri matches the file extensions that are
 * user configured. Markdown is always watched; extra extensions come from
 * the `vscode-nested-tags.additionalFileTypes` setting.
 *
 * @param uri The file uri to test
 */
private matchesWatchedFileExtensions(uri: Uri) {
  const configuredTypes = vscode.workspace
    .getConfiguration()
    .get("vscode-nested-tags.additionalFileTypes");
  // @ts-ignore
  const supportedFileExtensions = new Set(["md", ...configuredTypes]);
  // Everything after the final dot; files without a dot yield the whole path
  // and simply fail the membership test.
  const fileExtension = uri.fsPath.split(".").pop();
  return supportedFileExtensions.has(fileExtension);
}
}
export { TagTreeDataProvider }; | the_stack |
import {resolve} from 'path';
import {GitHelper, Command} from '@technote-space/github-action-helper';
import {Logger} from '@technote-space/github-action-log-helper';
import {
getContext,
testEnv,
testFs,
spyOnSpawn,
execCalledWith,
testChildProcess,
setChildProcessParams,
spyOnStdout,
stdoutCalledWith,
} from '@technote-space/github-action-test-helper';
import {getParams} from '../../src/utils/misc';
import {
replaceDirectory,
clone,
checkBranch,
prepareFiles,
createBuildInfoFile,
copyFiles,
config,
getDeleteTestTag,
deleteTestTags,
push,
} from '../../src/utils/command';
// Shared fixtures: toggle for the fake filesystem's "exists" answer, repo
// root, and a GitHelper wired with a dummy token for command assertions.
const setExists = testFs();
const rootDir = resolve(__dirname, '..', '..');
const logger = new Logger();
const helper = new GitHelper(logger, {token: 'test-token'});

// Reset memoized params and logger state so each test starts clean.
beforeEach(() => {
  getParams.clear();
  Logger.resetForTesting();
});
// replaceDirectory() yields a formatter that masks the absolute build /
// push / working directories in logged command strings.
describe('replaceDirectory', () => {
  testEnv(rootDir);
  const workDir = resolve('test-dir/.work');
  const buildDir = resolve('test-dir/.work/build');
  const pushDir = resolve('test-dir/.work/push');

  it('should replace build directory', () => {
    process.env.GITHUB_WORKSPACE = 'test-dir';
    // `-C <dir>` is stripped entirely, not just the path.
    expect(replaceDirectory(getContext({}))(`git -C ${buildDir} fetch`)).toBe('git fetch');
  });

  // NOTE(review): this case exercises the push directory; the description
  // appears copy-pasted from the previous test.
  it('should replace build directory', () => {
    process.env.GITHUB_WORKSPACE = 'test-dir';
    expect(replaceDirectory(getContext({}))(`git -C ${pushDir} fetch`)).toBe('git fetch');
  });

  it('should replace working directory', () => {
    process.env.GITHUB_WORKSPACE = 'test-dir';
    expect(replaceDirectory(getContext({}))(`git -C ${workDir} fetch`)).toBe('git fetch');
  });

  // Bare occurrences (no `-C`) are replaced with readable placeholders.
  it('should replace directories', () => {
    process.env.GITHUB_WORKSPACE = 'test-dir';
    expect(replaceDirectory(getContext({}))(`abc ${buildDir} && pqr ${workDir}/xyz ${pushDir}/123`)).toBe('abc <Build Directory> && pqr <Working Directory>/xyz <Push Directory>/123');
  });
});
// clone() initializes a repo, wires the tokenized remote, and checks out
// the configured branch; every step tolerates failure via `|| :`.
describe('clone', () => {
  testEnv(rootDir);

  it('should run clone command', async() => {
    process.env.INPUT_GITHUB_TOKEN = 'test-token';
    process.env.INPUT_BRANCH_NAME = 'test-branch';
    process.env.GITHUB_WORKSPACE = 'test-dir';
    const mockExec = spyOnSpawn();
    await clone(logger, helper, getContext({
      repo: {
        owner: 'Hello',
        repo: 'World',
      },
    }));

    execCalledWith(mockExec, [
      'git init \'.\'',
      'git remote add origin \'https://octocat:test-token@github.com/Hello/World.git\' || :',
      'git fetch --no-tags origin \'refs/heads/test-branch:refs/remotes/origin/test-branch\' || :',
      'git checkout -b test-branch origin/test-branch || :',
      'git checkout test-branch || :',
    ]);
  });
});
// checkBranch() is a no-op when the cloned branch matches the configured
// one; otherwise it re-inits and creates an orphan branch.
describe('checkBranch', () => {
  testEnv(rootDir);

  it('should do nothing', async() => {
    process.env.INPUT_GITHUB_TOKEN = 'test-token';
    process.env.INPUT_BRANCH_NAME = 'test-branch';
    process.env.GITHUB_WORKSPACE = 'test-dir';
    const mockExec = spyOnSpawn();
    await checkBranch('test-branch', logger, helper, getContext({}));

    expect(mockExec).not.toBeCalled();
  });

  it('should run git init command', async() => {
    process.env.INPUT_GITHUB_TOKEN = 'test-token';
    process.env.INPUT_BRANCH_NAME = 'test-branch';
    process.env.GITHUB_WORKSPACE = 'test-dir';
    const mockExec = spyOnSpawn();
    // Cloned branch differs from INPUT_BRANCH_NAME → start over on an orphan.
    await checkBranch('test-branch2', logger, helper, getContext({}));

    execCalledWith(mockExec, [
      'git init \'.\'',
      'git checkout --orphan test-branch',
    ]);
  });
});
// prepareFiles() checks out the triggering ref/sha, installs production
// deps, and cleans development files. The expectations below pin the exact
// shell-quoting/escaping behavior of the clean step, so they are copied
// character-for-character.
describe('prepareFiles', () => {
  testEnv(rootDir);
  const buildDir = resolve('test-dir/.work/build');
  const pushDir = resolve('test-dir/.work/push');
  // Steps shared by every scenario: install, stash action.yml out of the
  // way, delete dev artifacts, restore action.yml.
  const commonCheck: string[] = [
    'yarn install --production',
    `mv -f '${resolve(buildDir, 'action.yaml')}' '${resolve(pushDir, 'action.yml')}' > /dev/null 2>&1 || :`,
    `mv -f '${resolve(buildDir, 'action.yml')}' '${resolve(pushDir, 'action.yml')}' > /dev/null 2>&1 || :`,
    'rm -rdf .[!.]*',
    'rm -rdf *.js',
    'rm -rdf *.ts',
    'rm -rdf *.json',
    'rm -rdf *.lock',
    'rm -rdf *.yml',
    'rm -rdf *.yaml',
    'rm -rdf __tests__ docs src',
    `mv -f '${resolve(pushDir, 'action.yml')}' '${resolve(buildDir, 'action.yml')}' > /dev/null 2>&1 || :`,
  ];

  it('should checkout branch', async() => {
    process.env.INPUT_PACKAGE_MANAGER = 'yarn';
    process.env.INPUT_GITHUB_TOKEN = 'test-token';
    process.env.GITHUB_WORKSPACE = 'test-dir';
    const mockExec = spyOnSpawn();
    await prepareFiles(logger, helper, getContext({
      repo: {
        owner: 'Hello',
        repo: 'World',
      },
      ref: 'refs/heads/test',
      sha: 'test-sha',
    }));

    execCalledWith(mockExec, ([
      'git init \'.\'',
      'git remote add origin \'https://octocat:test-token@github.com/Hello/World.git\' || :',
      'git fetch --no-tags origin \'refs/heads/test:refs/remotes/origin/test\' || :',
      'git checkout -qf test-sha',
    ] as any[]).concat(commonCheck)); // eslint-disable-line @typescript-eslint/no-explicit-any
  });

  it('should checkout tag', async() => {
    process.env.INPUT_PACKAGE_MANAGER = 'yarn';
    process.env.INPUT_GITHUB_TOKEN = 'test-token';
    process.env.GITHUB_WORKSPACE = 'test-dir';
    const mockExec = spyOnSpawn();
    await prepareFiles(logger, helper, getContext({
      repo: {
        owner: 'Hello',
        repo: 'World',
      },
      // Tag refs fetch into refs/tags rather than refs/remotes.
      ref: 'refs/tags/v1.0-beta+exp.sha.5114f85',
      sha: 'test-sha',
    }));

    execCalledWith(mockExec, ([
      'git init \'.\'',
      'git remote add origin \'https://octocat:test-token@github.com/Hello/World.git\' || :',
      'git fetch --no-tags origin \'refs/tags/v1.0-beta+exp.sha.5114f85:refs/tags/v1.0-beta+exp.sha.5114f85\' || :',
      'git checkout -qf test-sha',
    ] as any[]).concat(commonCheck)); // eslint-disable-line @typescript-eslint/no-explicit-any
  });

  // CLEAN_TARGETS is comma-separated; leading-dash targets are protected
  // with `--`, glob targets are kept unquoted, the rest are shell-quoted.
  it('should clean specified targets 1', async() => {
    process.env.INPUT_PACKAGE_MANAGER = 'yarn';
    process.env.INPUT_GITHUB_TOKEN = 'test-token';
    process.env.GITHUB_WORKSPACE = 'test-dir';
    process.env.INPUT_CLEAN_TARGETS = 'test1,-test2,test3 test4,-test5 , test6;test7, test8/*.txt, *.test9';
    const mockExec = spyOnSpawn();
    await prepareFiles(logger, helper, getContext({
      repo: {
        owner: 'Hello',
        repo: 'World',
      },
      ref: 'refs/tags/test',
      sha: 'test-sha',
    }));

    execCalledWith(mockExec, ([
      'git init \'.\'',
      'git remote add origin \'https://octocat:test-token@github.com/Hello/World.git\' || :',
      'git fetch --no-tags origin \'refs/tags/test:refs/tags/test\' || :',
      'git checkout -qf test-sha',
      'yarn install --production',
      `mv -f '${resolve(buildDir, 'action.yaml')}' '${resolve(pushDir, 'action.yml')}' > /dev/null 2>&1 || :`,
      `mv -f '${resolve(buildDir, 'action.yml')}' '${resolve(pushDir, 'action.yml')}' > /dev/null 2>&1 || :`,
      'rm -rdf -- -test2',
      'rm -rdf -- -test5',
      'rm -rdf test8/*.txt',
      'rm -rdf *.test9',
      'rm -rdf test1 \'test3 test4\' \'test6;test7\'',
      `mv -f '${resolve(pushDir, 'action.yml')}' '${resolve(buildDir, 'action.yml')}' > /dev/null 2>&1 || :`,
    ]));
  });

  // Hostile input: shell metacharacters must be escaped and dangerous
  // free-text targets (e.g. "rm -rf /") must end up quoted, not executed.
  it('should clean specified targets 2', async() => {
    process.env.INPUT_PACKAGE_MANAGER = 'yarn';
    process.env.INPUT_GITHUB_TOKEN = 'test-token';
    process.env.GITHUB_WORKSPACE = 'test-dir';
    process.env.INPUT_CLEAN_TARGETS = '-test1, -test2/?<>:|"\'@#$%^& ;.*.test3 , ?<>:|"\'@#$%^& ;/test4 test5/*.txt,;?<>:|"\'@#$%^& ;.txt,rm -rf /';
    const mockExec = spyOnSpawn();
    await prepareFiles(logger, helper, getContext({
      repo: {
        owner: 'Hello',
        repo: 'World',
      },
      ref: 'refs/tags/test',
      sha: 'test-sha',
    }));

    execCalledWith(mockExec, ([
      'git init \'.\'',
      'git remote add origin \'https://octocat:test-token@github.com/Hello/World.git\' || :',
      'git fetch --no-tags origin \'refs/tags/test:refs/tags/test\' || :',
      'git checkout -qf test-sha',
      'yarn install --production',
      `mv -f '${resolve(buildDir, 'action.yaml')}' '${resolve(pushDir, 'action.yml')}' > /dev/null 2>&1 || :`,
      `mv -f '${resolve(buildDir, 'action.yml')}' '${resolve(pushDir, 'action.yml')}' > /dev/null 2>&1 || :`,
      'rm -rdf -- -test1',
      'rm -rdf -- -test2/\\?\\<\\>\\:\\|\\"\\\'\\@\\#\\$\\%\\^\\&\\ \\;.*.test3',
      'rm -rdf ?\\<\\>\\:\\|\\"\\\'\\@\\#\\$\\%\\^\\&\\ \\;/test4 test5/*.txt',
      'rm -rdf \';?<>:|"\'\\\'\'@#$%^& ;.txt\' \'rm -rf /\'',
      `mv -f '${resolve(pushDir, 'action.yml')}' '${resolve(buildDir, 'action.yml')}' > /dev/null 2>&1 || :`,
    ]));
  });
});
// createBuildInfoFile() writes a JSON build-metadata file; an output
// filename of '/' disables it, and the parent dir is only created when
// the fake filesystem reports it missing.
describe('createBuildInfoFile', () => {
  testEnv(rootDir);

  it('should do nothing', async() => {
    // '/' means "no output file" → no write at all.
    process.env.INPUT_OUTPUT_BUILD_INFO_FILENAME = '/';
    process.env.INPUT_BRANCH_NAME = 'test-branch';
    process.env.GITHUB_WORKSPACE = 'test-dir';
    // eslint-disable-next-line @typescript-eslint/no-var-requires
    const writeMock = jest.spyOn(require('fs'), 'writeFileSync');
    await createBuildInfoFile(logger, getContext({
      eventName: 'push',
      ref: 'refs/tags/v1.2.3',
    }));

    expect(writeMock).not.toBeCalled();
  });

  it('should write file', async() => {
    process.env.INPUT_OUTPUT_BUILD_INFO_FILENAME = 'info.json';
    process.env.INPUT_BRANCH_NAME = 'test-branch';
    process.env.GITHUB_WORKSPACE = 'test-dir';
    // eslint-disable-next-line @typescript-eslint/no-var-requires
    const mkdirMock = jest.spyOn(require('fs'), 'mkdirSync');
    // eslint-disable-next-line @typescript-eslint/no-var-requires
    const writeMock = jest.spyOn(require('fs'), 'writeFileSync');
    await createBuildInfoFile(logger, getContext({
      eventName: 'push',
      ref: 'refs/tags/v1.2.3',
    }));

    expect(mkdirMock).toBeCalledTimes(1);
    expect(writeMock).toBeCalledTimes(1);
  });

  it('should not create dir', async() => {
    process.env.INPUT_OUTPUT_BUILD_INFO_FILENAME = 'info.json';
    process.env.INPUT_BRANCH_NAME = 'test-branch';
    process.env.GITHUB_WORKSPACE = 'test-dir';
    // eslint-disable-next-line @typescript-eslint/no-var-requires
    const mkdirMock = jest.spyOn(require('fs'), 'mkdirSync');
    // eslint-disable-next-line @typescript-eslint/no-var-requires
    const writeMock = jest.spyOn(require('fs'), 'writeFileSync');
    // Pretend the directory already exists → mkdirSync must be skipped.
    setExists(true);
    await createBuildInfoFile(logger, getContext({
      eventName: 'push',
      ref: 'refs/tags/v1.2.3',
    }));

    expect(mkdirMock).toBeCalledTimes(0);
    expect(writeMock).toBeCalledTimes(1);
  });
});
// copyFiles() mirrors the build directory into the push directory with
// rsync, preserving symlinks and excluding .git.
describe('copyFiles', () => {
  testEnv(rootDir);

  it('should run rsync command', async() => {
    process.env.GITHUB_WORKSPACE = 'test-dir';
    const mockExec = spyOnSpawn();
    await copyFiles(logger, new Command(logger), getContext({}));
    const buildDir = resolve('test-dir/.work/build');
    const pushDir = resolve('test-dir/.work/push');

    execCalledWith(mockExec, [
      `rsync -rl --exclude '.git' --delete '${buildDir}/' '${pushDir}'`,
    ]);
  });
});
// config() sets the committer identity to the github-actions bot.
describe('config', () => {
  testEnv(rootDir);

  it('should run git config command', async() => {
    process.env.GITHUB_WORKSPACE = 'test-dir';
    const mockExec = spyOnSpawn();
    await config(logger, helper, getContext({}));

    execCalledWith(mockExec, [
      'git config \'user.name\' \'github-actions[bot]\'',
      'git config \'user.email\' \'41898282+github-actions[bot]@users.noreply.github.com\'',
    ]);
  });
});
// getDeleteTestTag() lists existing test tags (with the given prefix) that
// are older than the current version and therefore safe to delete.
describe('getDeleteTestTag', () => {
  testEnv(rootDir);
  testChildProcess();

  it('should return empty', async() => {
    setChildProcessParams({
      stdout: (command: string): string => {
        if (command.endsWith('git tag')) {
          return '';
        }
        return '';
      },
    });

    expect(await getDeleteTestTag('v1.2.3', 'test/', helper, getContext({}))).toEqual([]);
  });

  it('should get delete test tag', async() => {
    setChildProcessParams({
      stdout: (command: string): string => {
        if (command.endsWith('git tag')) {
          return 'v1\nv1.2\nv1.2.2\ntest/v0\ntest/v1\ntest/v1.1\ntest/v1.2\ntest/v1.2.2\ntest/v1.2.3\ntest/v1.2.3.1';
        }
        return '';
      },
    });

    // Only strictly-older prefixed tags are returned; un-prefixed tags,
    // the current tag, and newer tags are kept.
    expect(await getDeleteTestTag('v1.2.3', 'test/', helper, getContext({}))).toEqual([
      'test/v0',
      'test/v1.1',
      'test/v1.2.2',
    ]);
  });

  it('should get delete original test tag', async() => {
    setChildProcessParams({
      stdout: (command: string): string => {
        if (command.endsWith('git tag')) {
          return 'v1\noriginal/v1.2\nv1.2.2\ntest/v0\noriginal/test/v1\ntest/v1.1\ntest/v1.2\noriginal/test/v1.2.2\noriginal/test/v1.2.3\ntest/v1.2.3.1';
        }
        return '';
      },
    });

    expect(await getDeleteTestTag('v1.2.3', 'original/test/', helper, getContext({}))).toEqual([
      'original/test/v1.2.2',
    ]);
  });
});
// deleteTestTags() removes stale test tags (remote first, then local) when
// CLEAN_TEST_TAG is enabled; with an ORIGINAL_TAG_PREFIX it repeats the
// cleanup for the original/-prefixed copies.
describe('deleteTestTags', () => {
  testEnv(rootDir);
  testChildProcess();
  const context = getContext({
    eventName: 'push',
    ref: 'refs/tags/v1.2.3',
    repo: {
      owner: 'Hello',
      repo: 'World',
    },
  });
  // Mixed tag listing covering plain, test/ and original/test/ variants.
  const tags = 'v1\nv1.2\nv1.2.2\ntest/v0\noriginal/test/v0\ntest/v1\noriginal/test/v1\ntest/v1.1\noriginal/test/v1.1\ntest/v1.2\noriginal/test/v1.2\ntest/v1.2.2\noriginal/test/v1.2.2\ntest/v1.2.3\noriginal/test/v1.2.3';

  // No TEST_TAG_PREFIX configured → nothing to clean.
  it('should do nothing 1', async() => {
    process.env.INPUT_GITHUB_TOKEN = 'test-token';
    process.env.INPUT_ORIGINAL_TAG_PREFIX = 'original/';
    process.env.INPUT_CLEAN_TEST_TAG = '1';
    const mockExec = spyOnSpawn();
    await deleteTestTags(helper, context);

    execCalledWith(mockExec, []);
  });

  // Cleaning disabled → nothing to clean.
  it('should do nothing 2', async() => {
    process.env.INPUT_GITHUB_TOKEN = 'test-token';
    process.env.INPUT_TEST_TAG_PREFIX = 'test/';
    process.env.INPUT_CLEAN_TEST_TAG = '';
    const mockExec = spyOnSpawn();
    await deleteTestTags(helper, context);

    execCalledWith(mockExec, []);
  });

  it('should delete test tags', async() => {
    process.env.INPUT_GITHUB_TOKEN = 'test-token';
    process.env.INPUT_TEST_TAG_PREFIX = 'test/';
    process.env.INPUT_CLEAN_TEST_TAG = '1';
    const mockExec = spyOnSpawn();
    setChildProcessParams({
      stdout: (command: string): string => {
        if (command.endsWith('git tag')) {
          return tags;
        }
        return '';
      },
    });
    await deleteTestTags(helper, context);

    execCalledWith(mockExec, [
      'git tag',
      'git push \'https://octocat:test-token@github.com/Hello/World.git\' --delete tags/test/v0 \'tags/test/v1.1\' \'tags/test/v1.2.2\' || :',
      'git tag -d test/v0 \'test/v1.1\' \'test/v1.2.2\' || :',
    ]);
  });

  it('should delete original test tags', async() => {
    process.env.INPUT_GITHUB_TOKEN = 'test-token';
    process.env.INPUT_TEST_TAG_PREFIX = 'test/';
    process.env.INPUT_ORIGINAL_TAG_PREFIX = 'original/';
    process.env.INPUT_CLEAN_TEST_TAG = '1';
    const mockExec = spyOnSpawn();
    setChildProcessParams({
      stdout: (command: string): string => {
        if (command.endsWith('git tag')) {
          return tags;
        }
        return '';
      },
    });
    await deleteTestTags(helper, context);

    execCalledWith(mockExec, [
      'git tag',
      'git push \'https://octocat:test-token@github.com/Hello/World.git\' --delete tags/test/v0 \'tags/test/v1.1\' \'tags/test/v1.2.2\' || :',
      'git tag -d test/v0 \'test/v1.1\' \'test/v1.2.2\' || :',
      'git tag',
      'git push \'https://octocat:test-token@github.com/Hello/World.git\' --delete tags/original/test/v0 \'tags/original/test/v1.1\' \'tags/original/test/v1.2.2\' || :',
      'git tag -d original/test/v0 \'original/test/v1.1\' \'original/test/v1.2.2\' || :',
    ]);
  });
});
// push() expands the triggering tag into its major/minor/patch aliases,
// re-tags, and force-pushes tags + branch(es); with an ORIGINAL_TAG_PREFIX
// it also mirrors the triggering tag under that prefix first. Expectations
// pin both the raw commands and the token-masked log output.
describe('push', () => {
  testEnv(rootDir);
  testChildProcess();

  it('should run git push command', async() => {
    process.env.INPUT_GITHUB_TOKEN = 'test-token';
    process.env.GITHUB_WORKSPACE = 'test-dir';
    process.env.INPUT_BRANCH_NAME = 'test-branch';
    process.env.INPUT_CLEAN_TEST_TAG = '1';
    const mockExec = spyOnSpawn();
    const mockStdout = spyOnStdout();
    await push(logger, helper, getContext({
      eventName: 'push',
      // SemVer with build metadata: alias expansion keeps the +exp suffix.
      ref: 'refs/tags/v1.0-beta+exp.sha.5114f85',
      repo: {
        owner: 'Hello',
        repo: 'World',
      },
    }));

    execCalledWith(mockExec, [
      'git tag',
      'git tag -d stdout > /dev/null 2>&1',
      'git fetch \'https://octocat:test-token@github.com/Hello/World.git\' --tags > /dev/null 2>&1',
      'git tag -d \'v1.0.0-beta+exp.sha.5114f85\' \'v1.0-beta+exp.sha.5114f85\' \'v1-beta+exp.sha.5114f85\' || :',
      'git tag \'v1.0.0-beta+exp.sha.5114f85\'',
      'git tag \'v1.0-beta+exp.sha.5114f85\'',
      'git tag \'v1-beta+exp.sha.5114f85\'',
      'git push --tags --force \'https://octocat:test-token@github.com/Hello/World.git\' \'test-branch:refs/heads/test-branch\'',
    ]);
    // Logged output must show 'origin' instead of the tokenized URL.
    stdoutCalledWith(mockStdout, [
      '::group::Pushing to Hello/World@test-branch (tag: v1.0-beta+exp.sha.5114f85)...',
      '[command]git fetch origin --tags',
      '[command]git tag -d \'v1.0.0-beta+exp.sha.5114f85\' \'v1.0-beta+exp.sha.5114f85\' \'v1-beta+exp.sha.5114f85\'',
      '  >> stdout',
      '[command]git tag \'v1.0.0-beta+exp.sha.5114f85\'',
      '  >> stdout',
      '[command]git tag \'v1.0-beta+exp.sha.5114f85\'',
      '  >> stdout',
      '[command]git tag \'v1-beta+exp.sha.5114f85\'',
      '  >> stdout',
      '[command]git push --tags --force origin test-branch:refs/heads/test-branch',
      '  >> stdout',
    ]);
  });

  it('should run git push command with pushing original tag', async() => {
    process.env.INPUT_GITHUB_TOKEN = 'test-token';
    process.env.GITHUB_WORKSPACE = 'test-dir';
    // ${MAJOR} in the branch name is substituted from the tag (→ releases/v1).
    process.env.INPUT_BRANCH_NAME = 'releases/${MAJOR},test-branch1,test-branch2';
    process.env.INPUT_TEST_TAG_PREFIX = 'test/';
    process.env.INPUT_ORIGINAL_TAG_PREFIX = 'original/';
    process.env.INPUT_CLEAN_TEST_TAG = '1';
    const mockExec = spyOnSpawn();
    const mockStdout = spyOnStdout();
    await push(logger, helper, getContext({
      eventName: 'push',
      ref: 'refs/tags/test/v1.2.3',
      repo: {
        owner: 'Hello',
        repo: 'World',
      },
    }));

    execCalledWith(mockExec, [
      'git tag',
      'git tag -d stdout > /dev/null 2>&1',
      'git fetch \'https://octocat:test-token@github.com/Hello/World.git\' --tags > /dev/null 2>&1',
      'git push \'https://octocat:test-token@github.com/Hello/World.git\' --delete \'tags/original/test/v1.2.3\' || :',
      'git tag -d \'original/test/v1.2.3\' || :',
      'git tag \'original/test/v1.2.3\' \'test/v1.2.3\'',
      'git push \'https://octocat:test-token@github.com/Hello/World.git\' \'refs/tags/original/test/v1.2.3\'',
      'git tag',
      'git tag -d stdout > /dev/null 2>&1',
      'git fetch \'https://octocat:test-token@github.com/Hello/World.git\' --tags > /dev/null 2>&1',
      'git tag -d \'test/v1.2.3\' \'test/v1.2\' test/v1 || :',
      'git tag \'test/v1.2.3\'',
      'git tag \'test/v1.2\'',
      'git tag test/v1',
      'git push --tags --force \'https://octocat:test-token@github.com/Hello/World.git\' \'releases/v1:refs/heads/releases/v1\'',
      'git checkout -b test-branch1',
      'git push --force \'https://octocat:test-token@github.com/Hello/World.git\' \'test-branch1:refs/heads/test-branch1\'',
      'git checkout -b test-branch2',
      'git push --force \'https://octocat:test-token@github.com/Hello/World.git\' \'test-branch2:refs/heads/test-branch2\'',
    ]);
    stdoutCalledWith(mockStdout, [
      '::group::Pushing to Hello/World@releases/v1 (tag: test/v1.2.3)...',
      '[command]git fetch origin --tags',
      '[command]git push origin --delete tags/original/test/v1.2.3',
      '  >> stdout',
      '[command]git tag -d \'original/test/v1.2.3\'',
      '  >> stdout',
      '[command]git tag \'original/test/v1.2.3\' \'test/v1.2.3\'',
      '  >> stdout',
      '[command]git push origin refs/tags/original/test/v1.2.3',
      '  >> stdout',
      '[command]git fetch origin --tags',
      '[command]git tag -d \'test/v1.2.3\' \'test/v1.2\' test/v1',
      '  >> stdout',
      '[command]git tag \'test/v1.2.3\'',
      '  >> stdout',
      '[command]git tag \'test/v1.2\'',
      '  >> stdout',
      '[command]git tag test/v1',
      '  >> stdout',
      '[command]git push --tags --force origin releases/v1:refs/heads/releases/v1',
      '  >> stdout',
      '[command]git checkout -b test-branch1',
      '  >> stdout',
      '[command]git push --force origin test-branch1:refs/heads/test-branch1',
      '  >> stdout',
      '[command]git checkout -b test-branch2',
      '  >> stdout',
      '[command]git push --force origin test-branch2:refs/heads/test-branch2',
      '  >> stdout',
    ]);
  });
});
/**
 * Minimal Node.js-style event emitter (ambient declaration only — the
 * implementation is provided elsewhere in the library).
 */
declare class EventEmitter {
    // Internal event-name → listener(s) registry (shape is an
    // implementation detail; declaration only).
    _events: any;
    // NOTE(review): presumably the count of registered event names — the
    // declaration alone cannot confirm; verify against the implementation.
    _eventsCount: number;
    /**
     * Minimal `EventEmitter` interface that is molded against the Node.js
     * `EventEmitter` interface.
     *
     * @constructor
     * @public
     */
    constructor();
    /**
     * Return an array listing the events for which the emitter has registered
     * listeners.
     *
     * @returns {Array}
     * @public
     */
    eventNames(): any[];
    /**
     * Return the listeners registered for a given event.
     *
     * @param {(String|Symbol)} event The event name.
     * @returns {Array} The registered listeners.
     * @public
     */
    listeners(event: any): any[];
    /**
     * Return the number of listeners listening to a given event.
     *
     * @param {(String|Symbol)} event The event name.
     * @returns {Number} The number of listeners.
     * @public
     */
    listenerCount(event: any): any;
    /**
     * Calls each of the listeners registered for a given event.
     * Up to five positional arguments are forwarded to the listeners.
     *
     * @param {(String|Symbol)} event The event name.
     * @returns {Boolean} `true` if the event had listeners, else `false`.
     * @public
     */
    emit(event: any, a1?: any, a2?: any, a3?: any, a4?: any, a5?: any): boolean;
    /**
     * Add a listener for a given event.
     *
     * @param {(String|Symbol)} event The event name.
     * @param {Function} fn The listener function.
     * @param {*} [context=this] The context to invoke the listener with.
     * @returns {EventEmitter} `this`.
     * @public
     */
    on(event: any, fn: any, context?: any): any;
    /**
     * Add a one-time listener for a given event.
     * NOTE(review): unlike `on`, `context` is declared required here —
     * confirm against the implementation whether it may be omitted.
     *
     * @param {(String|Symbol)} event The event name.
     * @param {Function} fn The listener function.
     * @param {*} [context=this] The context to invoke the listener with.
     * @returns {EventEmitter} `this`.
     * @public
     */
    once(event: any, fn: any, context: any): any;
    /**
     * Remove the listeners of a given event.
     *
     * @param {(String|Symbol)} event The event name.
     * @param {Function} fn Only remove the listeners that match this function.
     * @param {*} context Only remove the listeners that have this context.
     * @param {Boolean} once Only remove one-time listeners.
     * @returns {EventEmitter} `this`.
     * @public
     */
    removeListener(event: any, fn: any, context: any, once: any): this;
    /**
     * Remove all listeners, or those of the specified event.
     *
     * @param {(String|Symbol)} [event] The event name.
     * @returns {EventEmitter} `this`.
     * @public
     */
    removeAllListeners(event: any): this;
}
/** @ignore */
/**
 * Promise subclass used for remote invocations: exposes its own
 * `resolve`/`reject` so the RPC layer can settle it when the remote
 * result arrives, and carries the call signature in `sig`.
 */
declare class EurecaPromise<T> extends Promise<T> {
    // Signature identifying the pending remote call.
    sig: string;
    // Captured executor callbacks, settled externally by the RPC layer.
    resolve: any;
    reject: any;
    constructor(executor: (resolve: (value?: T | PromiseLike<T>) => void, reject: (reason?: any) => void) => void);
    // NOTE(review): parameter name "onfullfilled" is misspelled in the
    // implementation; kept as-is to stay in sync with it.
    onReady(onfullfilled: any, onrejected: any): Promise<T>;
}
/**
 * Represents a client socket in the server side <br />
 * When a new client is connected, a socket is instantiated in the server side allowing the server to exchange data with that client <br />
 * In most use cases you don't need to handle this socket directly <br />
 * but if you want to implement your own protocole on top of eureca.io you can use this interface to handle raw data.<br />
 *
 * @interface ISocket
 *
 *
 * @example
 * // <h3>Server side</h3>
 * var server = new Eureca.Server();
 * server.on('connect', function(socket) {
 *     socket.send('my raw data');
 * });
 *
 * <br />
 *
 * @example
 * // <h3>Client side</h3>
 *
 * var client = new Eureca.Client();
 *
 * // See <b>@[ {@link Client#event:unhandledMessage|unhandledMessage event} ]</b>
 *
 * client.on('unhandledMessage', function (data) {
 *    console.log(data); // prints : "my raw data"
 * });
 *
 *
 */
interface ISocket {
    // Unique connection identifier assigned by the transport.
    id: any;
    eureca: any;
    // Proxy object exposing the remote peer's functions.
    proxy: any;
    contract: any;
    context: any;
    /**
     * Send user data to the client bound to this socket
     *
     * @function ISocket#send
     * @param {any} rawData - data to send (must be serializable type)
     */
    send(data: any): any;
    /** Close the underlying connection. */
    close(): any;
    /** True once the peer has successfully authenticated. */
    isAuthenticated(): boolean;
    // (duplicate send/close declarations removed — they were declared twice
    // with identical signatures)
    on(evt: string, callback: (...args: any[]) => any, context?: any): any;
    import(name: string): any;
}
/** @ignore */
/**
 * Per-invocation context handed to locally exported functions: bundles the
 * originating socket and raw message, and provides helpers to send the
 * result (or an error) back to the caller.
 */
declare class InvokeContext {
    // Socket of the peer that issued the call.
    socket: ISocket;
    // Raw protocol message that triggered the invocation.
    message: any;
    // When true the function will reply later via return()/sendResult().
    async: boolean;
    proxy: any;
    user: any;
    serialize: any;
    __erctx__: any;
    // Correlation id used to route the reply to the pending remote promise.
    retId: any;
    constructor(socket: ISocket, message: any);
    /** Send `result` (or `error`) back to the remote caller. */
    return(result: any, error?: any): void;
    /** Lower-level reply with an explicit error code. */
    sendResult(result: any, error: any, errorcode?: number): void;
}
/** @ignore */
/**
 * RPC stub shared by client and server: builds proxy functions for remote
 * calls, correlates replies via registered callbacks, and dispatches
 * incoming invocations to local functions.
 */
declare class Stub {
    settings: any;
    // Pending reply callbacks keyed by call signature.
    private static callbacks;
    constructor(settings?: any);
    /** Register the callback to run when the reply for `sig` arrives. */
    static registerCallBack(sig: any, cb: any): void;
    /** Resolve/reject the pending call identified by `sig`. */
    static doCallBack(sig: any, result: any, error: any, socket?: ISocket): void;
    /** Invoke a remote function; returns a promise settled by the reply. */
    invokeRemote(fname: string, importName: string, socket: ISocket, ...args: any[]): EurecaPromise<unknown>;
    /**
     * Generate proxy functions allowing to call remote functions
     */
    importRemoteFunction(socket: ISocket, functions: any, importName: string, filterRx?: RegExp): void;
    /** Execute a locally exported function on behalf of a remote caller. */
    invokeLocal(invokeContext: InvokeContext, handle: any): void;
}
declare const version = "0.9.0";
/**
 * Eureca client class
 * This constructor takes an optional settings object
 * @constructor Client
 * @param {object} [settings] - have the following properties <br />
 * @property {uri} settings.uri - Eureca server WS uri, browser client can automatically guess the server URI if you are using a single Eureca server but Nodejs client need this parameter.
 * @property {string} [settings.prefix=eureca.io] - This determines the websocket path, it's invisible to the user but if for some reason you want to rename this path use this parameter.
 * @property {int} [settings.retry=20] - Determines max retries to reconnect to the server if the connection is lost.
 * @property {boolean} [settings.autoConnect=true] - Establish connection automatically after instantiation.<br />if set to False you'll need to call client.connect() explicitly.
 * @property {object} [settings.transportSettings] - If defined, all parameters passed here will be sent to the underlying transport settings, this can be used to fine-tune, or override transport settings.
 *
 * @example
 * **Example of a nodejs client**
 * ```javascript
 * var Eureca = require('eureca.io');
 * var client = new Eureca.Client({ uri: 'ws://localhost:8000/', prefix: 'eureca.io', retry: 3 });
 * client.ready(function (serverProxy) {
 *    // ...
 * });
 * ```
 *
 * @example
 * **Equivalent browser client**
 * ```html
 * <!doctype html>
 * <html>
 *     <head>
 *         <script src="/eureca.js"></script>
 *     </head>
 *     <body>
 *         <script>
 *             var client = new Eureca.Client({prefix: 'eureca.io', retry: 3 });
 *             //uri is optional in browser client
 *             client.ready(function (serverProxy) {
 *                 // ...
 *             });
 *         </script>
 *     </body>
 * </html>
 * ```
 *
 * @see authenticate
 * @see connect
 * @see disconnect
 * @see send
 * @see isReady
 *
 *
 */
declare class Client extends EventEmitter {
    settings: any;
    transport: any;
    // Functions exported by this client, callable from the server.
    exports: {};
    stub: Stub;
    contract: any[];
    /** @ignore */
    private __eureca_exports__;
    /** @ignore */
    private __eureca_imports__;
    /** @ignore */
    private __eureca_imports_received__;
    private socket;
    private state;
    // Proxy exposing the server's exported functions once connected.
    serverProxy: any;
    /** @ignore */
    private serialize;
    /** @ignore */
    private deserialize;
    constructor(settings?: any);
    private loadTransports;
    /** Run `callback` (with the server proxy) once the connection is ready. */
    ready(callback: any): Promise<unknown>;
    /**
     * Send authentication request to the server. <br />
     * this can take an arbitrary number of arguments depending on what you defined in the server side <br />
     * when the server receive an auth request it'll handle it and return null on auth success, or an error message if something goes wrong <br />
     * you need to listen to the auth result through the authResponse event
     * ** Important ** : it's up to you to define the authentication method in the server side
     * @function Client#authenticate
     *
     * @example
     * var client = new Eureca.Client({..});
     * //listen to auth response
     * client.onAuthResponse(function(result) {
     *      if (result == null) {
     *          // ... Auth OK
     *      }
     *      else {
     *          // ... Auth failed
     *      }
     * });
     *
     * client.ready(function(){
     *
     *      //send auth request
     *      client.authenticate('your_auth_token');
     * });
     */
    authenticate(...args: any[]): void;
    /**
     * connect client
     *
     *
     * @function Client#connect
     *
     */
    connect(): void;
    /**
     * close client connection
     *
     *
     * @function Client#disconnect
     *
     */
    disconnect(): void;
    _export(obj: any, name: any, update?: boolean): void;
    import(name?: string): any;
    private _handleClient;
    /**
     * Bind a callback to 'update' event @see {@link Client#event:update|Client update event}
     * >**Note :** you can also use Client.on('update', callback) to bind update event
     *
     * @function Client#update
     *
     */
    update(callback: (any: any) => void): void;
    /**
     * Bind a callback to 'connect' event
     * >**Note :** you can also use Client.on('connect', callback) to bind connect event
     *
     * @function Client#onConnect
     *
     */
    onConnect(callback: (any: any) => void): void;
    /**
     * Bind a callback to 'disconnect' event @see {@link Client#event:disconnect|Client disconnect event}
     * >**Note :** you can also use Client.on('disconnect', callback) to bind disconnect event
     *
     * @function Client#onDisconnect
     *
     */
    onDisconnect(callback: (any: any) => void): void;
    /**
     * Bind a callback to 'message' event @see {@link Client#event:message|Client message event}
     * >**Note :** you can also use Client.on('message', callback) to bind message event
     *
     * @function Client#onMessage
     *
     */
    onMessage(callback: (any: any) => void): void;
    /**
     * Bind a callback to 'unhandledMessage' event @see {@link Client#event:unhandledMessage|Client unhandledMessage event}
     * >**Note :** you can also use Client.on('unhandledMessage', callback) to bind unhandledMessage event
     *
     * @function Client#onUnhandledMessage
     *
     */
    onUnhandledMessage(callback: (any: any) => void): void;
    /**
     * Bind a callback to 'error' event @see {@link Client#event:error|Client error event}
     * >**Note :** you can also use Client.on('error', callback) to bind error event
     *
     * @function Client#onError
     *
     */
    onError(callback: (any: any) => void): void;
    /**
     * Bind a callback to 'connectionLost' event
     * >**Note :** you can also use Client.on('connectionLost', callback) to bind connectionLost event
     *
     * @function Client#onConnectionLost
     *
     */
    onConnectionLost(callback: (any: any) => void): void;
    /**
     * Bind a callback to 'connectionRetry' event
     * >**Note :** you can also use Client.on('connectionRetry', callback) to bind connectionRetry event
     *
     * @function Client#onConnectionRetry
     *
     */
    onConnectionRetry(callback: (any: any) => void): void;
    /**
     * Bind a callback to 'authResponse' event @see {@link Client#event:authResponse|Client authResponse event}
     * >**Note :** you can also use Client.on('authResponse', callback) to bind authResponse event
     *
     * @function Client#onAuthResponse
     *
     */
    onAuthResponse(callback: (any: any) => void): void;
}
/**
 * Eureca server constructor
 * This constructor takes an optional settings object
 * @constructor Server
 * @param {object} [settings] - have the following properties
 * @property {string} [settings.transport=engine.io] - can be "engine.io", "sockjs", "websockets", "faye" or "browserchannel" by default "engine.io" is used
 * @property {function} [settings.authenticate] - If this function is defined, the client will not be able to invoke server functions until it successfully call the client side authenticate method, which will invoke this function.
 * @property {function} [settings.serialize] - If defined, this function is used to serialize the request object before sending it to the client (default is JSON.stringify). This function can be useful to add custom information/meta-data to the transmitted request.
 * @property {function} [settings.deserialize] - If defined, this function is used to deserialize the received response string.
 * @property {function} [settings.cookieParser] - If defined, middleware will be used to parse cookies (to use with express cookieParser).
 * @property {object} [settings.transportSettings] - If defined, all parameters passed here will be sent to the underlying transport settings, this can be used to fine-tune, or override transport settings.
 *
 * @example
 * <h4> # default instantiation</h4>
 * var Eureca = require('eureca.io');
 * //use default transport
 * var server = new Eureca.Server();
 *
 *
 * @example
 * <h4> # custom transport instantiation </h4>
 * var Eureca = require('eureca.io');
 * //use websockets transport
 * var server = new Eureca.Server({transport:'websockets'});
 *
 * @example
 * <h4> # Authentication </h4>
 * var Eureca = require('eureca.io');
 *
 * var eurecaServer = new Eureca.Server({
 *     authenticate: function (authToken, next) {
 *         console.log('Called Auth with token=', authToken);
 *
 *         if (isValidToken(authToken)) next(); // authentication success
 *         else next('Auth failed'); //authentication fail
 *     }
 * });
 *
 * @see attach
 * @see getClient
 *
 *
 */
declare class Server extends EventEmitter {
    settings: any;
    transport: any;
    // Functions exported by the server, callable from clients.
    exports: {};
    stub: Stub;
    contract: any[];
    // Connected client sockets keyed by connection id.
    clients: {};
    private appServer;
    private __eureca_exports__;
    private __eureca_rest__;
    private scriptCache;
    private useAuthentication;
    /**
     * Allowed regular expression is used to check the validity of a function received from client
     */
    private allowedRx;
    private allowedF;
    private ioServer;
    private serialize;
    private deserialize;
    constructor(settings?: any);
    private loadTransports;
    private sendScript;
    private _export;
    private _handleEurecaClientSocketEvents;
    private _handleServer;
    /**
     * This method is used to get the client proxy of a given connection.
     * it allows the server to call remote client function
     *
     * @function Server#getClient
     * @param {String} id - client identifier
     * @returns {Proxy}
     *
     * @example
     * //we suppose here that the clients are exposing hello() function
     * //onConnect event give the server an access to the client socket
     * server.onConnect(function (socket) {
     *    //get client proxy by socket ID
     *    var client = server.getClient(socket.id);
     *    //call remote hello() function.
     *    client.hello();
     * }
     */
    getClient(id: any): any;
    /**
     * returns the client socket
     * @param id connection id
     */
    getClientSocket(id: any): any;
    getConnection(id: any): any;
    /**
     * Sends exported server functions to all connected clients <br />
     *
     * @function bind
     * @memberof Server#
     * @param {any} httpServer - a nodejs {@link https://nodejs.org/api/http.html#http_class_http_server|nodejs http server}
     * or {@link http://expressjs.com/api.html#application|expressjs Application}
     *
     */
    bind(httpServer: any): void;
    /** Attach the eureca transport to an existing application server. */
    attach(appServer: any): void;
    getify(fnList?: any): void;
    onConnect(callback: (any: any) => void): void;
    onDisconnect(callback: (any: any) => void): void;
    onMessage(callback: (any: any) => void): void;
    onError(callback: (any: any) => void): void;
}
/** Minimal contract a transport's server object must satisfy. */
interface IServer {
    onconnect(callback: (socket: ISocket) => void): any;
}
/** Static registry of pluggable transports (engine.io, sockjs, websockets, …). */
declare class Transport {
    static _transports: any;
    /** Registers a transport under `name` with its client script and factory functions. */
    static register(name: string, clientScript: string, createClient: (uri: string, options?: any) => ISocket, createServer: (hook: any, options?: any) => IServer, defaultSerializer?: (data: any) => any, defaultDeserializer?: (data: any) => any): boolean;
    /** Looks a registered transport up by name. */
    static get(name: any): any;
}
/** Wire-protocol field identifiers shared by client and server messages. */
declare class Protocol {
    static contractFnList: string;
    static nsListId: string;
    static contractObjId: string;
    static command: string;
    static authReq: string;
    static authResp: string;
    static signal: string;
    static signalACK: string;
    static functionId: string;
    static argsId: string;
    static resultId: string;
    static errorId: string;
    static signatureId: string;
    static context: string;
}
export { Client, Protocol, Server, Transport, version }; | the_stack |
import {
isWriteableObservable, observable, dependencyDetection
} from '@tko/observable'
import {
isPureComputed, isComputed, computed, pureComputed
} from '../dist'
// Spec suite for tko pureComputed: a "sleeping" computed that does not
// subscribe to its dependencies until it has subscribers of its own, and
// re-evaluates on access while asleep only when a dependency has changed.
describe('Pure Computed', function () {
  it('Observables should advertise that instances are not pure computed', function () {
    var instance = observable()
    expect(isPureComputed(instance)).toEqual(false)
  })

  it('Should advertise that instances are computed', function () {
    var computedInstance = pureComputed(function () { })
    expect(isComputed(computedInstance)).toEqual(true)
  })

  it('Should advertise that instances are pure computed', function () {
    var instance = pureComputed(function () { })
    expect(isPureComputed(instance)).toEqual(true)
  })

  it('Should advertise that instances are not computed', function () {
    var instance = observable()
    expect(isComputed(instance)).toEqual(false)
  })

  it('Should require an evaluator function as constructor param', function () {
    expect(function () { pureComputed() }).toThrow()
  })

  it('Should be able to pass evaluator function using "options" parameter called "read"', function () {
    var computedInstance = pureComputed({
      read: function () { return 123 }
    })
    expect(computedInstance()).toEqual(123)
  })

  it('Should not be able to write a value to it if there is no "write" callback', function () {
    var computedInstance = pureComputed(function () { return 123 })
    expect(isWriteableObservable(computedInstance)).toEqual(false)
    expect(function () { computedInstance(456) }).toThrow()
  })

  it('Should invoke the "write" callback, where present, if you attempt to write a value to it', function () {
    var invokedWriteWithValue
    var computedInstance = pureComputed({
      read: function () {},
      write: function (value) { invokedWriteWithValue = value }
    })
    expect(isWriteableObservable(computedInstance)).toEqual(true)
    computedInstance('some value')
    expect(invokedWriteWithValue).toEqual('some value')
  })

  it('Should describe itself as active initially', function () {
    var computedInstance = pureComputed(function () { })
    expect(computedInstance.isActive()).toEqual(true)
  })

  it('Should describe itself as inactive if the evaluator has no dependencies on its first run', function () {
    var computedInstance = pureComputed(function () { })
    computedInstance() // access the computed to evaluate it
    expect(computedInstance.isActive()).toEqual(false)
  })

  it('Should describe itself as active if the evaluator has dependencies on its first run', function () {
    var observableInstance = observable('initial'),
      computedInstance = computed(observableInstance)
    computedInstance() // access the computed to evaluate it
    expect(computedInstance.isActive()).toEqual(true)
  })

  it('Should evaluate on each access while sleeping when dependencies have changed', function () {
    var timesEvaluated = 0,
      data = observable('A'),
      computedInstance = pureComputed(function () { ++timesEvaluated; return data() })

    expect(timesEvaluated).toEqual(0)

    expect(computedInstance()).toEqual('A')
    expect(timesEvaluated).toEqual(1)

    // Access after changing dependency causes re-evaluation
    data('B')
    expect(computedInstance()).toEqual('B')
    expect(timesEvaluated).toEqual(2)

    // Test a second time using peek
    data('C')
    expect(computedInstance.peek()).toEqual('C')
    expect(timesEvaluated).toEqual(3)

    // Access without changing dependency does not cause evaluation
    expect(computedInstance()).toEqual('C')
    expect(timesEvaluated).toEqual(3)
  })

  it('Should notify "spectator" subscribers whenever the value changes', function () {
    var obs = new observable('A')
    var computed = pureComputed(obs)
    var computed2 = pureComputed(computed)
    var notifiedValues = []
    computed.subscribe(function (value) {
      notifiedValues.push(value)
      expect(computed()).toBe(value)
      expect(computed2()).toBe(value)
    }, null, 'spectate')

    expect(notifiedValues).toEqual([])

    // Reading the computed for the first time causes a notification
    expect(computed()).toEqual('A')
    expect(computed2()).toEqual('A')
    expect(notifiedValues).toEqual(['A'])

    // Reading it a second time doesn't
    expect(computed()).toEqual('A')
    expect(computed2()).toEqual('A')
    expect(notifiedValues).toEqual(['A'])

    // Changing the dependency doesn't, but reading the computed again does
    obs('B')
    expect(notifiedValues).toEqual(['A'])
    expect(computed()).toEqual('B')
    expect(notifiedValues).toEqual(['A', 'B'])
  })

  it('Should not subscribe to dependencies while sleeping', function () {
    var data = observable('A'),
      computedInstance = pureComputed(data)

    // Accessing the computed evaluates it
    expect(computedInstance()).toEqual('A')

    // No subscription is registered on the dependent observable
    expect(data.getSubscriptionsCount()).toEqual(0)

    // getDependenciesCount returns the correct number
    expect(computedInstance.getDependenciesCount()).toEqual(1)
    expect(computedInstance.getDependencies()).toEqual([data])
  })

  it('Should not evaluate after it has been disposed', function () {
    var timesEvaluated = 0,
      data = observable('A'),
      computedInstance = pureComputed(function () { ++timesEvaluated; return data() })

    expect(computedInstance()).toEqual('A')
    expect(timesEvaluated).toEqual(1)

    computedInstance.dispose()
    expect(computedInstance.isActive()).toEqual(false)

    // These should not cause a new evaluation
    data('B')
    expect(computedInstance()).toEqual('A')
    expect(timesEvaluated).toEqual(1)
  })

  it('Should awaken and perform dependency detection when subscribed to', function () {
    var data = observable('A'),
      computedInstance = pureComputed(data),
      notifiedValues = []

    // Subscribe to computed; the dependency should now be tracked
    computedInstance.subscribe(function (value) { notifiedValues.push(value) })
    expect(data.getSubscriptionsCount()).toEqual(1)
    expect(computedInstance.getDependenciesCount()).toEqual(1)
    expect(computedInstance.getDependencies()).toEqual([data])

    // The subscription should not have sent down the initial value
    expect(notifiedValues).toEqual([])

    // Updating data should trigger the subscription
    data('B')
    expect(notifiedValues).toEqual(['B'])
  })

  it('Should go back to sleep when all subscriptions are disposed', function () {
    var data = observable('A'),
      computedInstance = pureComputed(data),
      subscription = computedInstance.subscribe(function () {})

    expect(data.getSubscriptionsCount()).toEqual(1)
    expect(computedInstance.getDependenciesCount()).toEqual(1)
    expect(computedInstance.getDependencies()).toEqual([data])

    // Dispose the subscription to the computed
    subscription.dispose()

    // It goes to sleep, disposing its subscription to the observable
    expect(data.getSubscriptionsCount()).toEqual(0)
    expect(computedInstance.getDependenciesCount()).toEqual(1) // dependency count of computed doesn't change
    expect(computedInstance.getDependencies()).toEqual([data])
  })

  it('Should fire "awake" and "asleep" events when changing state', function () {
    var data = observable('A'),
      computedInstance = pureComputed(data)

    var notifySpy = jasmine.createSpy('notifySpy')
    computedInstance.subscribe(notifySpy.bind(null, 'awake'), null, 'awake')
    computedInstance.subscribe(notifySpy.bind(null, 'asleep'), null, 'asleep')

    // Subscribing to non-change events doesn't awaken computed
    expect(data.getSubscriptionsCount()).toEqual(0)

    // Subscribe to computed; notifies with value
    var subscription = computedInstance.subscribe(function () {})
    expect(notifySpy.argsForCall).toEqual([ ['awake', 'A'] ])
    expect(data.getSubscriptionsCount()).toEqual(1)

    notifySpy.reset()
    data('B')
    expect(notifySpy).not.toHaveBeenCalled()

    subscription.dispose()
    expect(notifySpy.argsForCall).toEqual([ ['asleep', undefined] ])
    expect(data.getSubscriptionsCount()).toEqual(0)
  })

  it('Should subscribe to dependencies when awakened while minimizing evaluations', function () {
    var timesEvaluated = 0,
      data = observable('A'),
      computedInstance = pureComputed(function () { ++timesEvaluated; return data() }),
      notifiedValues = [],
      subscribeFunc = function (value) { notifiedValues.push(value) },
      subscription

    expect(timesEvaluated).toEqual(0)
    expect(computedInstance()).toEqual('A')
    expect(timesEvaluated).toEqual(1)
    expect(computedInstance.getDependenciesCount()).toEqual(1)
    expect(computedInstance.getDependencies()).toEqual([data])

    // Subscribing to the computed adds a subscription to the dependency without re-evaluating
    subscription = computedInstance.subscribe(subscribeFunc)
    expect(data.getSubscriptionsCount()).toEqual(1)
    expect(timesEvaluated).toEqual(1)

    // Dispose the subscription; reading the sleeping computed doesn't cause re-evaluation
    subscription.dispose()
    expect(computedInstance()).toEqual('A')
    expect(timesEvaluated).toEqual(1)

    // Updating data doesn't trigger re-evaluation (computed is sleeping)
    data('B')
    expect(timesEvaluated).toEqual(1)

    // Subscribing to the computed now does cause a re-evaluation because the dependency was changed
    subscription = computedInstance.subscribe(subscribeFunc)
    expect(timesEvaluated).toEqual(2)
    expect(notifiedValues).toEqual([]) // But nothing notified

    // Updating data should re-evaluate and trigger the subscription
    data('C')
    expect(timesEvaluated).toEqual(3)
    expect(notifiedValues).toEqual(['C'])
  })

  it('Should minimize evaluations when accessed from a computed', function () {
    var timesEvaluated = 0,
      data = observable('A'),
      pureComputedInstance = pureComputed(function () { ++timesEvaluated; return data() }),
      computedInstance = computed(pureComputedInstance)

    // Should only have evaluated the pure computed once
    expect(computedInstance()).toEqual('A')
    expect(timesEvaluated).toEqual(1)

    // Updating the dependency evaluates it again
    data('B')
    expect(computedInstance()).toEqual('B')
    expect(timesEvaluated).toEqual(2)

    // Double check that disposing subscriptions puts the pure computed to sleep
    computedInstance.dispose()
    expect(data.getSubscriptionsCount()).toEqual(0)
  })

  it('Should evaluate latest value when chaining pure computeds', function () {
    var data = observable('A'),
      computed1 = pureComputed(data),
      computed2 = pureComputed(computed1)

    expect(computed2()).toEqual('A')

    data('B')
    expect(computed2()).toEqual('B')
  })

  it('Should minimize evaluations when chaining pure computeds', function () {
    var timesEvaluated = 0,
      data = observable('A'),
      computed1 = pureComputed(function () { return data() <= 'M' }), // This computed will return the same value for many values of data
      computed2 = pureComputed(function () { ++timesEvaluated; return computed1() }) // This computed should only be re-evaluated when computed1 actually changes

    expect(computed2()).toEqual(true)
    expect(timesEvaluated).toEqual(1)

    data('B')
    expect(computed2()).toEqual(true)
    expect(timesEvaluated).toEqual(1)

    data('Z')
    expect(computed2()).toEqual(false)
    expect(timesEvaluated).toEqual(2)
  })

  it('Should be able to re-evaluate a sleeping computed that previously threw an exception', function () {
    var shouldThrow = observable(false), observableValue = observable(1),
      computedInstance = pureComputed(function () {
        if (shouldThrow()) {
          throw Error('Error during computed evaluation')
        } else {
          return observableValue()
        }
      })

    expect(computedInstance()).toEqual(1)

    observableValue(2)
    shouldThrow(true)
    expect(computedInstance).toThrow('Error during computed evaluation')

    shouldThrow(false)
    expect(computedInstance()).toEqual(2)
  })

  it('Should prevent recursive calling of read function', function () {
    // It doesn't really make sense to use the value of a pure computed within itself since there's no way to
    // prevent infinite recursion (a pure computed should never alter external state). So expect an error
    // if a pure computed is referenced recursively.
    var observableInstance = observable('A'),
      computedInstance = pureComputed(function () {
        return '' + observableInstance() + computedInstance()
      })

    // While sleeping
    expect(computedInstance).toThrow()

    // While awake
    observableInstance('B') // to ensure re-evaluation
    expect(function () {
      computedInstance(computedInstance)
    }).toThrow()
  })

  it('Should not add dependencies if disposed during evaluation while sleeping', function () {
    // This is a bit of a contrived example and likely won't occur in any actual applications.
    // See https://github.com/knockout/knockout/issues/1041
    var timesEvaluated = 0,
      observableToTriggerDisposal = observable(false),
      observableGivingValue = observable('A'),
      computedInstance = pureComputed(function () {
        if (observableToTriggerDisposal()) { computedInstance.dispose() }
        ++timesEvaluated
        return observableGivingValue()
      })

    // Check initial state
    expect(computedInstance()).toEqual('A')
    expect(timesEvaluated).toEqual(1)
    expect(computedInstance.getDependenciesCount()).toEqual(2)
    expect(computedInstance.getDependencies()).toEqual([observableToTriggerDisposal, observableGivingValue])

    // Now cause a disposal during evaluation
    observableToTriggerDisposal(true)
    expect(computedInstance()).toEqual('A')
    expect(timesEvaluated).toEqual(2)
    expect(computedInstance.getDependenciesCount()).toEqual(0)
    expect(computedInstance.getDependencies()).toEqual([])
  })

  it('Should support array tracking using extender', function () {
    var myArray = observable(['Alpha', 'Beta', 'Gamma']),
      myComputed = pureComputed(function () {
        return myArray().slice(-2)
      }).extend({trackArrayChanges: true}),
      changelist

    expect(myComputed()).toEqual(['Beta', 'Gamma'])

    // The pure computed doesn't yet subscribe to the observable (it's still sleeping)
    expect(myArray.getSubscriptionsCount()).toBe(0)

    var arrayChange = myComputed.subscribe(function (changes) {
      changelist = changes
    }, null, 'arrayChange')
    expect(myArray.getSubscriptionsCount()).toBe(1)

    myArray(['Alpha', 'Beta', 'Gamma', 'Delta'])
    expect(myComputed()).toEqual(['Gamma', 'Delta'])
    expect(changelist).toEqual([
      { status: 'deleted', value: 'Beta', index: 0 },
      { status: 'added', value: 'Delta', index: 1 }
    ])

    // It releases subscriptions when the arrayChange subscription is disposed
    arrayChange.dispose()
    expect(myArray.getSubscriptionsCount()).toBe(0)
  })

  it('Should reevaluate if dependency was changed during awakening, but not otherwise', function () {
    // See https://github.com/knockout/knockout/issues/1975
    var data = observable(0),
      isEven = pureComputed(function () { return !(data() % 2) }),
      timesEvaluated = 0,
      pureComputedInstance = pureComputed(function () { ++timesEvaluated; return isEven() }),
      subscription

    expect(pureComputedInstance()).toEqual(true)
    expect(timesEvaluated).toEqual(1)

    data(1)
    subscription = isEven.subscribe(function () {})
    expect(pureComputedInstance()).toEqual(false)
    expect(timesEvaluated).toEqual(2)
    subscription.dispose()

    data(3)
    subscription = isEven.subscribe(function () {})
    expect(pureComputedInstance()).toEqual(false)
    expect(timesEvaluated).toEqual(2)
  })

  it('Should wake with the correct value when a chained pure computed has side effects for its awake event', function () {
    const observableToUpdateOnAwake = observable(null)
    const computed1 = pureComputed(observableToUpdateOnAwake)
    const computed2 = pureComputed(computed1)

    computed1.subscribe(() => observableToUpdateOnAwake('foo'), null, 'awake')

    // Reading from the computed before subscribing caused the subscription to
    // ignore side-effects from the awake callback of chained pure computeds
    computed2()
    computed2.subscribe(() => { })

    expect(computed2()).toEqual('foo')
  })

  describe('Should maintain order of subscriptions', function () {
    var data, dataPureComputed

    function subscribeAndUpdate (computedInstance, newDataValue, expectedNotifiedValues) {
      var notifiedValues = []
      computedInstance.subscribe(function (value) { notifiedValues.push(value) })

      data(newDataValue)
      expect(notifiedValues).toEqual(expectedNotifiedValues)
    }

    beforeEach(function () {
      data = observable('A')
      computed(data) // This computed ensures that the 'data' observable gets an id number right away

      // Because this is a pure computed, it will subscribe to 'data' in response to awakening, such
      // as being accessed from another computed. It will also then get a higher id number than 'data'.
      dataPureComputed = pureComputed(data)
    })

    // The following two tests demonstrate that the difference in the order of subscriptions can be tested.

    it('base behavior: order is pure computed, observable', function () {
      // This one accesses the base observable second, so that the first update happens after both values have been updated
      var computedInstance = pureComputed(function () { return dataPureComputed() + data() })
      subscribeAndUpdate(computedInstance, 'B', ['BB'])
    })

    it('base behavior: order is observable, pure computed', function () {
      // This one accesses the base observable first, which results in an update before 'dataPureComputed' has updated
      var computedInstance = pureComputed(function () { return data() + dataPureComputed() })
      subscribeAndUpdate(computedInstance, 'B', ['BA', 'BB'])
    })

    // This test sets up a pure computed using the first order and checks that the order stays correct
    // when awakened after being accessed, such that it's not re-evaluated.
    it('when awakening, without re-evaluation', function () {
      var timesEvaluated = 0,
        computedInstance = pureComputed(function () { ++timesEvaluated; return dataPureComputed() + data() })

      // Access the pure computed while it is sleeping to evaluate it and record the dependencies
      expect(computedInstance()).toEqual('AA')
      expect(timesEvaluated).toEqual(1)

      // If the subscriptions happen in the wrong order, we'll get two notifications: 'AB', 'BB'
      subscribeAndUpdate(computedInstance, 'B', ['BB'])
      expect(timesEvaluated).toEqual(3)
    })
  })

  describe('Context', function () {
    it('Should not define initial evaluation', function () {
      var observableInstance = observable(1),
        evaluationCount = 0,
        computedInstance = pureComputed(function () {
          ++evaluationCount
          observableInstance() // for dependency
          return dependencyDetection.isInitial()
        })

      expect(evaluationCount).toEqual(0) // no evaluation yet
      expect(computedInstance()).toEqual(undefined) // isInitial is always undefined for a pure computed
      expect(evaluationCount).toEqual(1) // single evaluation

      observableInstance(2)
      computed(computedInstance) // wake up computed by subscribing to it
      expect(evaluationCount).toEqual(2) // which causes a second evaluation
      expect(computedInstance()).toEqual(undefined) // isInitial is still undefined
    })

    it('Should accurately report the number of dependencies', function () {
      var observable1 = observable(1),
        observable2 = observable(1),
        evaluationCount = 0,
        computedInstance = pureComputed(function () {
          // no dependencies at first
          expect(dependencyDetection.getDependenciesCount()).toEqual(0)
          expect(dependencyDetection.getDependencies()).toEqual([])
          // add a single dependency
          observable1()
          expect(dependencyDetection.getDependenciesCount()).toEqual(1)
          expect(dependencyDetection.getDependencies()).toEqual([observable1])
          // add a second one
          observable2()
          expect(dependencyDetection.getDependenciesCount()).toEqual(2)
          expect(dependencyDetection.getDependencies()).toEqual([observable1, observable2])
          // accessing observable again doesn't affect count
          observable1()
          expect(dependencyDetection.getDependenciesCount()).toEqual(2)
          expect(dependencyDetection.getDependencies()).toEqual([observable1, observable2])

          return ++evaluationCount
        })

      expect(computedInstance()).toEqual(1) // single evaluation
      expect(computedInstance.getDependenciesCount()).toEqual(2) // matches value from context
      expect(computedInstance.getDependencies()).toEqual([observable1, observable2])

      observable1(2)
      expect(computedInstance()).toEqual(2) // second evaluation
      expect(computedInstance.getDependenciesCount()).toEqual(2) // matches value from context
      expect(computedInstance.getDependencies()).toEqual([observable1, observable2])
    })
  })
})
import chalk from 'chalk';
import { IEndpoint, IParam } from '@materia/interfaces';
import { App } from '../app';
import { MateriaError } from '../error';
import { Controller } from './controller';
import { Entity } from '../entities/entity';
import { Query } from '../entities/query';
// Not used yet
/**
 * HTTP verbs an endpoint may use. Currently unused: `Endpoint.method`
 * is stored as a lowercase string instead (see constructor below).
 */
export enum Method {
	GET,
	POST,
	PUT,
	DELETE,
	PATCH
}
/*
data format:
{
name: string,
desc: string,
method: string (GET|POST|PUT|DELETE|PATCH)
url: string
base: string
params: array
data: array
action: string (QUERY|JS|SQL)
file: path (if action == CODE)
query: {
entity: string
id: string (queryId)
}
}
*/
export class Endpoint {
name: string;
method: string;
url: string;
fromAddon?: IEndpoint['fromAddon'];
entity: Entity;
params: Array<IParam>;
data: Array<IParam>;
permissions: Array<string>;
controller: string;
action: string;
query: Query;
queryStr: string;
static controllers: {[name: string]: Controller} = {};
static resetControllers(): void {
Endpoint.controllers = {};
}
constructor(private app: App, endpointConfig: IEndpoint) {
this.method = (endpointConfig.method && endpointConfig.method.toLowerCase()) || 'get';
// this.name = endpointConfig.name
// this.desc = endpointConfig.desc
this.url = endpointConfig.url;
this.fromAddon = endpointConfig.fromAddon;
this.params = [];
this.data = [];
this.permissions = endpointConfig.permissions || [];
/*if (typeof endpointConfig.query == 'function') {
this.params = endpointConfig.params || []
this.data = endpointConfig.data || []
this.query = endpointConfig.query
}*/
if (endpointConfig.controller) {
this.controller = endpointConfig.controller;
this.action = endpointConfig.action;
const basePath = this.fromAddon ? this.fromAddon.path : this.app.path;
const ctrl = this._getController();
ctrl.load(basePath, this.controller);
if ( ! ctrl.ctrlClass.prototype[this.action]) {
throw new MateriaError(`Could not find method ${this.action} in controller ${this.controller}.ctrl.js`);
}
this._buildParams(endpointConfig.params);
} else {
let entity_name;
if (typeof endpointConfig.query.entity == 'string') {
entity_name = endpointConfig.query.entity;
}
this.entity = this.app.entities.get(entity_name);
if ( ! this.entity) {
throw new MateriaError('Could not find entity ' + entity_name);
}
this.query = this.entity.getQuery(endpointConfig.query.id);
if ( ! this.query ) {
throw new MateriaError('Could not find query "' + endpointConfig.query.id + '" of entity ' + this.entity.name);
}
this._buildParams(this.query.params);
}
}
getParam(name: string): IParam {
return this.data.find(param => param.name === name);
}
getData(name: string): IParam {
return this.data.find(param => param.name === name);
}
getAllData(onlyRequired?: boolean): IParam[] {
return this.data.filter(param => param.required && onlyRequired || ! onlyRequired);
}
getAllParams(onlyRequired?: boolean): IParam[] {
return this.params.filter(param => param.required && onlyRequired || ! onlyRequired);
}
getRequiredParams(): IParam[] {
return this.getAllParams(true);
}
getRequiredData(): IParam[] {
return this.getAllData(true);
}
handle(req, res, next): Promise<any> {
if (this.method.toLowerCase() !== 'ws') {
return this._handleHttp(req, res, next);
} else {
return this._handleWS(req);
}
}
isInUrl(name) {
return this.url.indexOf(':' + name) != -1;
}
toJson() {
const json = {
name: this.name,
method: this.method,
url: this.url
// base: this.base,
} as IEndpoint;
if (this.controller) {
json.controller = this.controller;
json.action = this.action;
if (this.params.length || this.data.length) {
json.params = [];
}
if (this.params.length) {
this.params.map(param => json.params.push(param));
}
if (this.data.length) {
this.data.map(param => json.params.push(param));
}
} else {
json.query = {
entity: this.query.entity.name,
id: this.query.id
};
}
if (this.permissions.length) {
json.permissions = this.permissions;
}
return json;
}
private _getController(): Controller {
const controller_prefix = this.fromAddon ? this.fromAddon.package + '/' : '';
if ( ! Endpoint.controllers[controller_prefix + this.controller]) {
Endpoint.controllers[controller_prefix + this.controller] = new Controller(this.app);
}
return Endpoint.controllers[controller_prefix + this.controller];
}
private _buildParams(params: Array<any>) {
if ( ! params ) {
return false;
}
this.params = params.map(param => param);
}
private _handleParams(req, params): {resolvedParams: any, errors: MateriaError[]} {
const resolvedParams = Object.assign({}, req.query, req.body, req.params);
const errors: MateriaError[] = [];
if (params.length > 0) {
for (const param of params) {
const v = resolvedParams[param.name];
if (v !== undefined) {
if (param.type == 'text' || param.type == 'string') {
resolvedParams[param.name] = v;
} else if (v == 'null' || v == '') {
resolvedParams[param.name] = null;
} else if (param.type == 'date') {
resolvedParams[param.name] = new Date(v);
} else if (param.type == 'number') {
resolvedParams[param.name] = parseInt(v, 10);
} else if (param.type == 'float') {
resolvedParams[param.name] = parseFloat(v);
} else if (param.type == 'boolean') {
resolvedParams[param.name] = ! ( ! v || typeof v == 'string' && v.toLowerCase() == 'false' );
} else {
resolvedParams[param.name] = v;
}
}
if ( resolvedParams[param.name] == null && param.required) {
const error = new MateriaError(`Missing required parameter: ${param.name}`);
this.app.logger.log(` └── ${error.message}`);
errors.push(error);
}
}
}
return { resolvedParams, errors };
}
private _handleHttp(req, res, next): Promise<any> {
// tslint:disable-next-line:max-line-length
this.app.logger.log(`${chalk.bold('(Endpoint)')} Handle ${this.app.api.getMethodColor(this.method.toUpperCase())} ${chalk.bold(req.url)}`);
const params = this._handleParams(req, this.params);
if (params.errors.length > 0) {
return this.app.actions.handle('beforeEndpoint', {
method: req.method,
url: req.url
}, Object.assign({}, req.query, req.body, req.params), false).then(() => {
if (params.errors.length == 1) {
return res.status(500).send({
error: true,
message: params.errors[0].message
});
}
return res.status(500).send({
error: true,
messages: params.errors.map((err) => err.message),
multi: true
});
});
} else {
return this.app.actions.handle('beforeEndpoint', {
method: req.method,
url: req.url
}, Object.assign({}, req.query, req.body, req.params), true).then(() => {
this.app.logger.log(` └── Parameters: ${JSON.stringify(params.resolvedParams)}`);
if (this.controller && this.action) {
let obj;
try {
const instance = this._getController().instance();
this.app.logger.log(` └── Execute: (Controller) ${chalk.bold(instance.constructor.name)}.${chalk.bold(this.action)}\n`);
obj = instance[this.action](req, res, next);
} catch (e) {
function fn() {
if ( ! res.headerSent ) {
return res.status(500).json({
error: true,
message: e.message
});
}
}
return this.app.actions.handle('afterEndpoint', {
method: req.method,
url: req.url
}, Object.assign({}, req.query, req.body, req.params), false)
.then(() => fn())
.catch(() => fn());
}
if (! res.headerSent && obj && obj.then && obj.catch
&& typeof obj.then === 'function'
&& typeof obj.catch === 'function') {
return obj.then((data) => {
return this.app.actions.handle('afterEndpoint', {
method: req.method,
url: req.url
}, Object.assign({}, req.query, req.body, req.params, data), true).then(() => {
res.status(200).send(data);
});
}).catch(err => res.status(500).send(err));
} else if (res.headerSent) {
return this.app.actions.handle('afterEndpoint', {
method: req.method,
url: req.url
}, Object.assign({}, req.query, req.body, req.params), true);
}
} else {
this.app.logger.log(` └── Execute: (Query) ${chalk.bold(this.query.entity.name)}.${chalk.bold(this.query.id)}\n`);
// Get latest query version (Fix issue where query change is not immediately reflected in endpoint result)
this.query = this.app.entities.get(this.entity.name).getQuery(this.query.id);
return this.app.actions.handle('beforeEndpoint', {
method: req.method,
url: req.url
}, Object.assign({}, req.query, req.body, req.params), true)
.then(() =>
this.query.run(params.resolvedParams)
)
.then(data => {
return this.app.actions.handle('afterEndpoint', {
method: req.method,
url: req.url
}, Object.assign({}, req.query, req.body, req.params, data), true)
.then(() => {
res.status(200).json(data);
}).catch(() => {
res.status(200).json(data);
});
})
.catch(err => {
return this.app.actions.handle('afterEndpoint', {
method: req.method,
url: req.url
}, Object.assign({}, req.query, req.body, req.params), true)
.then(() => {
res.status(500).send(err);
})
.catch(() => {
res.status(500).send(err);
});
});
}
});
}
}
private _handleWS(wss): Promise<any> {
if (this.controller && this.action) {
try {
const instance = this._getController().instance();
this.app.logger.log(` └── Execute: (Controller) ${chalk.bold(instance.constructor.name)}.${chalk.bold(this.action)}\n`);
instance[this.action](wss);
} catch (e) {
}
}
return Promise.resolve();
}
} | the_stack |
import Config from "../Config";
import logger from "../logger";
import * as EventEmitter from "events";
import PacketUtils from "../PacketUtils";
import {
TcpPacket,
IpProtocol,
EthernetType,
} from "../PacketsStruct";
import ShadowsocksTcpClient from "../shadowsocks/ShadowsocksTcpClient";
import TcpPacketFormatter from "../formatters/TcpPacketFormatter";
// Largest unsigned 32-bit value; TCP sequence/ack numbers wrap at this bound.
const U_INT32_MAXIMUM = 0xffffffff;

// Addressing/window parameters of one proxied TCP flow.
// "local" is the peer on this host's side of the tunnel; "target" is the
// remote endpoint being reached through shadowsocks.
interface TcpConnection {
    ipversion: EthernetType;
    localAddress: Buffer;
    localIp: Buffer;
    localPort: number;
    targetAddress: Buffer;
    targetIp: Buffer;
    targetPort: number;
    targetReceiveWindow: number;
    localReceiveWindow: number;
}
// States of the userspace TCP state machine implemented by TcpServerSession.
enum TcpConnectionState {
    HandShake,
    HandShake_ACK,
    Data,
    /* Remote side initiated the FIN (remote active close) */
    RemoteCloseWating,
    RemoteCloseWating_1,
    /* Local side initiated the FIN (local active close) */
    LocalCloseWating,
    LocalCloseWating_1,
    Closed,
}
// Terminates one local TCP connection in userspace and relays its payload
// through a shadowsocks TCP client.
class TcpServerSession extends EventEmitter {

    // Current position in the TCP state machine (see TcpConnectionState).
    private state: TcpConnectionState;
    // Upstream shadowsocks connection carrying this session's payload.
    private shadowsocks: ShadowsocksTcpClient;
    // IP identification counter for outgoing packets.
    private currentIdNum: number;
    // Our sequence number / the ack number we send / peer's advertised window.
    private currentSeqNum: number = 0;
    private currentAckNum: number = 0;
    private currentWindowSize: number = 0;
    // Next sequence number we expect from the peer (for in-order reassembly).
    private nextExpectSeqNum: number = 0;
    // Payload buffers queued for (re)transmission.
    private sendingBuffers: Array<Buffer> = [];
    // Per-state packet handlers, indexed by TcpConnectionState.
    private routers: Array<(data: Buffer, tcpPacket: TcpPacket) => void> = [];
constructor(
public connection: TcpConnection,
private nativeWrite: (data: Buffer) => void) {
super();
this.state = TcpConnectionState.HandShake;
this.currentIdNum = 100;
this.routers[TcpConnectionState.HandShake] = this.tcpHandshark.bind(this);
this.routers[TcpConnectionState.HandShake_ACK] = this.tcpHandshark.bind(this);
this.routers[TcpConnectionState.Data] = this.tcpClientData.bind(this);
this.routers[TcpConnectionState.RemoteCloseWating] = this.tcpClientRequestToClose.bind(this);
this.routers[TcpConnectionState.RemoteCloseWating_1] = this.tcpClientRequestToClose.bind(this);
this.routers[TcpConnectionState.LocalCloseWating] = this.tcpShadowsocksClosed.bind(this);
this.routers[TcpConnectionState.LocalCloseWating_1] = this.tcpShadowsocksClosed.bind(this);
this.shadowsocks = new ShadowsocksTcpClient(
Config.get("ShadowsocksTcpHost"),
Config.get("ShadowsocksTcpPort"),
Config.get("ShadowsocksTcpPasswd"),
Config.get("ShadowsocksTcpMethod"),
);
}
public dataRouter(data: Buffer, tcpPacket: TcpPacket) {
if (this.state === TcpConnectionState.Closed) {
return;
}
if (tcpPacket.RST) {
this.tcpRst(data, tcpPacket);
return;
}
const func = this.routers[this.state];
if (func) {
func(data, tcpPacket);
}
}
public tcpHandshark(data: Buffer, tcpPacket: TcpPacket) {
if (this.state === TcpConnectionState.HandShake) {
if (!tcpPacket.SYN) {
return;
}
this.currentAckNum = tcpPacket.sequenceNumber + 1;
let ack: TcpPacket = {
sequenceNumber: this.currentSeqNum,
acknowledgmentNumber: this.currentAckNum,
totalLength: 44,
SYN: true,
ACK: true,
options: new Buffer([0x02, 0x04, 0x05, 0x78]),
};
ack = Object.assign(this.buildBaseTcpPacket(), ack);
ack.identification = 0;
const tcpAckpacket: Buffer = TcpPacketFormatter.build(ack);
this.nativeWrite(tcpAckpacket);
this.state = TcpConnectionState.HandShake_ACK;
this.shadowsocks.connect(this.connection.ipversion === EthernetType.IPv4, this.connection.localIp, this.connection.localPort);
this.shadowsocks.on("data", this.tcpShadowsocksData.bind(this));
this.shadowsocks.on("disconnected", this.tcpShadowsocksClosed.bind(this));
return;
}
if (this.state === TcpConnectionState.HandShake_ACK) {
if (tcpPacket.acknowledgmentNumber !== this.currentSeqNum + 1) {
return;
}
this.state = TcpConnectionState.Data;
this.currentAckNum = tcpPacket.sequenceNumber;
this.currentSeqNum = tcpPacket.acknowledgmentNumber;
this.currentWindowSize = tcpPacket.window;
return;
}
}
public tcpClientData(data: Buffer, tcpPacket: TcpPacket) {
if (tcpPacket.FIN) {
this.state = TcpConnectionState.RemoteCloseWating;
this.dataRouter(data, tcpPacket);
return;
}
this.currentAckNum = tcpPacket.sequenceNumber + tcpPacket.payload.length;
if (tcpPacket.payload.length > 0) {
let ack: TcpPacket = {
sequenceNumber: this.currentSeqNum,
acknowledgmentNumber: this.currentAckNum,
totalLength: 40,
ACK: true,
};
ack = Object.assign(this.buildBaseTcpPacket(), ack);
const tcpAckpacket: Buffer = TcpPacketFormatter.build(ack);
this.nativeWrite(tcpAckpacket);
this.shadowsocks.write(tcpPacket.payload);
}
/* 只接受最新的ack包作为更新窗口大小 */
if (tcpPacket.acknowledgmentNumber === this.currentSeqNum) {
this.currentWindowSize = tcpPacket.window;
this.tcpShadowsocksData();
}
// console.log(`currentSeqNum: ${this.currentSeqNum} currentAckNum: ${this.currentAckNum} currentWindowSize: ${this.currentWindowSize}`);
}
// TcpConnectionState.RemoteCloseWating
public tcpClientRequestToClose(data: Buffer, tcpPacket: TcpPacket) {
if (this.state === TcpConnectionState.RemoteCloseWating) {
this.currentSeqNum = tcpPacket.acknowledgmentNumber;
this.currentAckNum = tcpPacket.sequenceNumber + 1;
let fin: TcpPacket = {
sequenceNumber: this.currentSeqNum,
acknowledgmentNumber: this.currentAckNum,
totalLength: 40,
ACK: true,
FIN: true,
};
fin = Object.assign(this.buildBaseTcpPacket(), fin);
const tcpFinPacket: Buffer = TcpPacketFormatter.build(fin);
this.shadowsocks.removeAllListeners();
this.shadowsocks.disconnect();
this.sendingBuffers = [];
this.nativeWrite(tcpFinPacket);
this.state = TcpConnectionState.RemoteCloseWating_1;
return;
}
if (this.state === TcpConnectionState.RemoteCloseWating_1) {
this.emit("closed");
this.state = TcpConnectionState.Closed;
return;
}
}
public tcpShadowsocksData(data?: Buffer) {
if (this.state !== TcpConnectionState.Data) {
return;
}
if (data !== undefined) {
this.sendingBuffers = this.sendingBuffers.concat(this.slicePacket(data, 1446));
}
this.shadowsocks.pause(true);
if (this.currentWindowSize <= 0) {
return;
}
if (this.sendingBuffers.length === 0) {
this.shadowsocks.pause(false);
return;
}
const waitingBuffer: Buffer = this.sendingBuffers[0];
if (waitingBuffer.length < this.currentWindowSize) {
this.sendingBuffers.shift();
this.currentWindowSize = this.currentWindowSize - waitingBuffer.length;
this.sendDataPacket(waitingBuffer);
this.tcpShadowsocksData();
} else {
const smallerData: Buffer = waitingBuffer.slice(0, this.currentWindowSize);
this.sendingBuffers[0] = waitingBuffer.slice(this.currentWindowSize);
this.currentWindowSize = 0;
this.sendDataPacket(smallerData);
}
}
public sendDataPacket(data: Buffer, log: boolean = false) {
let dataPacket: TcpPacket = {
sequenceNumber: this.currentSeqNum,
acknowledgmentNumber: this.currentAckNum,
ACK: true,
PSH: true,
payload: data,
};
dataPacket = Object.assign(this.buildBaseTcpPacket(data.length), dataPacket);
this.currentSeqNum = dataPacket.sequenceNumber + dataPacket.payload.length;
if(this.currentSeqNum >= U_INT32_MAXIMUM) {
this.currentSeqNum = this.currentSeqNum - U_INT32_MAXIMUM - 1;
}
const fullPacket = TcpPacketFormatter.build(dataPacket);
this.nativeWrite(fullPacket);
}
public tcpShadowsocksClosed(data: Buffer, tcpPacket: TcpPacket) {
if (this.state === TcpConnectionState.Data) {
this.sendingBuffers = [];
this.shadowsocks.removeAllListeners();
this.state = TcpConnectionState.LocalCloseWating;
let fin: TcpPacket = {
sequenceNumber: this.currentSeqNum,
acknowledgmentNumber: this.currentAckNum,
totalLength: 40,
ACK: true,
FIN: true,
};
fin = Object.assign(this.buildBaseTcpPacket(), fin);
const tcpFinPacket: Buffer = TcpPacketFormatter.build(fin);
this.nativeWrite(tcpFinPacket);
this.state = TcpConnectionState.LocalCloseWating_1;
return;
}
if (this.state === TcpConnectionState.LocalCloseWating_1 && tcpPacket.FIN && tcpPacket.ACK) {
this.currentSeqNum = tcpPacket.acknowledgmentNumber;
this.currentAckNum = tcpPacket.sequenceNumber + 1;
let fin: TcpPacket = {
sequenceNumber: this.currentSeqNum,
acknowledgmentNumber: this.currentAckNum,
totalLength: 40,
ACK: true,
};
fin = Object.assign(this.buildBaseTcpPacket(), fin);
const tcpFinPacket: Buffer = TcpPacketFormatter.build(fin);
this.nativeWrite(tcpFinPacket);
this.state = TcpConnectionState.Closed;
this.emit("closed");
}
}
public tcpRst(data: Buffer, tcpPacket: TcpPacket) {
this.sendingBuffers = [];
this.shadowsocks.removeAllListeners();
this.shadowsocks.destroy();
this.state = TcpConnectionState.Closed;
this.emit("closed");
}
public buildBaseTcpPacket(dataLength: number = 0): TcpPacket {
this.currentIdNum = PacketUtils.increaseNumber(this.currentIdNum, 65536);
return {
type: this.connection.ipversion,
version: this.connection.ipversion === EthernetType.IPv4 ? 4 : 6,
TTL: 64,
protocol: IpProtocol.TCP,
sourceIp: this.connection.localIp,
destinationIp: this.connection.targetIp,
sourceAddress: this.connection.localAddress,
destinaltionAddress: this.connection.targetAddress,
sourcePort: this.connection.localPort,
destinationPort: this.connection.targetPort,
window: this.connection.localReceiveWindow,
totalLength: 40 + dataLength,
identification: this.currentIdNum,
TOS: 0,
};
}
public slicePacket(data: Buffer, sliceSize: number): Array<Buffer> {
const bufsArray: Array<Buffer> = [];
if (data.length <= sliceSize) {
bufsArray.push(data);
return bufsArray;
}
let sliceCount = data.length / sliceSize;
// is float.
if (sliceCount !== Math.floor(sliceCount)) {
sliceCount++;
}
sliceCount = Math.floor(sliceCount);
let lastIndex: number = 0;
let targetIndex: number = sliceSize;
for (let i = 0; i < sliceCount; i++) {
bufsArray.push(data.slice(lastIndex, targetIndex));
lastIndex = targetIndex;
targetIndex += sliceSize;
if (targetIndex > data.length) {
targetIndex = data.length;
}
}
return bufsArray;
}
// tslint:disable-next-line:member-ordering
public toString(): string {
let str = "";
str += `localAddress: ${this.connection.localAddress.map((x) => x.toString(16) as any).join(":")}\n`;
str += `targetAddress: ${this.connection.targetAddress.map((x) => x.toString(16) as any).join(":")}\n`;
str += `localIp: ${PacketUtils.ipv4ToString(this.connection.localIp)}\n`;
str += `targetIp: ${PacketUtils.ipv4ToString(this.connection.targetIp)}\n`;
str += `localPort: ${this.connection.localPort}\n`;
str += `targetPort: ${this.connection.targetPort}`;
return str;
}
}
const connections = new Map<string, TcpServerSession>();
export default function(data: Buffer, write: (data: Buffer) => void, next: () => void) {
if (PacketUtils.isIPv4(data)) {
if (!PacketUtils.isTCP(data)) { return next(); }
} else if (PacketUtils.isIPv6(data)) {
if (!PacketUtils.isTCPForIpv6(data)) { return next(); }
} else {
return next();
}
const tcpPacket: TcpPacket = TcpPacketFormatter.format(data);
const tcpConnectionId: string = PacketUtils.getConnectionId(tcpPacket);
let session = connections.get(tcpConnectionId);
if (session === undefined || session == null) {
session = new TcpServerSession({
ipversion: tcpPacket.version === 4 ? EthernetType.IPv4 : EthernetType.IPv6,
localAddress: tcpPacket.sourceAddress,
targetAddress: tcpPacket.destinaltionAddress,
localIp: tcpPacket.destinationIp,
targetIp: tcpPacket.sourceIp,
localPort: tcpPacket.destinationPort,
targetPort: tcpPacket.sourcePort,
localReceiveWindow: 65535,
targetReceiveWindow: tcpPacket.window,
}, write);
session.once("closed", () => {
delete connections[tcpConnectionId];
logger.debug(`关闭TCP链接: ${PacketUtils.ipv4ToString(tcpPacket.destinationIp)}:${tcpPacket.destinationPort} NAT:${tcpConnectionId}`);
});
connections.set(tcpConnectionId, session);
logger.debug(`创建TCP链接: ${PacketUtils.ipv4ToString(tcpPacket.destinationIp)}:${tcpPacket.destinationPort} NAT:${tcpConnectionId}`);
}
session.dataRouter(data, tcpPacket);
} | the_stack |
import { editorExtrasTag, toRadian } from '../../cocos/core';
import { RealCurve, RealInterpolationMode } from '../../cocos/core/curves';
import { EasingMethod, RealKeyframeValue } from '../../cocos/core/curves/curve';
import { ExtrapolationMode, TangentWeightMode } from '../../cocos/core/curves/real-curve-param';
import { createRealKeyframeValueLike } from './curve-test-utils';
import { serializeAndDeserialize } from './serialize-and-deserialize-curve';
/**
 * Unit tests for RealCurve: sorted keyframe assignment, serialization
 * round-trips (including the optimized linear/constant encodings), default
 * keyframe values, and evaluation across interpolation and extrapolation
 * modes.
 */
describe('Curve', () => {
    test('assign sorted', () => {
        const curve = new RealCurve();
        curve.assignSorted([0.1, 0.2, 0.3], [
            realKeyframeWithoutTangent(0.4),
            realKeyframeWithoutTangent(0.5),
            realKeyframeWithoutTangent(0.6),
        ]);
        // Assign empty(keys, values)
        curve.assignSorted([], []);
        expect(curve.keyFramesCount).toBe(0);
        // Assign empty(frames)
        curve.assignSorted([]);
        expect(curve.keyFramesCount).toBe(0);
        // Assign(keys, values), values can be number of partial RealKeyframeValue
        curve.assignSorted([0.1, 0.2, 0.3], [-3.6, { value: 2.7 }, { value: 3.8, interpolationMode: RealInterpolationMode.CUBIC }]);
        expect(Array.from(curve.times())).toStrictEqual([0.1, 0.2, 0.3]);
        expect(Array.from(curve.values())).toStrictEqual([
            createRealKeyframeValueLike({ value: -3.6 }),
            createRealKeyframeValueLike({ value: 2.7 }),
            createRealKeyframeValueLike({ value: 3.8, interpolationMode: RealInterpolationMode.CUBIC }),
        ]);
        // The count of keys and values should be same, if not, the behavior is undefined.
        // In test mode, assertion error would be thrown.
        expect(() => curve.assignSorted([0.1, 0.2], [])).toThrow();
        // Keys should be sorted, if not, the behavior is undefined.
        // In test mode, assertion error would be thrown.
        expect(() => curve.assignSorted(
            [0.2, 0.1],
            [realKeyframeWithoutTangent(0.4), realKeyframeWithoutTangent(0.5)],
        )).toThrow();
        expect(() => curve.assignSorted([
            [0.2, realKeyframeWithoutTangent(0.4)],
            [0.1, realKeyframeWithoutTangent(0.5)],
        ])).toThrow();
    });
    describe('serialization', () => {
        // Each case round-trips a curve through (de)serialization and asserts
        // the result is equivalent via compareCurves() at the end of this file.
        test('Normal', () => {
            const curve = new RealCurve();
            curve.assignSorted([0.1, 0.2, 0.3], [
                ({ value: 0.4, rightTangent: 0.0, leftTangent: 0.0, interpolationMode: RealInterpolationMode.CONSTANT }),
                ({ value: 0.5, rightTangent: 0.0, leftTangent: 0.0, interpolationMode: RealInterpolationMode.LINEAR }),
                ({
                    value: 0.6,
                    rightTangent: 0.487,
                    rightTangentWeight: 0.2,
                    leftTangent: 0.4598,
                    leftTangentWeight: 0.32,
                    interpolationMode: RealInterpolationMode.CUBIC,
                    tangentWeightMode: TangentWeightMode.BOTH,
                    easingMethod: EasingMethod.QUAD_OUT,
                    [editorExtrasTag]: { 'foo': 'bar' },
                }),
            ]);
            compareCurves(serializeAndDeserialize(curve, RealCurve), curve);
        });
        test('Optimized for linear curve', () => {
            const curve = new RealCurve();
            curve.assignSorted([0.1, 0.2, 0.3], [
                realKeyframeWithoutTangent(0.4),
                realKeyframeWithoutTangent(0.5),
                realKeyframeWithoutTangent(0.6),
            ]);
            compareCurves(serializeAndDeserialize(curve, RealCurve), curve);
        });
        test('Optimized for constant curve', () => {
            const curve = new RealCurve();
            curve.assignSorted([0.1, 0.2, 0.3], [
                realKeyframeWithoutTangent(0.4, RealInterpolationMode.CONSTANT),
                realKeyframeWithoutTangent(0.5, RealInterpolationMode.CONSTANT),
                realKeyframeWithoutTangent(0.6, RealInterpolationMode.CONSTANT),
            ]);
            compareCurves(serializeAndDeserialize(curve, RealCurve), curve);
        });
    });
    test('Default keyframe value', () => {
        const curve = new RealCurve();
        // Adding a keyframe from an empty partial should yield all defaults.
        curve.addKeyFrame(0, {});
        const keyframeValue = curve.getKeyframeValue(0);
        expect(keyframeValue.value).toBe(0.0);
        expect(keyframeValue.interpolationMode).toBe(RealInterpolationMode.LINEAR);
        expect(keyframeValue.rightTangent).toBe(0.0);
        expect(keyframeValue.rightTangentWeight).toBe(0.0);
        expect(keyframeValue.leftTangent).toBe(0.0);
        expect(keyframeValue.leftTangentWeight).toBe(0.0);
        expect(keyframeValue.easingMethod).toBe(EasingMethod.LINEAR);
    });
    describe('Evaluation', () => {
        test('Empty curve', () => {
            const curve = new RealCurve();
            expect(curve.evaluate(12.34)).toBe(0.0);
        });
        test('Interpolation mode: constant', () => {
            const curve = new RealCurve();
            curve.assignSorted([
                [0.2, createRealKeyframeValueLike({ value: 0.7, interpolationMode: RealInterpolationMode.CONSTANT, })],
                [0.4, createRealKeyframeValueLike({ value: 0.8, interpolationMode: RealInterpolationMode.LINEAR, })],
            ]);
            expect(curve.evaluate(0.28)).toBe(0.7);
        });
        test('Interpolation mode: linear', () => {
            const curve = new RealCurve();
            curve.assignSorted([
                [0.2, createRealKeyframeValueLike({ value: 0.7, interpolationMode: RealInterpolationMode.LINEAR, })],
                [0.4, createRealKeyframeValueLike({ value: 0.8, interpolationMode: RealInterpolationMode.CONSTANT, })],
            ]);
            expect(curve.evaluate(0.28)).toBeCloseTo(0.74);
        });
        test('Interpolation mode: cubic; Both weights are unused', () => {
            const curve = new RealCurve();
            curve.assignSorted([
                [0.2, createRealKeyframeValueLike({
                    interpolationMode: RealInterpolationMode.CUBIC,
                    tangentWeightMode: TangentWeightMode.NONE,
                    value: 0.7,
                    rightTangent: Math.tan(toRadian(30.0)),
                })],
                [0.4, createRealKeyframeValueLike({
                    interpolationMode: RealInterpolationMode.CUBIC,
                    tangentWeightMode: TangentWeightMode.NONE,
                    value: 0.8,
                    leftTangent: Math.tan(toRadian(30.0)),
                })],
            ]);
            expect(curve.evaluate(0.28)).toBeCloseTo(0.74074, 5);
        });
        test('Interpolation mode: cubic; Start weight is used', () => {
            const curve = new RealCurve();
            curve.assignSorted([
                [0.2, createRealKeyframeValueLike({
                    interpolationMode: RealInterpolationMode.CUBIC,
                    tangentWeightMode: TangentWeightMode.RIGHT,
                    value: 0.7,
                    rightTangent: Math.tan(toRadian(30.0)),
                })],
                [0.4, createRealKeyframeValueLike({
                    interpolationMode: RealInterpolationMode.CUBIC,
                    tangentWeightMode: TangentWeightMode.NONE,
                    value: 0.8,
                    leftTangent: Math.tan(toRadian(30.0)),
                })],
            ]);
            expect(curve.evaluate(0.28)).toBeCloseTo(0.73799, 5);
        });
        test('Interpolation mode: cubic; End weight is used', () => {
            const curve = new RealCurve();
            curve.assignSorted([
                [0.2, createRealKeyframeValueLike({
                    interpolationMode: RealInterpolationMode.CUBIC,
                    tangentWeightMode: TangentWeightMode.NONE,
                    value: 0.7,
                    rightTangent: Math.tan(toRadian(30.0)),
                })],
                [0.4, createRealKeyframeValueLike({
                    interpolationMode: RealInterpolationMode.CUBIC,
                    tangentWeightMode: TangentWeightMode.LEFT,
                    value: 0.8,
                    leftTangent: Math.tan(toRadian(30.0)),
                })],
            ]);
            expect(curve.evaluate(0.28)).toBeCloseTo(0.74229, 5);
        });
        test('Extrapolation mode: clamp', () => {
            const curve = new RealCurve();
            curve.preExtrapolation = ExtrapolationMode.CLAMP;
            curve.postExtrapolation = ExtrapolationMode.CLAMP;
            curve.assignSorted([
                [0.2, ({ value: 5.0 })],
            ]);
            // Fall back to clamp
            expect(curve.evaluate(0.05)).toBeCloseTo(5.0);
            expect(curve.evaluate(0.46)).toBeCloseTo(5.0);
        });
        test('Extrapolation mode: linear', () => {
            const curve = new RealCurve();
            curve.preExtrapolation = ExtrapolationMode.LINEAR;
            curve.postExtrapolation = ExtrapolationMode.LINEAR;
            curve.assignSorted([
                [0.2, ({ value: 5.0 })],
                [0.3, ({ value: 3.14 })],
            ]);
            expect(curve.evaluate(0.05)).toBeCloseTo(7.79);
            expect(curve.evaluate(-102.4)).toBeCloseTo(1913.36);
            expect(curve.evaluate(0.46)).toBeCloseTo(0.164);
            curve.assignSorted([
                [0.2, ({ value: 5.0 })],
            ]);
            // Fall back to clamp
            expect(curve.evaluate(0.05)).toBeCloseTo(5.0);
            expect(curve.evaluate(0.46)).toBeCloseTo(5.0);
        });
        test('Extrapolation mode: loop', () => {
            const curve = new RealCurve();
            curve.preExtrapolation = ExtrapolationMode.LOOP;
            curve.postExtrapolation = ExtrapolationMode.LOOP;
            curve.assignSorted([
                [0.2, ({ value: 5.0 })],
                [0.36, ({ value: 3.14 })],
            ]);
            expect(curve.evaluate(-2.7)).toBeCloseTo(curve.evaluate(0.34));
            expect(curve.evaluate(4.6)).toBeCloseTo(curve.evaluate(0.28));
            curve.assignSorted([
                [0.2, ({ value: 5.0 })],
            ]);
            // Fall back to clamp
            expect(curve.evaluate(0.05)).toBeCloseTo(5.0);
            expect(curve.evaluate(0.46)).toBeCloseTo(5.0);
        });
        test('Extrapolation mode: ping-pong', () => {
            const curve = new RealCurve();
            curve.preExtrapolation = ExtrapolationMode.PING_PONG;
            curve.postExtrapolation = ExtrapolationMode.PING_PONG;
            curve.assignSorted([
                [0.2, ({ value: 5.0 })],
                [0.36, ({ value: 3.14 })],
            ]);
            expect(curve.evaluate(-2.7)).toBeCloseTo(curve.evaluate(0.22));
            expect(curve.evaluate(4.6)).toBeCloseTo(curve.evaluate(0.28));
            expect(curve.evaluate(4.77)).toBeCloseTo(curve.evaluate(0.29));
            curve.assignSorted([
                [0.2, ({ value: 5.0 })],
            ]);
            // Fall back to clamp
            expect(curve.evaluate(0.05)).toBeCloseTo(5.0);
            expect(curve.evaluate(0.46)).toBeCloseTo(5.0);
        });
    });
});
/**
 * Convenience factory for a keyframe value carrying only a value and an
 * interpolation mode (LINEAR by default), with no tangent information.
 */
function realKeyframeWithoutTangent (value: number, interpolationMode: RealInterpolationMode = RealInterpolationMode.LINEAR): Partial<RealKeyframeValue> {
    const keyframeFields = { value, interpolationMode };
    return createRealKeyframeValueLike(keyframeFields);
}
function compareCurves (left: RealCurve, right: RealCurve, numDigits = 2) {
expect(left.keyFramesCount).toBe(right.keyFramesCount);
for (let iKeyframe = 0; iKeyframe < left.keyFramesCount; ++iKeyframe) {
expect(left.getKeyframeTime(iKeyframe)).toBeCloseTo(right.getKeyframeTime(iKeyframe), numDigits);
const leftKeyframeValue = left.getKeyframeValue(iKeyframe);
const rightKeyframeValue = right.getKeyframeValue(iKeyframe);
expect(leftKeyframeValue.value).toBeCloseTo(rightKeyframeValue.value, numDigits);
expect(leftKeyframeValue.rightTangent).toBeCloseTo(rightKeyframeValue.rightTangent, numDigits);
expect(leftKeyframeValue.rightTangentWeight).toBeCloseTo(rightKeyframeValue.rightTangentWeight, numDigits);
expect(leftKeyframeValue.leftTangent).toBeCloseTo(rightKeyframeValue.leftTangent, numDigits);
expect(leftKeyframeValue.leftTangentWeight).toBeCloseTo(rightKeyframeValue.leftTangentWeight, numDigits);
expect(leftKeyframeValue.interpolationMode).toStrictEqual(rightKeyframeValue.interpolationMode);
expect(leftKeyframeValue.easingMethod).toStrictEqual(rightKeyframeValue.easingMethod);
expect(leftKeyframeValue[editorExtrasTag]).toStrictEqual(rightKeyframeValue[editorExtrasTag]);
}
} | the_stack |
import Vue from 'vue'
// Components
import VSelect from '../VSelect'
import {
VListItem,
VListItemTitle,
VListItemContent,
} from '../../VList'
// Utilities
import {
mount,
Wrapper,
} from '@vue/test-utils'
/**
 * Unit tests for VSelect: item selection and rendering, scoped slots, menu
 * behavior, value comparators, and return-object handling. Each test mounts
 * with a stubbed $vuetify and a [data-app] root element so menus have a
 * detach target.
 */
// eslint-disable-next-line max-statements
describe('VSelect.ts', () => {
  type Instance = InstanceType<typeof VSelect>
  let mountFunction: (options?: object) => Wrapper<Instance>
  let el
  beforeEach(() => {
    // Menus detach to the nearest [data-app] ancestor; provide one per test.
    el = document.createElement('div')
    el.setAttribute('data-app', 'true')
    document.body.appendChild(el)
    mountFunction = (options = {}) => {
      return mount(VSelect, {
        // https://github.com/vuejs/vue-test-utils/issues/1130
        sync: false,
        mocks: {
          $vuetify: {
            lang: {
              t: (val: string) => val,
            },
            theme: {
              dark: false,
            },
          },
        },
        ...options,
      })
    }
  })
  afterEach(() => {
    document.body.removeChild(el)
  })
  it('should return numeric 0', async () => {
    // Guards against falsy item values (0) being dropped from emissions.
    const item = { value: 0, text: '0' }
    const wrapper = mountFunction({
      propsData: {
        value: null,
        items: [item],
        multiple: true,
      },
    })
    const change = jest.fn()
    wrapper.vm.$on('change', change)
    wrapper.vm.selectItem(item)
    await wrapper.vm.$nextTick()
    expect(change).toHaveBeenCalledWith([0])
  })
  it('should disable list items', () => {
    const wrapper = mountFunction({
      propsData: {
        eager: true,
        items: [{
          text: 'item',
          disabled: true,
        }],
      },
    })
    const item = wrapper.find('.v-list-item--disabled')
    expect(item.element.tabIndex).toBe(-1)
  })
  it('should render v-select correctly when using v-list-item in item scope slot', async () => {
    const items = Array.from({ length: 2 }, (x, i) => ({ value: i, text: `Text ${i}` }))
    const vm = new Vue({
      components: {
        VListItem,
      },
    })
    const itemSlot = ({ item, attrs, on }) => vm.$createElement('v-list-item', {
      on,
      ...attrs,
      class: item.value % 2 === 0 ? '' : 'red lighten-1',
    }, [
      item.text,
    ])
    const selectionSlot = ({ item }) => vm.$createElement('v-list-item', item.value)
    const component = Vue.component('test', {
      render (h) {
        return h(VSelect, {
          props: { items, value: 1 },
          scopedSlots: {
            item: itemSlot,
            selection: selectionSlot,
          },
        })
      },
    })
    const wrapper = mountFunction(component)
    wrapper.vm.$children[0].inputValue = items[0]
    await wrapper.vm.$nextTick()
    expect(wrapper.html()).toMatchSnapshot()
  })
  it('should render v-select correctly when not using v-list-item in item scope slot', async () => {
    const items = Array.from({ length: 2 }, (x, i) => ({ value: i, text: `Text ${i}` }))
    const vm = new Vue({
      components: {
        VListItemTitle,
        VListItemContent,
      },
    })
    const itemSlot = ({ item }) => vm.$createElement('v-list-item-content', {
      class: item.value % 2 === 0 ? '' : 'red lighten-1',
    }, [
      vm.$createElement('v-list-item-title', [item.value]),
    ])
    const component = Vue.component('test', {
      render (h) {
        return h(VSelect, {
          props: { items },
          scopedSlots: { item: itemSlot },
        })
      },
    })
    const wrapper = mountFunction(component)
    wrapper.vm.$children[0].inputValue = items[0]
    await wrapper.vm.$nextTick()
    expect(wrapper.html()).toMatchSnapshot()
  })
  it('should render v-select correctly when not using scope slot', async () => {
    const items = Array.from({ length: 2 }, (x, i) => ({ value: i, text: `Text ${i}` }))
    const component = Vue.component('test', {
      render (h) {
        return h(VSelect, {
          props: { items },
        })
      },
    })
    const wrapper = mountFunction(component)
    wrapper.vm.$children[0].inputValue = items[0]
    await wrapper.vm.$nextTick()
    expect(wrapper.html()).toMatchSnapshot()
  })
  it('should not close menu when using multiple prop', async () => {
    const wrapper = mountFunction({
      attachToDocument: true,
      propsData: {
        items: [1, 2, 3, 4],
        multiple: true,
      },
    })
    const blur = jest.fn()
    wrapper.vm.$on('blur', blur)
    const menu = wrapper.find('.v-input__slot')
    menu.trigger('click')
    await wrapper.vm.$nextTick()
    expect(wrapper.vm.isFocused).toBe(true)
    expect(wrapper.vm.isMenuActive).toBe(true)
    const item = wrapper.find('.v-list-item')
    item.trigger('click')
    await wrapper.vm.$nextTick()
    expect(wrapper.vm.isMenuActive).toBe(true)
  })
  it('should render aria-hidden=true on arrow icon', async () => {
    const wrapper = mountFunction()
    const icon = wrapper.find('.v-icon')
    expect(icon.attributes('aria-hidden')).toBe('true')
  })
  // TODO: this fails without sync, nextTick doesn't help
  // https://github.com/vuejs/vue-test-utils/issues/1130
  it.skip('should only show items if they are in items', async () => {
    const wrapper = mountFunction({
      propsData: {
        value: 'foo',
        items: ['foo'],
      },
    })
    await wrapper.vm.$nextTick()
    expect(wrapper.vm.internalValue).toEqual('foo')
    expect(wrapper.vm.selectedItems).toEqual(['foo'])
    expect(wrapper.html()).toMatchSnapshot()
    wrapper.setProps({ value: 'bar' })
    await wrapper.vm.$nextTick()
    expect(wrapper.vm.internalValue).toEqual('bar')
    expect(wrapper.vm.selectedItems).toEqual([])
    expect(wrapper.html()).toMatchSnapshot()
    wrapper.setProps({ items: ['foo', 'bar'] })
    await wrapper.vm.$nextTick()
    expect(wrapper.vm.internalValue).toEqual('bar')
    expect(wrapper.vm.selectedItems).toEqual(['bar'])
    expect(wrapper.html()).toMatchSnapshot()
    wrapper.setProps({ multiple: true })
    await wrapper.vm.$nextTick()
    wrapper.setProps({ value: ['foo', 'bar'] })
    await wrapper.vm.$nextTick()
    expect(wrapper.vm.internalValue).toEqual(['foo', 'bar'])
    expect(wrapper.vm.selectedItems).toEqual(['foo', 'bar'])
    expect(wrapper.html()).toMatchSnapshot()
  })
  // TODO: this fails without sync, nextTick doesn't help
  // https://github.com/vuejs/vue-test-utils/issues/1130
  it.skip('should update the displayed value when items changes', async () => {
    const wrapper = mountFunction({
      propsData: {
        value: 1,
        items: [],
      },
    })
    wrapper.setProps({ items: [{ text: 'foo', value: 1 }] })
    await wrapper.vm.$nextTick()
    expect(wrapper.vm.selectedItems).toContainEqual({ text: 'foo', value: 1 })
  })
  it('should render select menu with content class', async () => {
    const items = ['abc']
    const wrapper = mountFunction({
      propsData: {
        menuProps: { contentClass: 'v-menu-class', eager: true },
        items,
      },
    })
    const menu = wrapper.find('.v-menu__content')
    expect(menu.element.classList).toContain('v-menu-class')
  })
  it('should have deletable chips', async () => {
    const wrapper = mountFunction({
      attachToDocument: true,
      propsData: {
        chips: true,
        deletableChips: true,
        items: ['foo', 'bar'],
        value: 'foo',
      },
    })
    await wrapper.vm.$nextTick()
    const chip = wrapper.find('.v-chip')
    expect(!!chip).toBe(true)
  })
  it('should escape items in menu', async () => {
    // HTML in item text must be rendered inert, not interpreted.
    const wrapper = mountFunction({
      propsData: {
        eager: true,
        items: ['<strong>foo</strong>'],
      },
    })
    const tileTitle = wrapper.find('.v-list-item__title')
    expect(tileTitle.html()).toMatchSnapshot()
  })
  it('should use value comparator', async () => {
    const wrapper = mountFunction({
      attachToDocument: true,
      propsData: {
        multiple: true,
        items: [
          { text: 'one', value: 1 },
          { text: 'two', value: 2 },
          { text: 'three', value: 3 },
        ],
        itemText: 'text',
        itemValue: 'value',
        valueComparator: (a, b) => Math.round(a) === Math.round(b),
        value: [3.1],
      },
    })
    expect(wrapper.vm.selectedItems).toHaveLength(1)
    expect(wrapper.vm.selectedItems[0].value).toBe(3)
  })
  it('should not open if readonly', async () => {
    const wrapper = mountFunction({
      propsData: {
        readonly: true,
        items: ['foo', 'bar'],
      },
    })
    wrapper.trigger('click')
    await wrapper.vm.$nextTick()
    expect(wrapper.vm.isMenuActive).toBe(false)
    wrapper.find('.v-input__append-inner').trigger('click')
    await wrapper.vm.$nextTick()
    expect(wrapper.vm.isMenuActive).toBe(false)
  })
  it('can use itemValue as function', async () => {
    const wrapper = mountFunction({
      attachToDocument: true,
      propsData: {
        multiple: true,
        items: [
          { text: 'one', v1: 'prop v1' },
          { text: 'two', v2: 'prop v2' },
          { text: 'three', v1: 'also prop v1' },
        ],
        itemText: 'text',
        itemValue: item => item.hasOwnProperty('v1') ? item.v1 : item.v2,
        value: ['prop v1', 'prop v2'],
      },
    })
    expect(wrapper.vm.selectedItems).toHaveLength(2)
    expect(wrapper.vm.getValue(wrapper.vm.selectedItems[0])).toBe('prop v1')
    expect(wrapper.vm.getValue(wrapper.vm.selectedItems[1])).toBe('prop v2')
  })
  it('should work correctly with return-object', async () => {
    const wrapper = mountFunction({
      attachToDocument: true,
      propsData: {
        multiple: false,
        returnObject: true,
        items: [
          { text: 'one', value: { x: [1, 2], y: ['a', 'b'] } },
          { text: 'two', value: { x: [3, 4], y: ['a', 'b'] } },
          { text: 'three', value: { x: [1, 2], y: ['a', 'c'] } },
        ],
        itemText: 'text',
        itemValue: 'value',
        value: { text: 'two', value: { x: [3, 4], y: ['a', 'b'] } },
      },
    })
    expect(wrapper.vm.selectedItems).toHaveLength(1)
    expect(wrapper.vm.internalValue).toEqual({ text: 'two', value: { x: [3, 4], y: ['a', 'b'] } })
  })
  it('should work correctly with return-object [multiple]', async () => {
    const wrapper = mountFunction({
      attachToDocument: true,
      propsData: {
        multiple: true,
        returnObject: true,
        items: [
          { text: 'one', value: { x: [1, 2], y: ['a', 'b'] } },
          { text: 'two', value: { x: [3, 4], y: ['a', 'b'] } },
          { text: 'three', value: { x: [1, 2], y: ['a', 'c'] } },
        ],
        itemText: 'text',
        itemValue: 'value',
        value: [
          { text: 'two', value: { x: [3, 4], y: ['a', 'b'] } },
          { text: 'one', value: { x: [1, 2], y: ['a', 'b'] } },
        ],
      },
    })
    expect(wrapper.vm.selectedItems).toHaveLength(2)
    expect(wrapper.vm.internalValue[0]).toEqual({ text: 'two', value: { x: [3, 4], y: ['a', 'b'] } })
    expect(wrapper.vm.internalValue[1]).toEqual({ text: 'one', value: { x: [1, 2], y: ['a', 'b'] } })
  })
  it('should provide the correct default value', () => {
    const wrapper = mountFunction()
    expect(wrapper.vm.internalValue).toBeUndefined()
    const wrapper2 = mountFunction({
      propsData: { multiple: true },
    })
    expect(wrapper2.vm.internalValue).toEqual([])
  })
  it('should use slotted no-data', () => {
    const wrapper = mountFunction({
      propsData: {
        eager: true,
        items: ['foo'],
      },
      slots: {
        'no-data': [{
          render: h => h('div', 'foo'),
        }],
      },
    })
    const list = wrapper.find('.v-list')
    expect(wrapper.vm.$slots['no-data']).toBeTruthy()
    expect(list.html()).toMatchSnapshot()
  })
  it('should change autocomplete attribute', () => {
    const wrapper = mountFunction({
      attrs: {
        autocomplete: 'on',
      },
    })
    expect(wrapper.vm.$attrs.autocomplete).toBe('on')
  })
})
import {
DydxBridgeActionType,
DydxBridgeData,
dydxBridgeDataEncoder,
encodeERC20AssetData,
encodeERC20BridgeAssetData,
IDydxContract,
} from '@0x/contracts-asset-proxy';
import { ERC20TokenContract } from '@0x/contracts-erc20';
import { ExchangeContract } from '@0x/contracts-exchange';
import {
blockchainTests,
constants,
expect,
FillEventArgs,
getRandomInteger,
Numberish,
} from '@0x/contracts-test-utils';
import { orderHashUtils } from '@0x/order-utils';
import { Order } from '@0x/types';
import { BigNumber, fromTokenUnitAmount, logUtils } from '@0x/utils';
import { DecodedLogEntry } from 'ethereum-types';
import { contractAddresses } from '../mainnet_fork_utils';
// Mainnet accounts impersonated in fork mode; both must hold Dai at the
// forked block for the benchmark fills to succeed.
// A chonky dai wallet.
const MAKER_ADDRESS = '0xe235AAa27428E32cA14089b03F532c571C7ab3c8';
// Also a chonky dai wallet.
const TAKER_ADDRESS = '0x66c57bf505a85a74609d2c83e94aabb26d691e1f';
// Fork-mode configuration: unlock both wallets so transactions can be sent
// from them without their private keys.
blockchainTests.configure({
    fork: {
        unlockedAccounts: [TAKER_ADDRESS, MAKER_ADDRESS],
    },
});
// Gas benchmarks for filling 0x orders whose maker asset routes through the
// DydxBridge. Declared with `.skip` so it never runs in CI; remove `.skip`
// and run against a mainnet fork to refresh the "Last run" numbers below.
blockchainTests.fork.skip('DydxBridge fill benchmarks', env => {
    let exchange: ExchangeContract;
    let dydx: IDydxContract;
    // NOTE: constants referenced here (BRIDGE_ADDRESS, DYDX_ADDRESS, ...) are
    // declared further down in this callback; that is safe because the whole
    // callback body executes before mocha invokes this `before` hook.
    before(async () => {
        exchange = new ExchangeContract(contractAddresses.exchange, env.provider, env.txDefaults);
        dydx = new IDydxContract(DYDX_ADDRESS, env.provider, env.txDefaults);
        // Initialize a dydx account with some Dai collateral and USDC borrowed.
        await approveSpenderAsync(MAKER_ADDRESS, BRIDGE_ADDRESS, DAI_ADDRESS);
        await approveSpenderAsync(MAKER_ADDRESS, DYDX_ADDRESS, DAI_ADDRESS);
        // Let the bridge operate the maker's dydx account.
        await dydx
            .setOperators([{ operator: BRIDGE_ADDRESS, trusted: true }])
            .awaitTransactionSuccessAsync({ from: MAKER_ADDRESS }, { shouldValidate: false });
        await depositAndWithdrawAsync(100, 1);
    });
    // Grants `spenderAddress` an unlimited ERC20 allowance from `ownerAddress`.
    async function approveSpenderAsync(
        ownerAddress: string,
        spenderAddress: string,
        tokenAddress: string,
    ): Promise<void> {
        const token = new ERC20TokenContract(tokenAddress, env.provider, env.txDefaults);
        await token.approve(spenderAddress, constants.MAX_UINT256).awaitTransactionSuccessAsync(
            {
                from: ownerAddress,
            },
            { shouldValidate: false },
        );
    }
    const ZERO = constants.ZERO_AMOUNT;
    const BRIDGE_ADDRESS = contractAddresses.dydxBridge;
    // Random account number so each run uses an isolated dydx account.
    const DYDX_ACCOUNT_ID = getRandomInteger(0, constants.MAX_UINT256);
    // Mainnet deployment addresses.
    const DYDX_ADDRESS = '0x1E0447b19BB6EcFdAe1e4AE1694b0C3659614e4e';
    const DAI_ADDRESS = '0x6B175474E89094C44Da98b954EedeAC495271d0F';
    const USDC_ADDRESS = '0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48';
    // Token decimals plus each token's dydx market id.
    const TOKEN_INFO: { [addr: string]: { decimals: number; marketId: number } } = {
        [DAI_ADDRESS]: {
            decimals: 18,
            marketId: 3,
        },
        [USDC_ADDRESS]: {
            decimals: 6,
            marketId: 2,
        },
    };
    // Builds ERC20Bridge asset data that (optionally) deposits `fromToken`
    // scaled by `depositRate` and withdraws `toToken` from the dydx account.
    // A `depositRate` of 0 omits the deposit action entirely.
    function encodeDydxBridgeAssetData(fromToken: string, toToken: string, depositRate: number = 1): string {
        const fromTokenMarketId = new BigNumber(TOKEN_INFO[fromToken].marketId);
        const toTokenMarketId = new BigNumber(TOKEN_INFO[toToken].marketId);
        const bridgeData: DydxBridgeData = {
            accountNumbers: [DYDX_ACCOUNT_ID],
            actions: [
                ...(depositRate > 0
                    ? [
                          {
                              actionType: DydxBridgeActionType.Deposit,
                              accountIdx: ZERO,
                              marketId: fromTokenMarketId,
                              ...createConversionFraction(toToken, fromToken, depositRate),
                          },
                      ]
                    : []),
                {
                    actionType: DydxBridgeActionType.Withdraw,
                    accountIdx: ZERO,
                    marketId: toTokenMarketId,
                    ...createConversionFraction(toToken, toToken, 1),
                },
            ],
        };
        return encodeERC20BridgeAssetData(
            toToken,
            contractAddresses.dydxBridge,
            dydxBridgeDataEncoder.encode({ bridgeData }),
        );
    }
    // Create fraction with default 18 decimal precision.
    // Returns a numerator/denominator pair converting `fromToken` units into
    // `toToken` units at `rate`, accounting for differing token decimals.
    function createConversionFraction(
        fromToken: string,
        toToken: string,
        rate: number,
    ): {
        conversionRateNumerator: BigNumber;
        conversionRateDenominator: BigNumber;
    } {
        const fromDecimals = TOKEN_INFO[fromToken].decimals;
        const toDecimals = TOKEN_INFO[toToken].decimals;
        return {
            conversionRateNumerator: fromTokenUnitAmount(rate, toDecimals),
            conversionRateDenominator: fromTokenUnitAmount(1, fromDecimals),
        };
    }
    // Local mirrors of the dydx protocol enums consumed by `operate()`.
    enum DydxActionType {
        Deposit = 0,
        Withdraw = 1,
    }
    enum DydxAssetDenomination {
        Wei = 0,
        Par = 1,
    }
    enum DydxAssetReference {
        Delta = 0,
        Target = 1,
    }
    // Seeds the maker's dydx account: deposits `depositSize` DAI as collateral
    // and withdraws (borrows) `withdrawSize` USDC in a single operate() call.
    async function depositAndWithdrawAsync(depositSize: Numberish, withdrawSize: Numberish): Promise<void> {
        const dai = TOKEN_INFO[DAI_ADDRESS];
        const usdc = TOKEN_INFO[USDC_ADDRESS];
        await dydx
            .operate(
                [{ owner: MAKER_ADDRESS, number: DYDX_ACCOUNT_ID }],
                [
                    {
                        actionType: DydxActionType.Deposit,
                        accountIdx: ZERO,
                        amount: {
                            sign: true,
                            denomination: DydxAssetDenomination.Wei,
                            ref: DydxAssetReference.Delta,
                            value: fromTokenUnitAmount(depositSize, dai.decimals),
                        },
                        primaryMarketId: new BigNumber(dai.marketId),
                        // NOTE(review): NULL_ADDRESS coerced through BigNumber
                        // evaluates to 0, i.e. "no secondary market" — confirm
                        // this is intended rather than a market-id constant.
                        secondaryMarketId: new BigNumber(constants.NULL_ADDRESS),
                        otherAddress: MAKER_ADDRESS,
                        otherAccountIdx: ZERO,
                        data: constants.NULL_BYTES,
                    },
                    {
                        actionType: DydxActionType.Withdraw,
                        accountIdx: ZERO,
                        amount: {
                            sign: false,
                            denomination: DydxAssetDenomination.Wei,
                            ref: DydxAssetReference.Delta,
                            value: fromTokenUnitAmount(withdrawSize, usdc.decimals),
                        },
                        primaryMarketId: new BigNumber(usdc.marketId),
                        secondaryMarketId: new BigNumber(constants.NULL_ADDRESS),
                        otherAddress: MAKER_ADDRESS,
                        otherAccountIdx: ZERO,
                        data: constants.NULL_BYTES,
                    },
                ],
            )
            .awaitTransactionSuccessAsync({ from: MAKER_ADDRESS }, { shouldValidate: false });
    }
    const DYDX_ASSET_DATA = encodeDydxBridgeAssetData(DAI_ADDRESS, USDC_ADDRESS);
    const DAI_ASSET_DATA = encodeERC20AssetData(DAI_ADDRESS);
    // 0x signature type 0x06 = PreSigned; validated via exchange.preSign().
    const SIGNATURE_PRESIGN = '0x06';
    const PROTOCOL_FEE = 150e3;
    const ONE_DAY = 60 * 60 * 24;
    // Template order: maker sells 50 USDC (via the dydx bridge) for 100 DAI.
    const ORDER_DEFAULTS: Order = {
        chainId: 1,
        exchangeAddress: contractAddresses.exchange,
        expirationTimeSeconds: new BigNumber(Math.floor(Date.now() / 1e3) + ONE_DAY),
        salt: getRandomInteger(0, constants.MAX_UINT256),
        makerAddress: MAKER_ADDRESS,
        feeRecipientAddress: constants.NULL_ADDRESS,
        senderAddress: constants.NULL_ADDRESS,
        takerAddress: constants.NULL_ADDRESS,
        makerAssetAmount: fromTokenUnitAmount(50, TOKEN_INFO[USDC_ADDRESS].decimals),
        takerAssetAmount: fromTokenUnitAmount(100, TOKEN_INFO[USDC_ADDRESS].decimals),
        makerFee: constants.ZERO_AMOUNT,
        takerFee: constants.ZERO_AMOUNT,
        makerAssetData: DYDX_ASSET_DATA,
        takerAssetData: DAI_ASSET_DATA,
        makerFeeAssetData: constants.NULL_BYTES,
        takerFeeAssetData: constants.NULL_BYTES,
    };
    describe('gas usage', () => {
        // Builds an order from ORDER_DEFAULTS + `fields`, pre-signs it on the
        // exchange, and approves the ERC20 proxy to spend the taker's DAI.
        async function prepareOrderAsync(fields: Partial<Order> = {}): Promise<Order> {
            const order = {
                ...ORDER_DEFAULTS,
                ...fields,
            };
            const orderHash = orderHashUtils.getOrderHash(order);
            await exchange.preSign(orderHash).awaitTransactionSuccessAsync(
                {
                    from: order.makerAddress,
                },
                { shouldValidate: false },
            );
            await approveSpenderAsync(TAKER_ADDRESS, contractAddresses.erc20Proxy, DAI_ADDRESS);
            return order;
        }
        // Last run: 375066
        it('filling a DAI->USDC dydx order with a deposit action', async () => {
            const order = await prepareOrderAsync();
            const receipt = await exchange
                .fillOrder(order, order.takerAssetAmount, SIGNATURE_PRESIGN)
                .awaitTransactionSuccessAsync(
                    {
                        from: TAKER_ADDRESS,
                        value: PROTOCOL_FEE,
                        gasPrice: 1,
                    },
                    { shouldValidate: false },
                );
            const fillEvent = (receipt.logs as Array<DecodedLogEntry<FillEventArgs>>).find(log => log.event === 'Fill');
            // NOTE(review): chai's `.exist` is normally a property assertion;
            // calling it with '' relies on plugin behavior — confirm it asserts
            // rather than silently passing.
            expect(fillEvent).to.exist('');
            logUtils.log(`gas used: ${receipt.gasUsed}`);
        });
        // Last run: 315896
        it('filling a DAI->USDC dydx order with no deposit action', async () => {
            const order = await prepareOrderAsync({
                // depositRate 0 => withdraw-only bridge data (no deposit action).
                makerAssetData: encodeDydxBridgeAssetData(DAI_ADDRESS, USDC_ADDRESS, 0),
            });
            const receipt = await exchange
                .fillOrder(order, order.takerAssetAmount, SIGNATURE_PRESIGN)
                .awaitTransactionSuccessAsync(
                    {
                        from: TAKER_ADDRESS,
                        value: PROTOCOL_FEE,
                        gasPrice: 1,
                    },
                    { shouldValidate: false },
                );
            const fillEvent = (receipt.logs as Array<DecodedLogEntry<FillEventArgs>>).find(log => log.event === 'Fill');
            expect(fillEvent).to.exist('');
            logUtils.log(`gas used: ${receipt.gasUsed}`);
        });
    });
});
import _ from 'lodash';
import { DataTable, SQLResult } from 'Models';
import moment from 'moment';
import {
getTenants,
getInstances,
getInstance,
putInstance,
setInstanceState,
setTableState,
dropInstance,
updateInstanceTags,
getClusterConfig,
getQueryTables,
getTableSchema,
getQueryResult,
getTenantTable,
getTableSize,
getIdealState,
getExternalView,
getTenantTableDetails,
getSegmentMetadata,
reloadSegment,
getClusterInfo,
zookeeperGetList,
zookeeperGetData,
zookeeperGetListWithStat,
zookeeperGetStat,
zookeeperPutData,
zookeeperDeleteNode,
getBrokerListOfTenant,
getServerListOfTenant,
deleteSegment,
putTable,
putSchema,
deleteTable,
deleteSchema,
reloadAllSegments,
reloadStatus,
rebalanceServersForTable,
rebalanceBrokersForTable,
validateSchema,
validateTable,
saveSchema,
saveTable,
getSchema,
getSchemaList,
getState,
getInfo,
authenticateUser,
getSegmentDebugInfo,
requestTable,
requestUserList,
requestAddUser,
requestDeleteUser,
requestUpdateUser
} from '../requests';
import { baseApi } from './axios-config';
import Utils from './Utils';
const JSONbig = require('json-bigint')({'storeAsString': true})
// This method is used to display tenants listing on cluster manager home page
// API: /tenants
// Expected Output: {columns: [], records: []}
const getTenantsData = async () => {
  const { data } = await getTenants();
  // A tenant may serve brokers, servers, or both; show the union once.
  const tenantNames = _.union(data.SERVER_TENANTS, data.BROKER_TENANTS);
  const finalResponse = {
    columns: ['Tenant Name', 'Server', 'Broker', 'Tables'],
    records: []
  };
  const serverPromiseArr = [];
  const brokerPromiseArr = [];
  const tablePromiseArr = [];
  tenantNames.forEach((tenantName) => {
    finalResponse.records.push([tenantName]);
    serverPromiseArr.push(getServerOfTenant(tenantName));
    brokerPromiseArr.push(getBrokerOfTenant(tenantName));
    tablePromiseArr.push(getTenantTable(tenantName));
  });
  // Fetch server/broker/table info for every tenant in parallel.
  const [serversResponseData, brokersResponseData, tablesResponseData] = await Promise.all([
    Promise.all(serverPromiseArr),
    Promise.all(brokerPromiseArr),
    Promise.all(tablePromiseArr)
  ]);
  tablesResponseData.forEach((tableResult, index) => {
    const serverCount = serversResponseData[index]?.length || 0;
    const brokerCount = brokersResponseData[index]?.length || 0;
    const tablesCount = tableResult.data.tables.length;
    finalResponse.records[index].push(serverCount, brokerCount, tablesCount);
  });
  return finalResponse;
};
// This method is used to fetch all instances on cluster manager home page
// API: /instances
// Expected Output: {Controller: ['Controller1', 'Controller2'], Broker: ['Broker1', 'Broker2']}
const getAllInstances = async () => {
  const { data } = await getInstances();
  // Group instance names by their type prefix (the part before the first '_'),
  // e.g. "Controller_host_9000" -> key "Controller".
  const groupedData: DataTable = {};
  for (const instanceName of data.instances) {
    const key = instanceName.split('_')[0].trim();
    groupedData[key] = [...(groupedData[key] || []), instanceName];
  }
  // Spread after the explicit key so Controller always appears first.
  return { 'Controller': groupedData.Controller, ...groupedData };
};
// This method is used to display instance data on cluster manager home page
// API: /instances/:instanceName
// Expected Output: {columns: [], records: []}
const getInstanceData = async (instances, liveInstanceArr) => {
  const results = await Promise.all(instances.map((inst) => getInstance(inst)));
  const records = results.map(({ data }) => [
    data.instanceName,
    data.enabled,
    data.hostName,
    data.port,
    // An instance is "Alive" when it appears in the live-instances list.
    liveInstanceArr.indexOf(data.instanceName) > -1 ? 'Alive' : 'Dead'
  ]);
  return {
    columns: ['Instance Name', 'Enabled', 'Hostname', 'Port', 'Status'],
    records
  };
};
// This method is used to fetch the cluster name
// API: /cluster/info
// Expected Output: {clusterName: ''}
const getClusterName = async () => {
  const response = await getClusterInfo();
  return response.data.clusterName;
};
// This method is used to fetch the array of live instance names
// API: /zk/ls?path=:ClusterName/LIVEINSTANCES
// Expected Output: []
const getLiveInstance = (clusterName) => {
  const params = encodeURIComponent(`/${clusterName}/LIVEINSTANCES`);
  // zookeeperGetList already resolves to the desired payload.
  return zookeeperGetList(params);
};
// This method is used to display the cluster configuration on the cluster
// manager home page.
// API: /cluster/configs
// Expected Output: {columns: [], records: []}
const getClusterConfigData = async () => {
  const { data } = await getClusterConfig();
  return {
    columns: ['Property', 'Value'],
    records: Object.entries(data).map(([property, value]) => [property, value]),
  };
};
// This method is used to fetch the raw cluster configuration
// API: /cluster/configs
// Expected Output: {key: value}
const getClusterConfigJSON = async () => {
  const response = await getClusterConfig();
  return response.data;
};
// This method is used to display table listing on query page
// API: /tables
// Expected Output: {columns: [], records: []}
// `bothType` fetches both realtime and offline table lists; otherwise a
// single un-typed listing is fetched. The options object now defaults to {}
// so the function can also be called with no arguments (previously calling
// getQueryTablesList() threw a TypeError while destructuring undefined).
const getQueryTablesList = ({bothType = false} = {}) => {
  const promiseArr = bothType ? [getQueryTables('realtime'), getQueryTables('offline')] : [getQueryTables()];

  return Promise.all(promiseArr).then((results) => {
    const responseObj = {
      columns: ['Tables'],
      records: []
    };
    results.forEach((result) => {
      result.data.tables.forEach((table) => {
        responseObj.records.push([table]);
      });
    });
    return responseObj;
  });
};
// This method is used to display a particular table's schema on the query page
// API: /tables/:tableName/schema
const getTableSchemaData = async (tableName) => {
  const response = await getTableSchema(tableName);
  return response.data;
};
// Normalizes a SQL result payload to an object. String payloads are parsed
// with the big-int-safe parser first; on failure they are round-tripped
// through JSON as a fallback. Non-string payloads are returned untouched.
const getAsObject = (str: SQLResult) => {
  const isStringLike = typeof str === 'string' || str instanceof String;
  if (!isStringLike) {
    return str;
  }
  try {
    // JSONbig keeps 64-bit integers intact by storing them as strings.
    return JSONbig.parse(str);
  } catch (e) {
    return JSON.parse(JSON.stringify(str));
  }
};
// This method is used to display query output in tabular format as well as JSON format on query page
// API: /:urlName (Eg: sql or pql)
// Expected Output: {columns: [], records: []}
// `checkedOptions.querySyntaxPQL` switches parsing between the PQL response
// shape (selectionResults / aggregationResults) and the SQL response shape
// (resultTable). Always returns {error, result, queryStats, data}.
const getQueryResults = (params, checkedOptions) => {
  return getQueryResult(params).then(({ data }) => {
    // Response may arrive as a raw string (big-int-safe parse) or an object.
    let queryResponse = getAsObject(data);
    let errorStr = '';
    let dataArray = [];
    let columnList = [];
    // if sql api throws error, handle here
    if(typeof queryResponse === 'string'){
      errorStr = queryResponse;
    } else if (queryResponse && queryResponse.exceptions && queryResponse.exceptions.length) {
      // Broker-reported query exceptions: surface them pretty-printed.
      errorStr = JSON.stringify(queryResponse.exceptions, null, 2);
    } else
    {
      if (checkedOptions.querySyntaxPQL === true)
      {
        if (queryResponse)
        {
          if (queryResponse.selectionResults)
          {
            // Selection query
            columnList = queryResponse.selectionResults.columns;
            dataArray = queryResponse.selectionResults.results;
          }
          else if (!queryResponse.aggregationResults[0]?.groupByResult)
          {
            // Simple aggregation query
            columnList = _.map(queryResponse.aggregationResults, (aggregationResult) => {
              return {title: aggregationResult.function};
            });
            dataArray.push(_.map(queryResponse.aggregationResults, (aggregationResult) => {
              return aggregationResult.value;
            }));
          }
          else if (queryResponse.aggregationResults[0]?.groupByResult)
          {
            // Aggregation group by query
            // TODO - Revisit
            const columns = queryResponse.aggregationResults[0].groupByColumns;
            columns.push(queryResponse.aggregationResults[0].function);
            columnList = _.map(columns, (columnName) => {
              return columnName;
            });
            // Each row is the group key columns followed by the aggregate value.
            dataArray = _.map(queryResponse.aggregationResults[0].groupByResult, (aggregationGroup) => {
              const row = aggregationGroup.group;
              row.push(aggregationGroup.value);
              return row;
            });
          }
        }
      }
      else if (queryResponse.resultTable?.dataSchema?.columnNames?.length)
      {
        // SQL response shape: resultTable carries schema + row data.
        columnList = queryResponse.resultTable.dataSchema.columnNames;
        dataArray = queryResponse.resultTable.rows;
      }
    }
    // Execution statistics surfaced alongside the result set on the query page.
    const columnStats = ['timeUsedMs',
      'numDocsScanned',
      'totalDocs',
      'numServersQueried',
      'numServersResponded',
      'numSegmentsQueried',
      'numSegmentsProcessed',
      'numSegmentsMatched',
      'numConsumingSegmentsQueried',
      'numEntriesScannedInFilter',
      'numEntriesScannedPostFilter',
      'numGroupsLimitReached',
      'partialResponse',
      'minConsumingFreshnessTimeMs',
      'offlineThreadCpuTimeNs',
      'realtimeThreadCpuTimeNs',
      'offlineSystemActivitiesCpuTimeNs',
      'realtimeSystemActivitiesCpuTimeNs',
      'offlineResponseSerializationCpuTimeNs',
      'realtimeResponseSerializationCpuTimeNs',
      'offlineTotalCpuTimeNs',
      'realtimeTotalCpuTimeNs'
    ];
    return {
      error: errorStr,
      result: {
        columns: columnList,
        records: dataArray,
      },
      // Record order must mirror `columnStats` above.
      queryStats: {
        columns: columnStats,
        records: [[queryResponse.timeUsedMs, queryResponse.numDocsScanned, queryResponse.totalDocs, queryResponse.numServersQueried, queryResponse.numServersResponded,
          queryResponse.numSegmentsQueried, queryResponse.numSegmentsProcessed, queryResponse.numSegmentsMatched, queryResponse.numConsumingSegmentsQueried,
          queryResponse.numEntriesScannedInFilter, queryResponse.numEntriesScannedPostFilter, queryResponse.numGroupsLimitReached,
          queryResponse.partialResponse ? queryResponse.partialResponse : '-', queryResponse.minConsumingFreshnessTimeMs,
          queryResponse.offlineThreadCpuTimeNs, queryResponse.realtimeThreadCpuTimeNs,
          queryResponse.offlineSystemActivitiesCpuTimeNs, queryResponse.realtimeSystemActivitiesCpuTimeNs,
          queryResponse.offlineResponseSerializationCpuTimeNs, queryResponse.realtimeResponseSerializationCpuTimeNs,
          queryResponse.offlineTotalCpuTimeNs, queryResponse.realtimeTotalCpuTimeNs]]
      },
      data: queryResponse,
    };
  });
};
// This method is used to display table data of a particular tenant
// API: /tenants/:tenantName/tables
//      /tables/:tableName/size
//      /tables/:tableName/idealstate
//      /tables/:tableName/externalview
// Expected Output: {columns: [], records: []}
const getTenantTableData = async (tenantName) => {
  const { data } = await getTenantTable(tenantName);
  const tableArr = data.tables.map((table) => table);
  return getAllTableDetails(tableArr);
};
// Builds a single schema summary row:
// [schemaName, #dimension cols, #date-time cols, #metric cols, total cols].
// Fix: removed a stray debug `console.log(data)` left in production code.
const getSchemaObject = async (schemaName) =>{
  let schemaObj:Array<any> = [];
  let {data} = await getSchema(schemaName);
  schemaObj.push(data.schemaName);
  schemaObj.push(data.dimensionFieldSpecs ? data.dimensionFieldSpecs.length : 0);
  schemaObj.push(data.dateTimeFieldSpecs ? data.dateTimeFieldSpecs.length : 0);
  schemaObj.push(data.metricFieldSpecs ? data.metricFieldSpecs.length : 0);
  // Total = dimensions + date-times + metrics.
  schemaObj.push(schemaObj[1] + schemaObj[2] + schemaObj[3]);
  return schemaObj;
}
// Builds the schema-listing table: one summary row per schema with counts of
// dimension, date-time and metric columns plus their total.
const getAllSchemaDetails = async () => {
  const columnHeaders = ["Schema Name", "Dimension Columns", "Date-Time Columns", "Metrics Columns", "Total Columns"];
  const { data } = await getSchemaList();
  // getSchema already returns a promise; no async wrapper is needed.
  const results = await Promise.all(data.map((schemaName) => getSchema(schemaName)));
  const schemaDetails = results.map(({ data: schema }) => {
    const dimensionCount = schema.dimensionFieldSpecs ? schema.dimensionFieldSpecs.length : 0;
    const dateTimeCount = schema.dateTimeFieldSpecs ? schema.dateTimeFieldSpecs.length : 0;
    const metricCount = schema.metricFieldSpecs ? schema.metricFieldSpecs.length : 0;
    return [
      schema.schemaName,
      dimensionCount,
      dateTimeCount,
      metricCount,
      dimensionCount + dateTimeCount + metricCount
    ];
  });
  return {
    columns: columnHeaders,
    records: schemaDetails
  };
}
// Returns size/segment/status details for each table in `tablesList`.
// Issues three requests per table (size, ideal state, external view) and then
// walks the flat results array in strides of 3 to rebuild per-table rows.
// NOTE(review): returns a Promise when tablesList is non-empty but a plain
// object when it is empty; callers appear to consume it inside .then()/await
// where both work — confirm before relying on the sync value elsewhere.
const getAllTableDetails = (tablesList) => {
  const columnHeaders = [
    'Table Name',
    'Reported Size',
    'Estimated Size',
    'Number of Segments',
    'Status',
  ];
  if (tablesList.length) {
    const promiseArr = [];
    tablesList.map((name) => {
      promiseArr.push(getTableSize(name));
      promiseArr.push(getIdealState(name));
      promiseArr.push(getExternalView(name));
    });
    return Promise.all(promiseArr).then((results) => {
      const finalRecordsArr = [];
      let singleTableData = [];
      let idealStateObj = null;
      let externalViewObj = null;
      results.map((result, index) => {
        // since we have 3 promises, we are using mod 3 below
        if (index % 3 === 0) {
          // response of getTableSize API
          const {
            tableName,
            reportedSizeInBytes,
            estimatedSizeInBytes,
          } = result.data;
          singleTableData.push(
            tableName,
            Utils.formatBytes(reportedSizeInBytes),
            Utils.formatBytes(estimatedSizeInBytes)
          );
        } else if (index % 3 === 1) {
          // response of getIdealState API
          idealStateObj = result.data.OFFLINE || result.data.REALTIME || {};
        } else if (index % 3 === 2) {
          // response of getExternalView API
          externalViewObj = result.data.OFFLINE || result.data.REALTIME || {};
          const externalSegmentCount = Object.keys(externalViewObj).length;
          const idealSegmentCount = Object.keys(idealStateObj).length;
          // Generating data for the record
          singleTableData.push(
            `${externalSegmentCount} / ${idealSegmentCount}`,
            Utils.getSegmentStatus(idealStateObj, externalViewObj)
          );
          // saving into records array
          finalRecordsArr.push(singleTableData);
          // resetting the required variables
          singleTableData = [];
          idealStateObj = null;
          externalViewObj = null;
        }
      });
      return {
        columns: columnHeaders,
        records: finalRecordsArr,
      };
    });
  }
  // No tables: return an empty listing synchronously.
  return {
    columns: columnHeaders,
    records: []
  };
};
// This method is used to display the summary of a particular tenant table
// API: /tables/:tableName/size
// Expected Output: {tableName: '', reportedSize: '', estimatedSize: ''}
const getTableSummaryData = async (tableName) => {
  const { data } = await getTableSize(tableName);
  return {
    tableName: data.tableName,
    reportedSize: data.reportedSizeInBytes,
    estimatedSize: data.estimatedSizeInBytes,
  };
};
// This method is used to display the segment list of a particular tenant table
// API: /tables/:tableName/idealstate
//      /tables/:tableName/externalview
// Expected Output: {columns: [], records: [], externalViewObj: {}}
const getSegmentList = (tableName) => {
  const promiseArr = [];
  promiseArr.push(getIdealState(tableName));
  promiseArr.push(getExternalView(tableName));

  return Promise.all(promiseArr).then((results) => {
    // Fall back to {} (matching getAllTableDetails) so a response with
    // neither OFFLINE nor REALTIME entries no longer crashes Object.keys().
    const idealStateObj = results[0].data.OFFLINE || results[0].data.REALTIME || {};
    const externalViewObj = results[1].data.OFFLINE || results[1].data.REALTIME || {};

    return {
      columns: ['Segment Name', 'Status'],
      records: Object.keys(idealStateObj).map((key) => {
        return [
          key,
          getSegmentStatus(idealStateObj[key], externalViewObj[key])
        ];
      }),
      externalViewObj
    };
  });
};
// Compares a segment's ideal replica states against its external view and
// classifies it as 'Good', 'Bad', or 'Partial-<good>/<total>'.
// A replica counts as good when its external state matches the ideal state,
// or when it is still CONSUMING.
const getSegmentStatus = (idealSegment, externalViewSegment) => {
  if (_.isEqual(idealSegment, externalViewSegment)) {
    return 'Good';
  }
  // There is a possibility that the segment is in the ideal state but absent
  // from the external view, making externalViewSegment null/undefined.
  const totalCount = externalViewSegment ? Object.keys(externalViewSegment).length : 0;
  let goodCount = 0;
  for (const replicaName of Object.keys(idealSegment)) {
    const idealReplicaState = idealSegment[replicaName];
    const externalReplicaState = externalViewSegment ? externalViewSegment[replicaName] : '';
    if (externalReplicaState === 'CONSUMING' || externalReplicaState === idealReplicaState) {
      goodCount += 1;
    }
  }
  if (goodCount === 0 || totalCount === 0) {
    return 'Bad';
  }
  return goodCount === totalCount ? 'Good' : `Partial-${goodCount}/${totalCount}`;
};
// This method is used to display the JSON config of a particular tenant table
// API: /tables/:tableName
const getTableDetails = async (tableName) => {
  const response = await getTenantTableDetails(tableName);
  return response.data;
};
// This method is used to display the summary of a particular segment, its
// replica set, index details, and the raw metadata JSON.
// API: /tables/:tableName/externalview
//      /segments/:tableName/:segmentName/metadata
// Expected Output: {replicaSet, indexes, summary, JSON}
// Fix: removed a stray debug `console.log(debugInfoObj)` left in production code.
const getSegmentDetails = (tableName, segmentName) => {
  // tableName has the form "<rawName>_<TYPE>", e.g. "myTable_OFFLINE".
  const tableInfo = tableName.split('_');
  const promiseArr = [];
  promiseArr.push(getExternalView(tableName));
  promiseArr.push(getSegmentMetadata(tableName, segmentName));
  promiseArr.push(getSegmentDebugInfo(tableInfo[0], tableInfo[1].toLowerCase()));

  return Promise.all(promiseArr).then((results) => {
    const obj = results[0].data.OFFLINE || results[0].data.REALTIME;
    const segmentMetaData = results[1].data;
    const debugObj = results[2].data;
    // Map of serverName -> error message for this segment (if any), used to
    // attach tooltips to ERROR replica states below.
    let debugInfoObj = {};
    if(debugObj && debugObj[0]){
      const debugInfosObj = debugObj[0].segmentDebugInfos?.find((o)=>{return o.segmentName === segmentName});
      if(debugInfosObj){
        const serverNames = _.keys(debugInfosObj?.serverState || {});
        serverNames?.map((serverName)=>{
          debugInfoObj[serverName] = debugInfosObj.serverState[serverName]?.errorInfo?.errorMessage;
        });
      }
    }
    const result = [];
    for (const prop in obj[segmentName]) {
      if (obj[segmentName]) {
        const status = obj[segmentName][prop];
        result.push([prop, status === 'ERROR' ? {value: status, tooltip: debugInfoObj[prop]} : status]);
      }
    }

    // Strip the bulky per-column index info out of the raw-JSON view.
    const segmentMetaDataJson = { ...segmentMetaData }
    delete segmentMetaDataJson.indexes
    delete segmentMetaDataJson.columns

    return {
      replicaSet: {
        columns: ['Server Name', 'Status'],
        records: [...result],
      },
      indexes: {
        columns: ['Field Name', 'Bloom Filter', 'Dictionary', 'Forward Index', 'Sorted', 'Inverted Index', 'JSON Index', 'Null Value Vector Reader', 'Range Index'],
        records: Object.keys(segmentMetaData.indexes).map(fieldName => [
          fieldName,
          segmentMetaData.indexes[fieldName]["bloom-filter"] === "YES",
          segmentMetaData.indexes[fieldName]["dictionary"] === "YES",
          segmentMetaData.indexes[fieldName]["forward-index"] === "YES",
          // "Sorted" comes from the column metadata, not the index map.
          ((segmentMetaData.columns || []).filter(row => row.columnName === fieldName)[0] || {sorted: false}).sorted,
          segmentMetaData.indexes[fieldName]["inverted-index"] === "YES",
          segmentMetaData.indexes[fieldName]["json-index"] === "YES",
          segmentMetaData.indexes[fieldName]["null-value-vector-reader"] === "YES",
          segmentMetaData.indexes[fieldName]["range-index"] === "YES",
        ])
      },
      summary: {
        segmentName,
        totalDocs: segmentMetaData['segment.total.docs'],
        createTime: moment(+segmentMetaData['segment.creation.time']).format(
          'MMMM Do YYYY, h:mm:ss'
        ),
      },
      JSON: segmentMetaDataJson
    };
  });
};
// This method is used to fetch the LIVEINSTANCE config
// API: /zk/get?path=:clusterName/LIVEINSTANCES/:instanceName
// Expected Output: configuration in JSON format
const getLiveInstanceConfig = async (clusterName, instanceName) => {
  const params = encodeURIComponent(`/${clusterName}/LIVEINSTANCES/${instanceName}`);
  const res = await zookeeperGetData(params);
  return res.data;
};
// This method is used to fetch the instance config
// API: /zk/get?path=:clusterName/CONFIGS/PARTICIPANT/:instanceName
// Expected Output: configuration in JSON format
const getInstanceConfig = async (clusterName, instanceName) => {
  const params = encodeURIComponent(`/${clusterName}/CONFIGS/PARTICIPANT/${instanceName}`);
  const res = await zookeeperGetData(params);
  return res.data;
};
// This method is used to get instance info
// API: /instances/:instanceName
const getInstanceDetails = async (instanceName) => {
  const res = await getInstance(instanceName);
  return res.data;
};
// Persists updated details for an instance.
// API: PUT /instances/:instanceName
const updateInstanceDetails = async (instanceName, instanceDetails) => {
  const res = await putInstance(instanceName, instanceDetails);
  return res.data;
};
// This method is responsible to prepare the data for the ZK tree structure.
// It internally calls getNodeData() which makes the required API calls.
// `count` seeds the running nodeId counter; the updated counter is returned
// so the caller can keep ids unique across subsequent expansions.
const getZookeeperData = async (path, count) => {
  let counter = count;
  const rootNode = {
    nodeId: `${counter++}`,
    label: path,
    child: [],
    isLeafNode: false,
    hasChildRendered: true
  };
  const { currentNodeData, currentNodeMetadata, currentNodeListStat } = await getNodeData(path);
  for (const pathName of Object.keys(currentNodeListStat)) {
    rootNode.child.push({
      nodeId: `${counter++}`,
      label: pathName,
      // Avoid a double slash when expanding the root path '/'.
      fullPath: path === '/' ? path+pathName : `${path}/${pathName}`,
      child: [],
      isLeafNode: currentNodeListStat[pathName].numChildren === 0,
      hasChildRendered: false
    });
  }
  return { newTreeData: [rootNode], currentNodeData, currentNodeMetadata, currentNodeListStat, counter };
};
// This method is responsible to get data, get list with stats and get stats
// for a single zookeeper node.
// API: /zk/get => node data
// API: /zk/lsl => child list with stats
// API: /zk/stat => node stats
const getNodeData = async (path) => {
  const params = encodeURIComponent(path);
  const [dataRes, listStatRes, statRes] = await Promise.all([
    zookeeperGetData(params),
    zookeeperGetListWithStat(params),
    zookeeperGetStat(params)
  ]);
  const currentNodeData = dataRes.data || {};
  const currentNodeListStat = listStatRes.data;
  const currentNodeMetadata = statRes.data;
  // Render epoch-millis timestamps as human-readable strings in place.
  if (currentNodeMetadata.ctime || currentNodeMetadata.mtime) {
    currentNodeMetadata.ctime = moment(+currentNodeMetadata.ctime).format(
      'MMMM Do YYYY, h:mm:ss'
    );
    currentNodeMetadata.mtime = moment(+currentNodeMetadata.mtime).format(
      'MMMM Do YYYY, h:mm:ss'
    );
  }
  return { currentNodeData, currentNodeMetadata, currentNodeListStat };
};
// Writes serialized data to a zookeeper node.
const putNodeData = (data) => {
  const serializedData = Utils.serialize(data);
  return zookeeperPutData(serializedData);
};

// Deletes a zookeeper node at the given path.
const deleteNode = (path) => {
  const params = encodeURIComponent(path);
  return zookeeperDeleteNode(params);
};
// Lists the brokers serving a tenant; an error response yields [].
const getBrokerOfTenant = async (tenantName) => {
  const response = await getBrokerListOfTenant(tenantName);
  return !response.data.error ? response.data : [];
};

// Lists the servers serving a tenant; an error response yields [].
const getServerOfTenant = async (tenantName) => {
  const response = await getServerListOfTenant(tenantName);
  return !response.data.error ? response.data.ServerInstances : [];
};
// Replaces an instance's tag list (sent as a comma-separated string).
const updateTags = async (instanceName, tagsList) => {
  const response = await updateInstanceTags(instanceName, tagsList.toString());
  return response.data;
};

// Enables or disables an instance.
const toggleInstanceState = async (instanceName, state) => {
  const response = await setInstanceState(instanceName, state);
  return response.data;
};

// Enables or disables a table (optionally scoped by table type).
const toggleTableState = async (tableName, state, tableType) => {
  const response = await setTableState(tableName, state, tableType);
  return response.data;
};

// Drops an instance from the cluster.
const deleteInstance = async (instanceName) => {
  const response = await dropInstance(instanceName);
  return response.data;
};
// Reloads a single segment of a table.
const reloadSegmentOp = async (tableName, segmentName) => {
  const response = await reloadSegment(tableName, segmentName);
  return response.data;
};

// Triggers a reload of all segments of a table.
const reloadAllSegmentsOp = async (tableName, tableType) => {
  const response = await reloadAllSegments(tableName, tableType);
  return response.data;
};

// Fetches the current reload status for a table.
const reloadStatusOp = async (tableName, tableType) => {
  const response = await reloadStatus(tableName, tableType);
  return response.data;
};

// Deletes a single segment from a table.
const deleteSegmentOp = async (tableName, segmentName) => {
  const response = await deleteSegment(tableName, segmentName);
  return response.data;
};
// Updates a table's config.
const updateTable = async (tableName: string, table: string) => {
  const res = await putTable(tableName, table);
  return res.data;
};

// Updates a schema definition.
const updateSchema = async (schemaName: string, schema: string) => {
  const res = await putSchema(schemaName, schema);
  return res.data;
};

// Deletes a table.
const deleteTableOp = async (tableName) => {
  const response = await deleteTable(tableName);
  return response.data;
};

// Deletes a schema.
const deleteSchemaOp = async (tableName) => {
  const response = await deleteSchema(tableName);
  return response.data;
};
// Rebalances a table's servers; queryParams are serialized to a query string.
const rebalanceServersForTableOp = async (tableName, queryParams) => {
  const serializedParams = Utils.serialize(queryParams);
  const response = await rebalanceServersForTable(tableName, serializedParams);
  return response.data;
};

// Rebalances a table's brokers.
const rebalanceBrokersForTableOp = async (tableName) => {
  const response = await rebalanceBrokersForTable(tableName);
  return response.data;
};

// Validates a schema definition without saving it.
const validateSchemaAction = async (schemaObj) => {
  const response = await validateSchema(schemaObj);
  return response.data;
};

// Validates a table config without saving it.
const validateTableAction = async (tableObj) => {
  const response = await validateTable(tableObj);
  return response.data;
};

// Persists a new schema.
const saveSchemaAction = async (schemaObj) => {
  const response = await saveSchema(schemaObj);
  return response.data;
};

// Persists a new table config.
const saveTableAction = async (tableObj) => {
  const response = await saveTable(tableObj);
  return response.data;
};
// Fetches a single schema definition.
const getSchemaData = async (schemaName) => {
  const response = await getSchema(schemaName);
  return response.data;
};

// Fetches the enabled/disabled state of a table.
const getTableState = async (tableName, tableType) => {
  const response = await getState(tableName, tableType);
  return response.data;
};

// Fetches authentication workflow info for the controller.
const getAuthInfo = async () => {
  const response = await getInfo();
  return response.data;
};

// Fetches the OpenID Connect discovery document for the given issuer.
const getWellKnownOpenIdConfiguration = async (issuer) => {
  const response = await baseApi.get(`${issuer}/.well-known/openid-configuration`);
  return response.data;
};

// Verifies an auth token against the controller.
const verifyAuth = async (authToken) => {
  const response = await authenticateUser(authToken);
  return response.data;
};
// Extracts the `access_token` parameter from the URL hash fragment
// (e.g. "#access_token=abc&x=1"). Returns '' when no token is present.
// Fix: String.prototype.substr is deprecated; substring(1) equivalently
// drops the leading '#'.
const getAccessTokenFromHashParams = () => {
  let accessToken = '';
  const urlSearchParams = new URLSearchParams(location.hash.substring(1));
  if (urlSearchParams.has('access_token')) {
    accessToken = urlSearchParams.get('access_token') as string;
  }
  return accessToken;
};
// Returns the current hash route with the `access_token` parameter removed.
// When the hash carries no access token at all, `fallbackUrl` is returned.
// Flag-style params (key with empty value) are re-appended verbatim so they
// are not serialized as "key=".
const getURLWithoutAccessToken = (fallbackUrl = '/'): string => {
  let route = location.hash.substring(1);
  if (!route.includes('access_token=')) {
    return fallbackUrl; // prefix is always '' in this branch
  }
  let prefix = '';
  if (route.startsWith('/')) {
    prefix = '/';
    route = route.substring(1);
  }
  const searchParams = new URLSearchParams(route);
  searchParams.delete('access_token');
  // Collect keys whose value is empty ...
  const flagParams: string[] = [];
  for (const [key, value] of searchParams.entries()) {
    if (!value) {
      flagParams.push(key);
    }
  }
  // ... and strip them from the param set.
  for (const key of flagParams) {
    searchParams.delete(key);
  }
  // Remaining "key=value" params come first, then bare flag keys.
  const parts = [...flagParams];
  if (searchParams.toString()) {
    parts.unshift(searchParams.toString());
  }
  return `${prefix}${parts.join('&')}`;
};
// Fetches the table listing for the user-management view.
const getTable = async () => {
  const response = await requestTable();
  return response.data;
};

// Fetches the list of users.
const getUserList = async () => {
  const response = await requestUserList();
  return response.data;
};

// Creates a new user.
const addUser = async (userObject) => {
  const response = await requestAddUser(userObject);
  return response.data;
};

// Deletes a user.
const deleteUser = async (userObject) => {
  const response = await requestDeleteUser(userObject);
  return response.data;
};

// Updates a user; `passwordChanged` signals whether the password was edited.
const updateUser = async (userObject, passwordChanged) => {
  const response = await requestUpdateUser(userObject, passwordChanged);
  return response.data;
};
// Aggregated default export: every request helper defined in this module.
// Consumers import this object and call the helpers as methods.
export default {
  getTenantsData,
  getAllInstances,
  getInstanceData,
  getClusterConfigData,
  getClusterConfigJSON,
  getQueryTablesList,
  getTableSchemaData,
  getQueryResults,
  getTenantTableData,
  getAllTableDetails,
  getTableSummaryData,
  getSegmentList,
  getSegmentStatus,
  getTableDetails,
  getSegmentDetails,
  getClusterName,
  getLiveInstance,
  getLiveInstanceConfig,
  getInstanceConfig,
  getInstanceDetails,
  updateInstanceDetails,
  getZookeeperData,
  getNodeData,
  putNodeData,
  deleteNode,
  getBrokerOfTenant,
  getServerOfTenant,
  updateTags,
  toggleInstanceState,
  toggleTableState,
  deleteInstance,
  deleteSegmentOp,
  reloadSegmentOp,
  reloadStatusOp,
  reloadAllSegmentsOp,
  updateTable,
  updateSchema,
  deleteTableOp,
  deleteSchemaOp,
  rebalanceServersForTableOp,
  rebalanceBrokersForTableOp,
  validateSchemaAction,
  validateTableAction,
  saveSchemaAction,
  saveTableAction,
  getSchemaData,
  getAllSchemaDetails,
  getTableState,
  // Authentication / OAuth helpers
  getAuthInfo,
  getWellKnownOpenIdConfiguration,
  verifyAuth,
  getAccessTokenFromHashParams,
  getURLWithoutAccessToken,
  // User management helpers
  getTable,
  getUserList,
  addUser,
  deleteUser,
  updateUser
};
* @module iModels
*/
import { assert, BeEvent, GeoServiceStatus, GuidString, Id64, Id64String, IModelStatus, Mutable, OpenMode } from "@itwin/core-bentley";
import {
Angle, AxisIndex, AxisOrder, Constant, Geometry, Matrix3d, Point3d, Range3d, Range3dProps, Transform, Vector3d, XYAndZ, XYZProps,
YawPitchRollAngles, YawPitchRollProps,
} from "@itwin/core-geometry";
import { ChangesetIdWithIndex } from "./ChangesetProps";
import { Cartographic, CartographicProps } from "./geometry/Cartographic";
import { GeographicCRS, GeographicCRSProps } from "./geometry/CoordinateReferenceSystem";
import { AxisAlignedBox3d } from "./geometry/Placement";
import { IModelError } from "./IModelError";
import { ThumbnailProps } from "./Thumbnail";
/** The properties to open a connection to an iModel for RPC operations.
 * @public
 */
export interface IModelRpcOpenProps {
  /** The iTwin in which the iModel exists - must be defined for briefcases that are synchronized with iModelHub. */
  readonly iTwinId?: GuidString;
  /** Guid of the iModel. */
  readonly iModelId?: GuidString;
  /** Id of the last Changeset that was applied to the iModel - must be defined for briefcases that are synchronized with iModelHub.
   * @note Changeset Ids are string hash values based on the content and parent.
   */
  readonly changeset?: ChangesetIdWithIndex;
}
/** The properties that identify an opened iModel for RPC operations.
 * @public
 */
export interface IModelRpcProps extends IModelRpcOpenProps {
  /** Unique key used for identifying the iModel between the frontend and the backend.
   * @see [[OpenDbKey]] - keys must be unique among all open iModels at any given time.
   */
  readonly key: string;
}
/** Properties that position an iModel on the earth via [ECEF](https://en.wikipedia.org/wiki/ECEF) (Earth Centered Earth Fixed) coordinates
 * @note [[xVector]] and [[yVector]] only take effect when both are supplied.
 * @public
 */
export interface EcefLocationProps {
  /** The Origin of an iModel on the earth in ECEF coordinates */
  readonly origin: XYZProps;
  /** The [orientation](https://en.wikipedia.org/wiki/Geographic_coordinate_conversion) of an iModel on the earth. */
  readonly orientation: YawPitchRollProps;
  /** Optional position on the earth used to establish the ECEF coordinates. */
  readonly cartographicOrigin?: CartographicProps;
  /** Optional X column vector used with [[yVector]] to calculate potentially non-rigid transform if a projection is present. */
  readonly xVector?: XYZProps;
  /** Optional Y column vector used with [[xVector]] to calculate potentially non-rigid transform if a projection is present. */
  readonly yVector?: XYZProps;
}
/** Properties of the [Root Subject]($docs/bis/intro/glossary#subject-root).
 * @public
 */
export interface RootSubjectProps {
  /** The name of the root subject. */
  readonly name: string;
  /** Description of the root subject (optional). */
  readonly description?: string;
}
/** Properties of an iModel that are always held in memory whenever one is opened, both on the frontend and on the backend .
 * @public
 */
export interface IModelProps {
  /** The name and description of the root subject of this iModel */
  readonly rootSubject: RootSubjectProps;
  /** The volume of the entire project, in spatial coordinates */
  readonly projectExtents?: Range3dProps;
  /** An offset to be applied to all spatial coordinates. This is normally used to transform spatial coordinates into the Cartesian coordinate system of a Geographic Coordinate System. */
  readonly globalOrigin?: XYZProps;
  /** The location of the iModel in Earth Centered Earth Fixed coordinates. iModel units are always meters */
  readonly ecefLocation?: EcefLocationProps;
  /** The Geographic Coordinate Reference System indicating the projection and datum used. */
  readonly geographicCoordinateSystem?: GeographicCRSProps;
  /** The name of the iModel. */
  readonly name?: string;
}
/** The properties returned by the backend when creating a new [[IModelConnection]] from the frontend, either with Rpc or with Ipc.
 * These properties describe the iModel held on the backend for the newly formed connection and are used to construct a new
 * [[IModelConnection]] instance on the frontend to access it.
 * @public
 */
export type IModelConnectionProps = IModelProps & IModelRpcProps;
/** The properties that can be supplied when creating a *new* iModel.
 * @public
 */
export interface CreateIModelProps extends IModelProps {
  /** The GUID of new iModel. If not present, a GUID will be generated. */
  readonly guid?: GuidString;
  /** Client name for new iModel */
  readonly client?: string;
  /** Thumbnail for new iModel
   * @alpha
   */
  readonly thumbnail?: ThumbnailProps;
}
/** Encryption-related properties that can be supplied when creating or opening snapshot iModels.
 * @see [[SnapshotOpenOptions]] and [[CreateSnapshotIModelProps]], which extend these properties.
 * @public
 */
export interface IModelEncryptionProps {
  /** The password used to encrypt/decrypt the snapshot iModel. */
  readonly password?: string;
}
/**
 * A key used to identify an opened [IModelDb]($backend) between the frontend and backend for Rpc and Ipc communications.
 * Keys must be unique - that is there can never be two IModelDbs opened with the same key at any given time.
 * If no key is supplied in a call to open an IModelDb, one is generated and returned.
 * It is only necessary to supply a key if you have some reason to assign a specific value to identify an IModelDb.
 * If you don't supply the key, you must use the returned value for Rpc and Ipc communications.
 * @public
 */
export interface OpenDbKey {
  /** The key value to assign to the opened IModelDb. When omitted, a key is generated and returned by the open call. */
  readonly key?: string;
}
/** Options to open a [SnapshotDb]($backend).
 * @public
 */
export interface SnapshotOpenOptions extends IModelEncryptionProps, OpenDbKey {
  /** @internal */
  readonly lazyBlockCache?: boolean;
  /** @internal */
  readonly autoUploadBlocks?: boolean;
  /**
   * The "base" name that can be used for creating temporary files related to this Db.
   * The string should be a name related to the current Db filename using some known pattern so that all files named "baseName*" can be deleted externally during cleanup.
   * It must be the name of a file (that may or may not exist) in a writable directory.
   * If not present, the baseName will default to the database's file name (including the path).
   * @internal
   */
  readonly tempFileBase?: string;
}
/** Options to open a [StandaloneDb]($backend) via [StandaloneDb.openFile]($backend) from the backend,
 * or [BriefcaseConnection.openStandalone]($frontend) from the frontend.
 * @public
 */
export type StandaloneOpenOptions = OpenDbKey;
/** Options that can be supplied when creating snapshot iModels.
 * @public
 */
export interface CreateSnapshotIModelProps extends IModelEncryptionProps {
  /** If true, then create SQLite views for Model, Element, ElementAspect, and Relationship classes.
   * These database views can often be useful for interoperability workflows.
   */
  readonly createClassViews?: boolean;
}
/** The options that can be specified when creating an *empty* snapshot iModel.
 * @see [SnapshotDb.createEmpty]($backend)
 * @public
 */
export type CreateEmptySnapshotIModelProps = CreateIModelProps & CreateSnapshotIModelProps;
/** Options that can be supplied when creating standalone iModels.
 * @internal
 */
export interface CreateStandaloneIModelProps extends IModelEncryptionProps {
  /** If present, file will allow local editing, but cannot be used to create changesets */
  readonly allowEdit?: string;
}
/** The options that can be specified when creating an *empty* standalone iModel.
 * @see [StandaloneDb.createEmpty]($backend)
 * @internal
 */
export type CreateEmptyStandaloneIModelProps = CreateIModelProps & CreateStandaloneIModelProps;
/** Properties identifying a file property stored in an iModel.
 * @public
 */
export interface FilePropertyProps {
  /** Namespace that scopes the property name. */
  readonly namespace: string;
  /** Name of the property within [[namespace]]. */
  readonly name: string;
  /** Optional primary id of the property. */
  id?: number | string;
  /** Optional secondary id of the property. */
  subId?: number | string;
}
/** The position and orientation of an iModel on the earth in [ECEF](https://en.wikipedia.org/wiki/ECEF) (Earth Centered Earth Fixed) coordinates
 * @see [GeoLocation of iModels]($docs/learning/GeoLocation.md)
 * @public
 */
export class EcefLocation implements EcefLocationProps {
  /** The origin of the ECEF transform. */
  public readonly origin: Point3d;
  /** The orientation of the ECEF transform */
  public readonly orientation: YawPitchRollAngles;
  /** Optional position on the earth used to establish the ECEF origin and orientation. */
  public readonly cartographicOrigin?: Cartographic;
  /** Optional X column vector used with [[yVector]] to calculate potentially non-rigid transform if a projection is present. */
  public readonly xVector?: Vector3d;
  /** Optional Y column vector used with [[xVector]] to calculate potentially non-rigid transform if a projection is present. */
  public readonly yVector?: Vector3d;
  // Spatial-to-ECEF transform, computed once in the constructor from origin + orientation (or column vectors).
  private _transform: Transform;
  /** Get the transform from iModel Spatial coordinates to ECEF from this EcefLocation */
  public getTransform(): Transform { return this._transform; }
  /** Construct a new EcefLocation. Once constructed, it is frozen and cannot be modified. */
  constructor(props: EcefLocationProps) {
    this.origin = Point3d.fromJSON(props.origin).freeze();
    this.orientation = YawPitchRollAngles.fromJSON(props.orientation).freeze();
    if (props.cartographicOrigin)
      this.cartographicOrigin = Cartographic.fromRadians({ longitude: props.cartographicOrigin.longitude, latitude: props.cartographicOrigin.latitude, height: props.cartographicOrigin.height }).freeze();
    // The column vectors only take effect when both are supplied.
    if (props.xVector && props.yVector) {
      this.xVector = Vector3d.fromJSON(props.xVector);
      this.yVector = Vector3d.fromJSON(props.yVector);
    }
    let matrix;
    // Prefer a (possibly non-rigid) matrix built from the explicit column vectors;
    // skip it when their cross product is degenerate (parallel or zero vectors).
    if (this.xVector && this.yVector) {
      const zVector = this.xVector.crossProduct(this.yVector);
      if (zVector.normalizeInPlace())
        matrix = Matrix3d.createColumns(this.xVector, this.yVector, zVector);
    }
    // Otherwise fall back to the rigid rotation described by yaw/pitch/roll.
    if (!matrix)
      matrix = this.orientation.toMatrix3d();
    this._transform = Transform.createOriginAndMatrix(this.origin, matrix);
  }
  /** Construct ECEF Location from cartographic origin with optional known point and angle. */
  public static createFromCartographicOrigin(origin: Cartographic, point?: Point3d, angle?: Angle) {
    const ecefOrigin = origin.toEcef();
    // Angular offset corresponding to roughly 10 meters on the earth's surface.
    const deltaRadians = 10 / Constant.earthRadiusWGS84.polar;
    const northCarto = Cartographic.fromRadians({ longitude: origin.longitude, latitude: origin.latitude + deltaRadians, height: origin.height });
    const eastCarto = Cartographic.fromRadians({ longitude: origin.longitude + deltaRadians, latitude: origin.latitude, height: origin.height });
    const ecefNorth = northCarto.toEcef();
    const ecefEast = eastCarto.toEcef();
    // Local east (x) and north (y) directions at the origin, sampled from the nearby points.
    const xVector = Vector3d.createStartEnd(ecefOrigin, ecefEast).normalize();
    const yVector = Vector3d.createStartEnd(ecefOrigin, ecefNorth).normalize();
    const matrix = Matrix3d.createRigidFromColumns(xVector!, yVector!, AxisOrder.XYZ)!;
    if (angle !== undefined) {
      // Compose the optional rotation about Z; `matrix` is passed as the result argument,
      // so the product is written back into it.
      const north = Matrix3d.createRotationAroundAxisIndex(AxisIndex.Z, angle);
      matrix.multiplyMatrixMatrix(north, matrix);
    }
    if (point !== undefined) {
      // Shift the ECEF origin so that the known spatial `point` maps to `origin`.
      const delta = matrix.multiplyVector(Vector3d.create(-point.x, -point.y, -point.z));
      ecefOrigin.addInPlace(delta);
    }
    return new EcefLocation({ origin: ecefOrigin, orientation: YawPitchRollAngles.createFromMatrix3d(matrix)!, cartographicOrigin: origin });
  }
  /** Get the location center of the earth in the iModel coordinate system. */
  public get earthCenter(): Point3d {
    // Inverse-rotate the negated origin: the ECEF origin (0,0,0) expressed in spatial coordinates.
    const matrix = this.orientation.toMatrix3d();
    return Point3d.createFrom(matrix.multiplyTransposeXYZ(-this.origin.x, -this.origin.y, -this.origin.z));
  }
  /** Return true if this location is equivalent to another location within a small tolerance. */
  public isAlmostEqual(other: EcefLocation): boolean {
    if (!this.origin.isAlmostEqual(other.origin) || !this.orientation.isAlmostEqual(other.orientation))
      return false;
    // The optional column vectors must agree in presence and (when present) in value.
    if ((this.xVector === undefined) !== (other.xVector === undefined) || (this.yVector === undefined) !== (other.yVector === undefined))
      return false;
    if (this.xVector !== undefined && other.xVector !== undefined && !this.xVector.isAlmostEqual(other.xVector))
      return false;
    if (this.yVector !== undefined && other.yVector !== undefined && !this.yVector.isAlmostEqual(other.yVector))
      return false;
    // Cartographic origins must either both be absent or both present and nearly equal.
    const thisCarto = this.cartographicOrigin;
    const otherCarto = other.cartographicOrigin;
    if (undefined === thisCarto || undefined === otherCarto)
      return undefined === thisCarto && undefined === otherCarto;
    return thisCarto.equalsEpsilon(otherCarto, Geometry.smallMetricDistance);
  }
  /** Serialize this EcefLocation to its JSON representation, emitting only the optional members that are set. */
  public toJSON(): EcefLocationProps {
    const props: Mutable<EcefLocationProps> = {
      origin: this.origin.toJSON(),
      orientation: this.orientation.toJSON(),
    };
    if (this.cartographicOrigin)
      props.cartographicOrigin = this.cartographicOrigin.toJSON();
    if (this.xVector)
      props.xVector = this.xVector.toJSON();
    if (this.yVector)
      props.yVector = this.yVector.toJSON();
    return props;
  }
}
/** Represents an iModel in JavaScript.
* @see [GeoLocation of iModels]($docs/learning/GeoLocation.md)
* @public
*/
export abstract class IModel implements IModelProps {
private _projectExtents?: AxisAlignedBox3d;
private _name?: string;
private _rootSubject?: RootSubjectProps;
private _globalOrigin?: Point3d;
private _ecefLocation?: EcefLocation;
private _ecefTrans?: Transform;
private _geographicCoordinateSystem?: GeographicCRS;
private _iModelId?: GuidString;
/** The Id of the repository model. */
public static readonly repositoryModelId: Id64String = "0x1";
/** The Id of the root subject element. */
public static readonly rootSubjectId: Id64String = "0x1";
/** The Id of the dictionary model. */
public static readonly dictionaryId: Id64String = "0x10";
/** Event raised after [[name]] changes. */
public readonly onNameChanged = new BeEvent<(previousName: string) => void>();
/** Event raised after [[rootSubject]] changes. */
public readonly onRootSubjectChanged = new BeEvent<(previousSubject: RootSubjectProps) => void>();
/** Event raised after [[projectExtents]] changes. */
public readonly onProjectExtentsChanged = new BeEvent<(previousExtents: AxisAlignedBox3d) => void>();
/** Event raised after [[globalOrigin]] changes. */
public readonly onGlobalOriginChanged = new BeEvent<(previousOrigin: Point3d) => void>();
/** Event raised after [[ecefLocation]] changes. */
public readonly onEcefLocationChanged = new BeEvent<(previousLocation: EcefLocation | undefined) => void>();
/** Event raised after [[geographicCoordinateSystem]] changes. */
public readonly onGeographicCoordinateSystemChanged = new BeEvent<(previousGCS: GeographicCRS | undefined) => void>();
/** Name of the iModel */
public get name(): string {
assert(this._name !== undefined);
return this._name;
}
public set name(name: string) {
if (name !== this._name) {
const old = this._name;
this._name = name;
if (undefined !== old)
this.onNameChanged.raiseEvent(old);
}
}
/** The name and description of the root subject of this iModel */
public get rootSubject(): RootSubjectProps {
assert(this._rootSubject !== undefined);
return this._rootSubject;
}
public set rootSubject(subject: RootSubjectProps) {
if (undefined === this._rootSubject || this._rootSubject.name !== subject.name || this._rootSubject.description !== subject.description) {
const old = this._rootSubject;
this._rootSubject = subject;
if (old)
this.onRootSubjectChanged.raiseEvent(old);
}
}
/** Returns `true` if this is a snapshot iModel. */
public abstract get isSnapshot(): boolean;
/** Returns `true` if this is a briefcase copy of an iModel that is synchronized with iModelHub. */
public abstract get isBriefcase(): boolean;
public abstract get isOpen(): boolean;
/**
* The volume, in spatial coordinates, inside which the entire project is contained.
* @note The object returned from this method is frozen. You *must* make a copy before you do anything that might attempt to modify it.
*/
public get projectExtents() {
assert(undefined !== this._projectExtents);
return this._projectExtents;
}
public set projectExtents(extents: AxisAlignedBox3d) {
// Don't allow any axis of the project extents to be less than 1 meter.
const projectExtents = extents.clone();
projectExtents.ensureMinLengths(1.0);
if (!this._projectExtents || !this._projectExtents.isAlmostEqual(projectExtents)) {
const old = this._projectExtents;
projectExtents.freeze();
this._projectExtents = projectExtents;
if (old)
this.onProjectExtentsChanged.raiseEvent(old);
}
}
/** An offset to be applied to all spatial coordinates. */
public get globalOrigin(): Point3d {
assert(this._globalOrigin !== undefined);
return this._globalOrigin;
}
public set globalOrigin(org: Point3d) {
if (!this._globalOrigin || !this._globalOrigin.isAlmostEqual(org)) {
const old = this._globalOrigin;
org.freeze();
this._globalOrigin = org;
if (old)
this.onGlobalOriginChanged.raiseEvent(old);
}
}
/** The [EcefLocation]($docs/learning/glossary#ecefLocation) of the iModel in Earth Centered Earth Fixed coordinates. */
public get ecefLocation(): EcefLocation | undefined {
return this._ecefLocation;
}
public set ecefLocation(ecefLocation: EcefLocation | undefined) {
const old = this._ecefLocation;
if (!old && !ecefLocation)
return;
else if (old && ecefLocation && old.isAlmostEqual(ecefLocation))
return;
this._ecefLocation = ecefLocation;
this.onEcefLocationChanged.raiseEvent(old);
}
/** Set the [EcefLocation]($docs/learning/glossary#ecefLocation) for this iModel. */
public setEcefLocation(ecef: EcefLocationProps): void {
this.ecefLocation = new EcefLocation(ecef);
}
/** The geographic coordinate reference system of the iModel. */
public get geographicCoordinateSystem(): GeographicCRS | undefined {
return this._geographicCoordinateSystem;
}
public set geographicCoordinateSystem(geoCRS: GeographicCRS | undefined) {
const old = this._geographicCoordinateSystem;
if (!old && !geoCRS)
return;
else if (old && geoCRS && old.equals(geoCRS))
return;
this._geographicCoordinateSystem = geoCRS;
this.onGeographicCoordinateSystemChanged.raiseEvent(old);
}
/** Sets the geographic coordinate reference system from GeographicCRSProps. */
public setGeographicCoordinateSystem(geoCRS: GeographicCRSProps) {
this.geographicCoordinateSystem = new GeographicCRS(geoCRS);
}
/** @internal */
public getConnectionProps(): IModelConnectionProps {
return {
name: this.name,
rootSubject: this.rootSubject,
projectExtents: this.projectExtents.toJSON(),
globalOrigin: this.globalOrigin.toJSON(),
ecefLocation: this.ecefLocation,
geographicCoordinateSystem: this.geographicCoordinateSystem,
... this.getRpcProps(),
};
}
/** @internal */
public toJSON(): IModelConnectionProps {
return this.getConnectionProps();
}
/** A key used to identify this iModel in RPC calls from frontend to backend.
* @internal
*/
protected _fileKey: string;
/** Get the key that was used to open this iModel. This is the value used for Rpc and Ipc communications. */
public get key(): string { return this._fileKey; }
/** @internal */
protected _iTwinId?: GuidString;
/** The Guid that identifies the iTwin that owns this iModel. */
public get iTwinId(): GuidString | undefined { return this._iTwinId; }
/** The Guid that identifies this iModel. */
public get iModelId(): GuidString | undefined { return this._iModelId; }
/** @public */
public changeset: ChangesetIdWithIndex;
protected _openMode = OpenMode.Readonly;
/** The [[OpenMode]] used for this IModel. */
public get openMode(): OpenMode { return this._openMode; }
/** Return a token for RPC operations. */
public getRpcProps(): IModelRpcProps {
if (!this.isOpen)
throw new IModelError(IModelStatus.BadRequest, "IModel is not open for rpc");
return {
key: this._fileKey,
iTwinId: this.iTwinId,
iModelId: this.iModelId,
changeset: this.changeset,
};
}
/** @internal */
protected constructor(tokenProps?: IModelRpcProps) {
this.changeset = { id: "", index: 0 };
this._fileKey = "";
if (tokenProps) {
this._fileKey = tokenProps.key;
this._iTwinId = tokenProps.iTwinId;
this._iModelId = tokenProps.iModelId;
if (tokenProps.changeset)
this.changeset = tokenProps.changeset;
}
}
/** @internal */
protected initialize(name: string, props: IModelProps) {
this.name = name;
this.rootSubject = props.rootSubject;
this.projectExtents = Range3d.fromJSON(props.projectExtents);
this.globalOrigin = Point3d.fromJSON(props.globalOrigin);
this.ecefLocation = props.ecefLocation ? new EcefLocation(props.ecefLocation) : undefined;
this.geographicCoordinateSystem = props.geographicCoordinateSystem ? new GeographicCRS(props.geographicCoordinateSystem) : undefined;
}
/** Get the default subCategoryId for the supplied categoryId */
public static getDefaultSubCategoryId(categoryId: Id64String): Id64String {
return Id64.isValid(categoryId) ? Id64.fromLocalAndBriefcaseIds(Id64.getLocalId(categoryId) + 1, Id64.getBriefcaseId(categoryId)) : Id64.invalid;
}
/** True if this iModel has an [EcefLocation]($docs/learning/glossary#ecefLocation). */
public get isGeoLocated() { return undefined !== this._ecefLocation; }
/** Get the Transform from this iModel's Spatial coordinates to ECEF coordinates using its [[IModel.ecefLocation]].
* @throws IModelError if [[isGeoLocated]] is false.
*/
public getEcefTransform(): Transform {
if (undefined === this._ecefLocation)
throw new IModelError(GeoServiceStatus.NoGeoLocation, "iModel is not GeoLocated");
if (this._ecefTrans === undefined) {
this._ecefTrans = this._ecefLocation.getTransform();
this._ecefTrans.freeze();
}
return this._ecefTrans;
}
/** Convert a point in this iModel's Spatial coordinates to an ECEF point using its [[IModel.ecefLocation]].
* @param spatial A point in the iModel's spatial coordinates
* @param result If defined, use this for output
* @returns A Point3d in ECEF coordinates
* @throws IModelError if [[isGeoLocated]] is false.
*/
public spatialToEcef(spatial: XYAndZ, result?: Point3d): Point3d { return this.getEcefTransform().multiplyPoint3d(spatial, result)!; }
/** Convert a point in ECEF coordinates to a point in this iModel's Spatial coordinates using its [[ecefLocation]].
* @param ecef A point in ECEF coordinates
* @param result If defined, use this for output
* @returns A Point3d in this iModel's spatial coordinates
* @throws IModelError if [[isGeoLocated]] is false.
* @note The resultant point will only be meaningful if the ECEF coordinate is close on the earth to the iModel.
*/
public ecefToSpatial(ecef: XYAndZ, result?: Point3d): Point3d { return this.getEcefTransform().multiplyInversePoint3d(ecef, result)!; }
/** Convert a point in this iModel's Spatial coordinates to a [[Cartographic]] using its [[IModel.ecefLocation]].
* @param spatial A point in the iModel's spatial coordinates
* @param result If defined, use this for output
* @returns A Cartographic location
* @throws IModelError if [[isGeoLocated]] is false.
*/
public spatialToCartographicFromEcef(spatial: XYAndZ, result?: Cartographic): Cartographic { return Cartographic.fromEcef(this.spatialToEcef(spatial), result)!; }
/** Convert a [[Cartographic]] to a point in this iModel's Spatial coordinates using its [[IModel.ecefLocation]].
* @param cartographic A cartographic location
* @param result If defined, use this for output
* @returns A point in this iModel's spatial coordinates
* @throws IModelError if [[isGeoLocated]] is false.
* @note The resultant point will only be meaningful if the ECEF coordinate is close on the earth to the iModel.
*/
public cartographicToSpatialFromEcef(cartographic: Cartographic, result?: Point3d) { return this.ecefToSpatial(cartographic.toEcef(result), result); }
} | the_stack |
/// <reference types="node" />
import { CoreOptions, RequestResponse } from "request";
import { ReadStream } from "fs";
declare class JiraApi {
private protocol: string;
private host: string;
private port: string | null;
private apiVersion: string;
private base: string;
private intermediatePath?: string | undefined;
private strictSSL: boolean;
private webhookVersion: string;
private greenhopperVersion: string;
constructor(options: JiraApi.JiraApiOptions);
/**
* Find an issue in jira
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#id290709)
* @param issueNumber - The issue number to search for including the project key
* @param expand - The resource expansion to return additional fields in the response
* @param fields - Comma separated list of field ids or keys to retrieve
* @param properties - Comma separated list of properties to retrieve
* @param fieldsByKeys - False by default, used to retrieve fields by key instead of id
*/
findIssue(
issueNumber: string,
expand?: string,
fields?: string,
properties?: string,
fieldsByKeys?: boolean
): Promise<JiraApi.JsonResponse>;
/**
* Download an attachment
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#id288524)
* @param attachment - the attachment
*/
downloadAttachment(attachment: object): Promise<JiraApi.JsonResponse>;
/**
* Get the unresolved issue count
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#id288524)
* @param version - the version of your product you want to find the unresolved
* issues of.
*/
getUnresolvedIssueCount(version: string): Promise<number>;
/**
* Get the Project by project key
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#id289232)
* @param project - key for the project
*/
getProject(project: string): Promise<JiraApi.JsonResponse>;
/**
* Create a new Project
* [Jira Doc](https://docs.atlassian.com/jira/REST/latest/#api/2/project-createProject)
* @param project - with specs
*/
createProject(project: JiraApi.ProjectObject): Promise<JiraApi.JsonResponse>;
/**
* Find the Rapid View for a specified project
* @param projectName - name for the project
*/
findRapidView(projectName: string): Promise<JiraApi.JsonResponse[]>;
/**
* Get the most recent sprint for a given rapidViewId
* @param rapidViewId - the id for the rapid view
*/
getLastSprintForRapidView(rapidViewId: string): Promise<JiraApi.JsonResponse>;
/**
* Get the issues for a rapidView / sprint
* @param rapidViewId - the id for the rapid view
* @param sprintId - the id for the sprint
*/
getSprintIssues(rapidViewId: string, sprintId: string): Promise<JiraApi.JsonResponse>;
/**
* Get a list of Sprints belonging to a Rapid View
* @param rapidViewId - the id for the rapid view
*/
listSprints(rapidViewId: string): Promise<JiraApi.JsonResponse>;
/**
* Add an issue to the project's current sprint
* @param issueId - the id of the existing issue
* @param sprintId - the id of the sprint to add it to
*/
addIssueToSprint(issueId: string, sprintId: string): Promise<JiraApi.JsonResponse>;
/**
* Create an issue link between two issues
* @param link - a link object formatted how the Jira API specifies
*/
issueLink(link: JiraApi.LinkObject): Promise<JiraApi.JsonResponse>;
/**
* List all issue link types jira knows about
* [Jira Doc](https://docs.atlassian.com/software/jira/docs/api/REST/8.5.0/#api/2/issueLinkType-getIssueLinkTypes)
*/
listIssueLinkTypes(): Promise<JiraApi.JsonResponse>;
/**
* Retrieves the remote links associated with the given issue.
* @param issueNumber - the issue number to find remote links for.
*/
getRemoteLinks(issueNumber: string): Promise<JiraApi.JsonResponse>;
/**
* Creates a remote link associated with the given issue.
* @param issueNumber - The issue number to create the remotelink under
* @param remoteLink - the remotelink object as specified by the Jira API
*/
createRemoteLink(issueNumber: string, remoteLink: JiraApi.LinkObject): Promise<JiraApi.JsonResponse>;
/**
* Get Versions for a project
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#id289653)
* @param project - A project key to get versions for
*/
getVersions(project: string): Promise<JiraApi.JsonResponse>;
/**
* Get details of single Version in project
* [Jira Doc](https://docs.atlassian.com/jira/REST/cloud/#api/2/version-getVersion)
* @param version - The id of this version
*/
getVersion(version: string): Promise<JiraApi.JsonResponse>;
/**
* Create a version
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#id288232)
* @param version - an object of the new version
*/
createVersion(version: JiraApi.VersionObject): Promise<JiraApi.JsonResponse>;
/**
* Update a version
* [Jira Doc](https://docs.atlassian.com/jira/REST/latest/#d2e510)
* @param version - an new object of the version to update
*/
updateVersion(version: JiraApi.VersionObject): Promise<JiraApi.JsonResponse>;
/**
* Delete a version
* [Jira Doc](https://docs.atlassian.com/jira/REST/latest/#api/2/version-delete)
* @param versionId - the ID of the version to delete
* @param moveFixIssuesToId - when provided, existing fixVersions will be moved
* to this ID. Otherwise, the deleted version will be removed from all
* issue fixVersions.
* @param moveAffectedIssuesToId - when provided, existing affectedVersions will
* be moved to this ID. Otherwise, the deleted version will be removed
* from all issue affectedVersions.
*/
deleteVersion(
versionId: string,
moveFixIssuesToId: string,
moveAffectedIssuesToId: string
): Promise<JiraApi.JsonResponse>;
/**
* Move version
* [Jira Doc](https://docs.atlassian.com/jira/REST/cloud/#api/2/version-moveVersion)
* @param versionId - the ID of the version to delete
* @param position - an object of the new position
*/
moveVersion(versionId: string, position: string): Promise<JiraApi.JsonResponse>;
/**
* Pass a search query to Jira
* [Jira Doc](https://docs.atlassian.com/jira/REST/latest/#d2e4424)
* @param searchString - jira query string in JQL
* @param optional - object containing any of the following properties
*/
searchJira(searchString: string, optional?: JiraApi.SearchQuery): Promise<JiraApi.JsonResponse>;
/**
* Create a Jira user
* [Jira Doc](https://docs.atlassian.com/jira/REST/cloud/#api/2/user-createUser)
* @param user - Properly Formatted User object
*/
createUser(user: JiraApi.UserObject): Promise<JiraApi.JsonResponse>;
/**
* Search user on Jira
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#d2e3756)
* @param options
*/
searchUsers(options: JiraApi.SearchUserOptions): Promise<JiraApi.JsonResponse>;
/**
* Get all users in group on Jira
* @param groupname - A query string used to search users in group
* @param [startAt=0] - The index of the first user to return (0-based)
* @param [maxResults=50] - The maximum number of users to return (defaults to 50).
*/
getUsersInGroup(groupname: string, startAt?: number, maxResults?: number): Promise<JiraApi.JsonResponse>;
/**
* Get issues related to a user
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#id296043)
* @param username - username of user to search for
* @param open - determines if only open issues should be returned
*/
getUsersIssues(username: string, open: boolean): Promise<JiraApi.JsonResponse>;
/**
* Returns a user.
* [Jira Doc](https://developer.atlassian.com/cloud/jira/platform/rest/v3/#api-rest-api-3-user-get)
* @param accountId - The accountId of user to search for
* @param expand - The expand for additional info (groups,applicationRoles)
*/
getUser(accountId: string, expand: string): Promise<JiraApi.JsonResponse>;
/**
* Returns a list of all (active and inactive) users.
* [Jira Doc](https://developer.atlassian.com/cloud/jira/platform/rest/v3/#api-rest-api-3-users-search-get)
* @param [startAt=0] - The index of the first user to return (0-based)
* @param [maxResults=50] - The maximum number of users to return (defaults to 50).
*/
getUsers(startAt?: number, maxResults?: number): Promise<JiraApi.JsonResponse[]>;
/**
* Add issue to Jira
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#id290028)
* @param issue - Properly Formatted Issue object
*/
addNewIssue(issue: JiraApi.IssueObject): Promise<JiraApi.JsonResponse>;
/**
* Add a user as a watcher on an issue
* @param issueKey - the key of the existing issue
* @param username - the jira username to add as a watcher to the issue
*/
addWatcher(issueKey: string, username: string): Promise<JiraApi.JsonResponse>;
/**
* Change an assignee on an issue
* [Jira Doc](https://docs.atlassian.com/jira/REST/cloud/#api/2/issue-assign)
* @param issueKey - the key of the existing issue
* @param assigneeName - the jira username to add as a new assignee to the issue
*/
updateAssignee(issueKey: string, assigneeName: string): Promise<JiraApi.JsonResponse>;
/**
* Change an assignee on an issue
* [Jira Doc](https://developer.atlassian.com/cloud/jira/platform/rest/v2/#api-rest-api-2-issue-issueIdOrKey-assignee-put)
* @param issueKey - the key of the existing issue
* @param userId - the jira username to add as a new assignee to the issue
*/
updateAssigneeWithId(issueKey: string, userId: string): Promise<JiraApi.JsonResponse>;
/**
* Delete issue from Jira
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#id290791)
* @param issueId - the Id of the issue to delete
*/
deleteIssue(issueId: string): Promise<JiraApi.JsonResponse>;
/**
* Update issue in Jira
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#id290878)
* @param issueId - the Id of the issue to update
* @param issueUpdate - update Object as specified by the rest api
* @param query - adds parameters to the query string
*/
updateIssue(issueId: string, issueUpdate: JiraApi.IssueObject, query?: JiraApi.Query): Promise<JiraApi.JsonResponse>;
/**
* List Components
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#id290489)
* @param project - key for the project
*/
listComponents(project: string): Promise<JiraApi.JsonResponse>;
/**
* Add component to Jira
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#id290028)
* @param component - Properly Formatted Component
*/
addNewComponent(component: JiraApi.ComponentObject): Promise<JiraApi.JsonResponse>;
/**
* Update Jira component
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#api/2/component-updateComponent)
* @param componentId - the Id of the component to update
* @param component - Properly Formatted Component
*/
updateComponent(componentId: string, component: JiraApi.ComponentObject): Promise<JiraApi.JsonResponse>;
/**
* Delete component from Jira
* [Jira Doc](https://developer.atlassian.com/cloud/jira/platform/rest/v2/#api-api-2-component-id-delete)
* @param id - The ID of the component.
* @param moveIssuesTo - The ID of the component to replace the deleted component.
* If this value is null no replacement is made.
*/
deleteComponent(componentId: string): Promise<JiraApi.JsonResponse>;
/**
* Get count of issues assigned to the component.
* [Jira Doc](https://developer.atlassian.com/cloud/jira/platform/rest/v2/#api-rest-api-2-component-id-relatedIssueCounts-get)
* @param id - Component Id.
*/
relatedIssueCounts(id: string): Promise<JiraApi.JsonResponse>;
/**
* Create custom Jira field
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#api/2/field-createCustomField)
* @param field - Properly formatted Field object
*/
createCustomField(field: JiraApi.FieldObject): Promise<JiraApi.JsonResponse>;
/**
* List all fields custom and not that jira knows about.
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#id290489)
*/
listFields(): Promise<JiraApi.FieldObject[]>;
/**
* Add an option for a select list issue field.
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#api/2/field/{fieldKey}/option-createOption)
* @param fieldKey - the key of the select list field
* @param option - properly formatted Option object
*/
createFieldOption(fieldKey: string, option: JiraApi.FieldOptionObject): Promise<JiraApi.JsonResponse>;
/**
* Returns all options defined for a select list issue field.
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#api/2/field/{fieldKey}/option-getAllOptions)
* @param fieldKey - the key of the select list field
*/
listFieldOptions(fieldKey: string): Promise<JiraApi.JsonResponse>;
/**
* Creates or updates an option for a select list issue field.
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#api/2/field/{fieldKey}/option-putOption)
* @param fieldKey - the key of the select list field
* @param optionId - the id of the modified option
* @param option - properly formatted Option object
*/
upsertFieldOption(
fieldKey: string,
optionId: string,
option: JiraApi.FieldOptionObject): Promise<JiraApi.JsonResponse>;
/**
* Returns an option for a select list issue field.
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#api/2/field/{fieldKey}/option-getOption)
* @param fieldKey - the key of the select list field
* @param optionId - the id of the option
*/
getFieldOption(fieldKey: string, optionId: string): Promise<JiraApi.JsonResponse>;
/**
* Deletes an option from a select list issue field.
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#api/2/field/{fieldKey}/option-delete)
* @param fieldKey - the key of the select list field
* @param optionId - the id of the deleted option
*/
deleteFieldOption(fieldKey: string, optionId: string): Promise<JiraApi.JsonResponse>;
/**
* Get Property of Issue by Issue and Property Id
* [Jira Doc](https://docs.atlassian.com/jira/REST/cloud/#api/2/issue/{issueIdOrKey}/properties-getProperty)
* @param issueNumber - The issue number to search for including the project key
* @param property - The property key to search for
*/
getIssueProperty(issueNumber: string, property: string): Promise<JiraApi.JsonResponse>;
/**
* List all changes for an issue, sorted by date, starting from the latest
* [Jira Doc](https://docs.atlassian.com/jira/REST/cloud/#api/2/issue/{issueIdOrKey}/changelog)
* @param issueNumber - The issue number to search for including the project key
* @param [startAt=0] - optional starting index number
* @param [maxResults=50] - optional ending index number
*/
getIssueChangelog(issueNumber: string, startAt?: number, maxResults?: number): Promise<JiraApi.JsonResponse>;
/**
* List all watchers for an issue
* [Jira Doc](http://docs.atlassian.com/jira/REST/cloud/#api/2/issue-getIssueWatchers)
* @param issueNumber - The issue number to search for including the project key
*/
getIssueWatchers(issueId: string): Promise<JiraApi.JsonResponse[]>;
/**
* List all priorities jira knows about
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#id290489)
*/
listPriorities(): Promise<JiraApi.JsonResponse>;
/**
* List Transitions for a specific issue that are available to the current user
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#id290489)
* @param issueId - get transitions available for the issue
*/
listTransitions(issueId: string): Promise<JiraApi.JsonResponse>;
/**
* Transition issue in Jira
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#id290489)
* @param issueId - the Id of the issue to delete
* @param issueTransition - transition object from the jira rest API
*/
transitionIssue(issueId: string, issueTransition: JiraApi.TransitionObject): Promise<JiraApi.JsonResponse>;
/**
* List all Viewable Projects
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#id289193)
*/
listProjects(): Promise<JiraApi.JsonResponse[]>;
/**
* Add a comment to an issue
* [Jira Doc](https://docs.atlassian.com/jira/REST/latest/#id108798)
* @param issueId - Issue to add a comment to
* @param comment - string containing comment
*/
addComment(issueId: string, comment: string): Promise<JiraApi.JsonResponse>;
/**
* Add a comment to an issue, supports full comment object
* [Jira Doc](https://docs.atlassian.com/jira/REST/latest/#id108798)
* @param issueId - Issue to add a comment to
* @param comment - The object containing your comment data
*/
addCommentAdvanced(issueId: string, comment: JiraApi.CommentAdvancedObject): Promise<JiraApi.JsonResponse>;
/**
* Update comment for an issue
* [Jira Doc](https://docs.atlassian.com/jira/REST/cloud/#api/2/issue-updateComment)
* @param issueId - Issue with the comment
* @param commentId - Comment that is updated
* @param comment - string containing new comment
* @param [options={}] - extra options
*/
updateComment(issueId: string, commentId: string, comment: string, options?: JiraApi.CommentOptions): Promise<JiraApi.JsonResponse>;
/**
* Get Comments by IssueId.
* [Jira Doc](https://developer.atlassian.com/cloud/jira/platform/rest/v3/#api-rest-api-3-comment-list-post)
* @param issueId - this issue this comment is on
*/
getComments(issueId: string): Promise<JiraApi.JsonResponse>;
/**
* Get Comment by Id.
* [Jira Doc](https://developer.atlassian.com/cloud/jira/platform/rest/v3/#api-rest-api-3-comment-list-post)
* @param issueId - this issue this comment is on
* @param commentId - the id of the comment
*/
getComment(issueId: string, commentId: number): Promise<JiraApi.JsonResponse>;
/**
* Delete Comments by Id.
* [Jira Doc](https://developer.atlassian.com/cloud/jira/platform/rest/v3/#api-rest-api-3-comment-list-post)
* @param issueId - this issue this comment is on
* @param commentId - the id of the comment
*/
deleteComment(issueId: string, commentId: number): Promise<JiraApi.JsonResponse>;
/**
* Add a worklog to a project
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#id291617)
* @param issueId - Issue to add a worklog to
* @param worklog - worklog object from the rest API
* @param [optional] newEstimate - the new value for the remaining estimate field
* @param [options={}] - extra options
*/
addWorklog(
issueId: string,
worklog: JiraApi.WorklogObject,
newEstimate?: JiraApi.EstimateObject,
options?: JiraApi.WorklogOptions
): Promise<JiraApi.JsonResponse>;
/**
* Get ids of worklogs modified since
* [Jira Doc](https://docs.atlassian.com/jira/REST/cloud/#api/2/worklog-getWorklogsForIds)
* @param since - a date time in unix timestamp format since when updated worklogs
* will be returned.
* @param expand - ptional comma separated list of parameters to expand: properties
* (provides worklog properties).
*/
updatedWorklogs(since: number, expand: string): Promise<JiraApi.JsonResponse>;
/**
* Delete worklog from issue
* [Jira Doc](https://docs.atlassian.com/jira/REST/latest/#d2e1673)
* @param issueId - the Id of the issue to delete
* @param worklogId - the Id of the worklog in issue to delete
*/
deleteWorklog(issueId: string, worklogId: string): Promise<JiraApi.JsonResponse>;
/**
* Deletes an issue link.
* [Jira Doc](https://developer.atlassian.com/cloud/jira/platform/rest/v3/#api-rest-api-3-issueLink-linkId-delete)
* @param linkId - the Id of the issue link to delete
*/
deleteIssueLink(linkId: string): Promise<JiraApi.JsonResponse>;
/**
* Returns worklog details for a list of worklog IDs.
* [Jira Doc](https://developer.atlassian.com/cloud/jira/platform/rest/v3/#api-rest-api-3-worklog-list-post)
* @param worklogsIDs - a list of worklog IDs.
* @param expand - expand to include additional information about worklogs
*
*/
getWorklogs(worklogsIDs: string[], expand: string): Promise<JiraApi.JsonResponse[]>;
/**
* Get worklogs list from a given issue
* [Jira Doc](https://developer.atlassian.com/cloud/jira/platform/rest/v3/#api-api-3-issue-issueIdOrKey-worklog-get)
* @param issueId - the Id of the issue to find worklogs for
* @param [startAt=0] - optional starting index number
* @param [maxResults=1000] - optional ending index number
*/
getIssueWorklogs(issueId: string): Promise<JiraApi.JsonResponse>;
/**
* List all Issue Types jira knows about
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#id295946)
*/
listIssueTypes(): Promise<JiraApi.JsonResponse>;
/**
* Register a webhook
* [Jira Doc](https://developer.atlassian.com/display/JIRADEV/JIRA+Webhooks+Overview)
* @param webhook - properly formatted webhook
*/
registerWebhook(webhook: JiraApi.WebhookObject): Promise<JiraApi.JsonResponse>;
/**
* List all registered webhooks
* [Jira Doc](https://developer.atlassian.com/display/JIRADEV/JIRA+Webhooks+Overview)
*/
listWebhooks(): Promise<JiraApi.JsonResponse>;
/**
* Get a webhook by its ID
* [Jira Doc](https://developer.atlassian.com/display/JIRADEV/JIRA+Webhooks+Overview)
* @param webhookID - id of webhook to get
*/
getWebhook(webhookID: string): Promise<JiraApi.JsonResponse>;
/**
* Delete a registered webhook
* [Jira Doc](https://developer.atlassian.com/display/JIRADEV/JIRA+Webhooks+Overview)
* @param webhookID - id of the webhook to delete
*/
deleteWebhook(webhookID: string): Promise<JiraApi.JsonResponse>;
/**
* Describe the currently authenticated user
* [Jira Doc](http://docs.atlassian.com/jira/REST/latest/#id2e865)
*/
getCurrentUser(): Promise<JiraApi.JsonResponse>;
/**
* Retrieve the backlog of a certain Rapid View
* @param rapidViewId - rapid view id
*/
getBacklogForRapidView(rapidViewId: string): Promise<JiraApi.JsonResponse>;
/**
* Add attachment to a Issue
* [Jira Doc](https://docs.atlassian.com/jira/REST/latest/#api/2/issue/{issueIdOrKey}/attachments-addAttachment)
* @param issueId - issue id
* @param readStream - readStream object from fs
*/
addAttachmentOnIssue(issueId: string, readStream: ReadStream): Promise<JiraApi.JsonResponse>;
/**
* Notify people related to issue
* [Jira Doc](https://docs.atlassian.com/jira/REST/cloud/#api/2/issue-notify)
* @param issueId - issue id
* @param notificationBody - properly formatted body
*/
issueNotify(issueId: string, notificationBody: JiraApi.NotificationObject): Promise<JiraApi.JsonResponse>;
/**
* Get list of possible statuses
* [Jira Doc](https://docs.atlassian.com/jira/REST/latest/#api/2/status-getStatuses)
*/
listStatus(): Promise<JiraApi.JsonResponse>;
/**
* Get a Dev-Status summary by issue ID
* @param issueId - id of issue to get
*/
getDevStatusSummary(issueId: string): Promise<JiraApi.JsonResponse>;
/**
* Get a Dev-Status detail by issue ID
* @param issueId - id of issue to get
* @param applicationType - type of application (stash, bitbucket)
* @param dataType - info to return (repository, pullrequest)
*/
getDevStatusDetail(issueId: string, applicationType: string, dataType: string): Promise<JiraApi.JsonResponse>;
/**
* Get issue
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/issue-getIssue)
* @param issueIdOrKey - Id of issue
* @param [fields] - [optional] The list of fields to return for each issue.
* @param [expand] - [optional] A comma-separated list of the parameters to expand.
*/
getIssue(issueIdOrKey: string, fields?: string | string[], expand?: string): Promise<JiraApi.JsonResponse>;
/**
* Move issues to backlog
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/backlog-moveIssuesToBacklog)
* @param issues - id or key of issues to get
*/
moveToBacklog(issues: string[]): Promise<JiraApi.JsonResponse>;
/**
* Get all boards
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/board-getAllBoards)
* @param [startAt=0] - The starting index of the returned boards.
* @param [maxResults=50] - The maximum number of boards to return per page.
* @param [type] - Filters results to boards of the specified type.
* @param [name] - Filters results to boards that match the specified name.
* @param [projectKeyOrId] - Filters results to boards that are relevant to a project.
*/
getAllBoards(
startAt?: number,
maxResults?: number,
type?: string,
name?: string,
projectKeyOrId?: string
): Promise<JiraApi.JsonResponse>;
/**
* Create Board
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/board-createBoard)
* @param boardBody - Board name, type and filter Id is required.
*/
createBoard(boardBody: JiraApi.BoardObject): Promise<JiraApi.JsonResponse>;
/**
* Get Board
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/board-getBoard)
* @param boardId - Id of board to retrieve
*/
getBoard(boardId: string): Promise<JiraApi.JsonResponse>;
/**
* Delete Board
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/board-deleteBoard)
* @param boardId - Id of board to retrieve
*/
deleteBoard(boardId: string): Promise<JiraApi.JsonResponse>;
/**
* Get issues for backlog
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/board-getIssuesForBacklog)
* @param boardId - Id of board to retrieve
* @param [startAt=0] - The starting index of the returned issues. Base index: 0.
* @param [maxResults=50] - The maximum number of issues to return per page. Default: 50.
* @param [jql] - Filters results using a JQL query.
* @param [validateQuery] - Specifies whether to validate the JQL query or not.
* Default: true.
* @param [fields] - The list of fields to return for each issue.
*/
getIssuesForBacklog(
boardId: string,
startAt?: number,
maxResults?: number,
jql?: string,
validateQuery?: boolean,
fields?: string
): Promise<JiraApi.JsonResponse>;
/**
* Get Configuration
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/board-getConfiguration)
* @param boardId - Id of board to retrieve
*/
getConfiguration(boardId: string): Promise<JiraApi.JsonResponse>;
/**
* Get issues for board
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/board-getIssuesForBoard)
* @param boardId - Id of board to retrieve
* @param [startAt=0] - The starting index of the returned issues. Base index: 0.
* @param [maxResults=50] - The maximum number of issues to return per page. Default: 50.
* @param [jql] - Filters results using a JQL query.
* @param [validateQuery] - Specifies whether to validate the JQL query or not.
* Default: true.
* @param [fields] - The list of fields to return for each issue.
*/
getIssuesForBoard(
boardId: string,
startAt?: number,
maxResults?: number,
jql?: string,
validateQuery?: boolean,
fields?: string
): Promise<JiraApi.JsonResponse>;
/**
* Get Epics
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/board/{boardId}/epic-getEpics)
* @param boardId - Id of board to retrieve
* @param [startAt=0] - The starting index of the returned epics. Base index: 0.
* @param [maxResults=50] - The maximum number of epics to return per page. Default: 50.
* @param [done] - Filters results to epics that are either done or not done.
* Valid values: true, false.
*/
getEpics(
boardId: string,
startAt?: number,
maxResults?: number,
done?: "true" | "false"
): Promise<JiraApi.JsonResponse>;
/**
* Get board issues for epic
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/board/{boardId}/epic-getIssuesForEpic)
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/board/{boardId}/epic-getIssuesWithoutEpic)
* @param boardId - Id of board to retrieve
* @param epicId - Id of epic to retrieve, specify 'none' to get issues without an epic.
* @param [startAt=0] - The starting index of the returned issues. Base index: 0.
* @param [maxResults=50] - The maximum number of issues to return per page. Default: 50.
* @param [jql] - Filters results using a JQL query.
* @param [validateQuery] - Specifies whether to validate the JQL query or not.
* Default: true.
* @param [fields] - The list of fields to return for each issue.
*/
getBoardIssuesForEpic(
boardId: string,
epicId: string,
startAt?: number,
maxResults?: number,
jql?: string,
validateQuery?: boolean,
fields?: string
): Promise<JiraApi.JsonResponse>;
/**
* Estimate issue for board
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/issue-estimateIssueForBoard)
* @param issueIdOrKey - Id of issue
* @param boardId - The id of the board required to determine which field
* is used for estimation.
* @param body - value to set
*/
estimateIssueForBoard(issueIdOrKey: string, boardId: number, body: string): Promise<JiraApi.JsonResponse>;
/**
* Rank Issues
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/issue-rankIssues)
* @param body - value to set
*/
rankIssues(body: string): Promise<JiraApi.JsonResponse>;
/**
* Get Projects
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/board/{boardId}/project-getProjects)
* @param boardId - Id of board to retrieve
* @param [startAt=0] - The starting index of the returned projects. Base index: 0.
* @param [maxResults=50] - The maximum number of projects to return per page.
* Default: 50.
*/
getProjects(boardId: string, startAt?: number, maxResults?: number): Promise<JiraApi.JsonResponse>;
/**
* Get Projects Full
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/board/{boardId}/project-getProjectsFull)
* @param boardId - Id of board to retrieve
*/
getProjectsFull(boardId: string): Promise<JiraApi.JsonResponse>;
/**
* Get Board Properties Keys
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/board/{boardId}/properties-getPropertiesKeys)
* @param boardId - Id of board to retrieve
*/
getBoardPropertiesKeys(boardId: string): Promise<JiraApi.JsonResponse>;
/**
* Delete Board Property
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/board/{boardId}/properties-deleteProperty)
* @param boardId - Id of board to retrieve
* @param propertyKey - Id of property to delete
*/
deleteBoardProperty(boardId: string, propertyKey: string): Promise<JiraApi.JsonResponse>;
/**
* Set Board Property
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/board/{boardId}/properties-setProperty)
* @param boardId - Id of board to retrieve
* @param propertyKey - Id of property to delete
* @param body - value to set, for objects make sure to stringify first
*/
setBoardProperty(boardId: string, propertyKey: string, body: string): Promise<JiraApi.JsonResponse>;
/**
* Get Board Property
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/board/{boardId}/properties-getProperty)
* @param boardId - Id of board to retrieve
* @param propertyKey - Id of property to retrieve
*/
getBoardProperty(boardId: string, propertyKey: string): Promise<JiraApi.JsonResponse>;
/**
* Get All Sprints
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/board/{boardId}/sprint-getAllSprints)
* @param boardId - Id of board to retrieve
* @param [startAt=0] - The starting index of the returned sprints. Base index: 0.
* @param [maxResults=50] - The maximum number of sprints to return per page.
* Default: 50.
* @param [state] - Filters results to sprints in specified states.
* Valid values: future, active, closed.
*/
getAllSprints(
boardId: string,
startAt?: number,
maxResults?: number,
state?: "future" | "active" | "closed"
): Promise<JiraApi.JsonResponse>;
/**
* Get Board issues for sprint
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/board/{boardId}/sprint-getIssuesForSprint)
* @param boardId - Id of board to retrieve
* @param sprintId - Id of sprint to retrieve
* @param [startAt=0] - The starting index of the returned issues. Base index: 0.
* @param [maxResults=50] - The maximum number of issues to return per page. Default: 50.
* @param [jql] - Filters results using a JQL query.
* @param [validateQuery] - Specifies whether to validate the JQL query or not.
* Default: true.
* @param [fields] - The list of fields to return for each issue.
*/
getBoardIssuesForSprint(
boardId: string,
sprintId: string,
startAt?: number,
maxResults?: number,
jql?: string,
validateQuery?: boolean,
fields?: string
): Promise<JiraApi.JsonResponse>;
/**
* Get All Versions
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/board/{boardId}/version-getAllVersions)
* @param boardId - Id of board to retrieve
* @param [startAt=0] - The starting index of the returned versions. Base index: 0.
* @param [maxResults=50] - The maximum number of versions to return per page.
* Default: 50.
* @param [released] - Filters results to versions that are either released or
* unreleased.Valid values: true, false.
*/
getAllVersions(
boardId: string,
startAt?: number,
maxResults?: number,
released?: "true" | "false"
): Promise<JiraApi.JsonResponse>;
/**
* Get Filter
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/filter)
* @param filterId - Id of filter to retrieve
*/
getFilter(filterId: string): Promise<JiraApi.JsonResponse>;
/**
* Get Epic
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/epic-getEpic)
* @param epicIdOrKey - Id of epic to retrieve
*/
getEpic(epicIdOrKey: string): Promise<JiraApi.JsonResponse>;
/**
* Partially update epic
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/epic-partiallyUpdateEpic)
* @param epicIdOrKey - Id of epic to retrieve
* @param body - value to set, for objects make sure to stringify first
*/
partiallyUpdateEpic(epicIdOrKey: string, body: string): Promise<JiraApi.JsonResponse>;
/**
* Get issues for epic
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/epic-getIssuesForEpic)
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/epic-getIssuesWithoutEpic)
* @param epicId - Id of epic to retrieve, specify 'none' to get issues without an epic.
* @param [startAt=0] - The starting index of the returned issues. Base index: 0.
* @param [maxResults=50] - The maximum number of issues to return per page. Default: 50.
* @param [jql] - Filters results using a JQL query.
* @param [validateQuery] - Specifies whether to validate the JQL query or not.
* Default: true.
* @param [fields] - The list of fields to return for each issue.
*/
getIssuesForEpic(epicId: string, startAt?: number, maxResults?: number, jql?: string, validateQuery?: boolean, fields?: string): Promise<JiraApi.JsonResponse>;
/**
* Move Issues to Epic
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/epic-moveIssuesToEpic)
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/epic-removeIssuesFromEpic)
* @param epicIdOrKey - Id of epic to move issue to, or 'none' to remove from epic
* @param issues - array of issues to move
*/
moveIssuesToEpic(epicIdOrKey: string, issues: string[]): Promise<JiraApi.JsonResponse>;
/**
* Rank Epics
* [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/#agile/1.0/epic-rankEpics)
* @param epicIdOrKey - Id of epic
* @param body - value to set (stringify first)
*/
rankEpics(epicIdOrKey: string, body: string): Promise<JiraApi.JsonResponse>;
/**
* Get server info
* [Jira Doc](https://developer.atlassian.com/cloud/jira/platform/rest/v2/#api-api-2-serverInfo-get)
*/
getServerInfo(): Promise<JiraApi.JsonResponse>;
/**
* @param optional - object containing any of the following properties
* Get metadata for creating an issue.
* [Jira Doc](https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issues/#api-rest-api-3-issue-createmeta-get)
*/
getIssueCreateMetadata(optional?: JiraApi.CreateIssueMetadataObject): Promise<JiraApi.JsonResponse>;
    /**
     * Generic Get Request
     * [Jira Doc](https://docs.atlassian.com/jira-software/REST/cloud/2/)
     * @param endpoint - Rest API endpoint
     */
    genericGet(endpoint: string): Promise<JiraApi.JsonResponse>;
    // Internal helpers: build request headers / URIs for the various Jira REST roots.
    private makeRequestHeader(uri: string, options?: JiraApi.UriOptions);
    private makeUri(options: JiraApi.UriOptions): string;
    private makeWebhookUri(options: JiraApi.UriOptions): string;
    private makeSprintQueryUri(options: JiraApi.UriOptions): string;
    private makeDevStatusUri(options: JiraApi.UriOptions): string;
    private makeAgileUri(options: JiraApi.UriOptions): string;
    // Executes the HTTP request; all public methods funnel through here.
    private doRequest(requestOptions: CoreOptions): Promise<RequestResponse>;
}
declare namespace JiraApi {
    /** Constructor options for the JiraApi client. */
    interface JiraApiOptions {
        protocol?: string | undefined;
        host: string;
        port?: string | undefined;
        username?: string | undefined;
        password?: string | undefined;
        apiVersion?: string | undefined;
        base?: string | undefined;
        intermediatePath?: string | undefined;
        strictSSL?: boolean | undefined;
        request?: any;
        timeout?: number | undefined;
        webhookVersion?: string | undefined;
        greenhopperVersion?: string | undefined;
        bearer?: string | undefined;
        oauth?: OAuth | undefined;
        ca?: string | undefined;
    }
    /** OAuth 1.0a credentials used when connecting via OAuth instead of basic auth. */
    interface OAuth {
        consumer_key: string;
        consumer_secret: string;
        access_token: string;
        access_token_secret: string;
        signature_method?: string | undefined;
    }
    // The following are loosely-typed payload/response bags: Jira's JSON shapes
    // are not modelled here, so each is an open string-keyed record.
    interface LinkObject {
        [name: string]: any;
    }
    interface Query {
        [name: string]: any;
    }
    interface JsonResponse {
        [name: string]: any;
    }
    interface UserObject {
        [name: string]: any;
    }
    interface IssueObject {
        [name: string]: any;
    }
    interface ComponentObject {
        [name: string]: any;
    }
    interface FieldObject {
        [name: string]: any;
    }
    interface FieldOptionObject {
        [name: string]: any;
    }
    interface TransitionObject {
        [name: string]: any;
    }
    interface WorklogObject {
        [name: string]: any;
    }
    interface EstimateObject {
        [name: string]: any;
    }
    interface WebhookObject {
        [name: string]: any;
    }
    interface NotificationObject {
        [name: string]: any;
    }
    /** Payload for creating an agile board (unlike the bags above, this one is fully typed). */
    interface BoardObject {
        /** Valid values: scrum, kanban */
        type: 'scrum' | 'kanban';
        /** Must be less than 255 characters. */
        name: string;
        /** Id of a filter that the user has permissions to view. */
        filterId: string;
    }
    interface AttachmentObject {
        [name: string]: any;
    }
    interface ProjectObject {
        [name: string]: any;
    }
    interface VersionObject {
        [name: string]: any;
    }
    interface CommentAdvancedObject {
        [name: string]: any;
    }
    interface CommentOptions {
        [name: string]: any;
    }
    interface WorklogOptions {
        [name: string]: any;
    }
    /** Query parameters for the issue create-metadata endpoint. */
    interface CreateIssueMetadataObject {
        /** [optional] Array of project ids to return metadata for */
        projectIds?: string[] | undefined;
        /** [optional] Array of project keys to return metadata for */
        projectKeys?: string[] | undefined;
        /** [optional] Array of issuetype ids to return metadata for */
        issuetypeIds?: string[] | undefined;
        /** [optional] Array of issuetype names to return metadata for */
        issuetypeNames?: string[] | undefined;
        /** [optional] Include additional information about issue metadata. Valid value is 'projects.issuetypes.fields' */
        expand?: string | undefined;
    }
    /** Options for the user-search endpoint. */
    interface SearchUserOptions {
        /** (DEPRECATED) A query string used to search username, name or e-mail address */
        username: string;
        /**
         * A query string that is matched against user attributes
         * (displayName, and emailAddress) to find relevant users. The string can match the prefix of
         * the attribute's value. For example, query=john matches a user with a displayName of John
         * Smith and a user with an emailAddress of johnson@example.com. Required, unless accountId
         * or property is specified.
         */
        query: string;
        /** [optional - default = 0] The index of the first user to return (0-based) */
        startAt?: number | undefined;
        /** [optional - default = 50] The maximum number of users to return */
        maxResults?: number | undefined;
        /** [optional - default = true] If true, then active users are included in the results */
        includeActive?: boolean | undefined;
        /** [optional - default = true] If true, then inactive users are included in the results */
        includeInactive?: boolean | undefined;
    }
    /** Common pagination/field-selection options shared by search-style endpoints. */
    interface SearchQuery {
        /** [optional - default = 0] starting index number */
        startAt?: number | undefined;
        /**
         * [optional - default = 50] The maximum number of items to
         * return per page. To manage page size, Jira may return fewer items per
         * page where a large number of fields are requested.
         */
        maxResults?: number | undefined;
        /** [optional] array of string names of desired fields */
        fields?: string[] | undefined;
        /** [optional] array of string names of desired expand nodes */
        expand?: string[] | undefined;
    }
    /** Input to the private make*Uri helpers. */
    interface UriOptions {
        pathname: string;
        query?: Query | undefined;
        intermediatePath?: string | undefined;
    }
}
// CommonJS export assignment (`export =`), matching `import JiraApi = require('jira-client')`.
export = JiraApi;
import { QueryAble, ResultFieldType, IPgDb,PostProcessResultFunc } from "./queryAble";
import { PgTable } from "./pgTable";
import { PgSchema } from "./pgSchema";
import * as PgConverters from "./pgConverters";
import { pgUtils } from "./pgUtils";
import * as _ from 'lodash';
import * as pg from 'pg';
import * as readline from 'readline';
import * as fs from 'fs';
import { PgDbLogger } from './pgDbLogger';
import { ConnectionOptions } from './connectionOptions';
import * as EventEmitter from 'events';
// Parses postgres://user:password@host:port/database (every part except db optional).
const CONNECTION_URL_REGEXP = /^postgres:\/\/(?:([^:]+)(?::([^@]*))?@)?([^\/:]+)?(?::([^\/]+))?\/(.*)$/;
// Tokenizer used by PgDb.execute() to split an SQL file into statements while
// respecting quotes, dollar-quoting, comments and statement terminators.
const SQL_TOKENIZER_REGEXP = /''|'|""|"|;|\$|--|\/\*|\*\/|(.+?)/g;
// Matches a dollar-quote delimiter such as $$ or $tag$.
const SQL_$_ESCAPE_REGEXP = /\$[^$]*\$/g;
/** looks like we only get back those that we have access to */
const LIST_SCHEMAS_TABLES =
    `SELECT table_schema as schema, table_name as name
     FROM information_schema.tables
     WHERE table_schema NOT IN ('pg_catalog', 'pg_constraint', 'information_schema')`;
// Resolve a type name to its oid, restricted to one schema / across all schemas.
const GET_OID_FOR_COLUMN_TYPE_FOR_SCHEMA = "SELECT t.oid FROM pg_catalog.pg_type t, pg_namespace n WHERE typname=:typeName and n.oid=t.typnamespace and n.nspname=:schemaName;";
const GET_OID_FOR_COLUMN_TYPE = "SELECT t.oid FROM pg_catalog.pg_type t WHERE typname=:typeName";
/** looks like we only get back those that we have access to */
const GET_SCHEMAS_PROCEDURES = `SELECT
    n.nspname as "schema",
    p.proname as "name",
    (not p.proretset) as "return_single_row",
    (t.typtype in ('b', 'd', 'e', 'r')) as "return_single_value"
   FROM pg_proc p
        inner join pg_namespace n on (p.pronamespace = n.oid)
        inner join pg_type t on (p.prorettype = t.oid)
        left outer join pg_trigger tr on (tr.tgfoid = p.oid)
   WHERE n.nspname NOT IN ('pg_catalog', 'pg_constraint', 'information_schema')
     AND tr.oid is null;`;
const GET_CURRENT_SCHEMAS = "SELECT current_schemas(false)";
//const LIST_ARRAY_TYPE_FIELDS = 'SELECT a.atttypid as oid FROM pg_attribute a WHERE a.attndims>0 AND a.atttypid>200000';
//const LIST_ARRAY_TYPE_FIELDS = 'SELECT a.atttypid as type_oid FROM pg_attribute a WHERE a.tttypid in (1005,1007,1016,1021,1022) OR (a.attndims>0 AND a.atttypid>200000)';
/*
 SELECT c.nspname as schema_name, b.relname as table_name, a.attname as column_name, a.atttypid as type_oid, format_type(a.atttypid, a.atttypmod)
 FROM pg_attribute a
 JOIN pg_class b ON (a.attrelid = b.relfilenode)
 JOIN pg_namespace c ON (b.relnamespace=c.oid)
 WHERE a.attndims>0 AND a.atttypid>200000;
 */
/*
 SELECT * FROM pg_catalog.pg_type t where t.typname like '%tz';
 SELECT t.oid FROM pg_catalog.pg_type t WHERE t.typname in ('timestamptz', 'timetz');
 reltype -> only include the table columns (this is zero for indexes)
 a.attndims>0 -> not reliable (truncate/create table like.. not set it correctly)
 */
/** We get back fields as well that we don't have access to, thus we need to filter those schemas that we have permission for
 * ... TODO check it for tables */
const LIST_SPECIAL_TYPE_FIELDS =
    `SELECT c.nspname as schema_name, b.relname as table_name, a.attname as column_name, a.atttypid as typid
     FROM pg_attribute a
     JOIN pg_class b ON (a.attrelid = b.oid)
     JOIN pg_type t ON (a.atttypid = t.oid)
     JOIN pg_namespace c ON (b.relnamespace=c.oid)
     WHERE (a.atttypid in (114, 3802, 1082, 1083, 1114, 1184, 1266, 3614) or t.typcategory='A')
     AND reltype>0 `;
//AND c.nspname not in ('pg_catalog', 'pg_constraint', 'information_schema')
// Maps well-known scalar/array type names to their oids so parsers can be registered.
const TYPE2OID = `SELECT t.oid, typname FROM pg_catalog.pg_type t WHERE typname in (
    '_bool',
    'int8','_int2','_int4','_int8','_float4','float8','_float8',
    '_text','_varchar',
    'json','jsonb', '_json','_jsonb',
    'date','time','timestamp','timestamptz','timetz','_date','_time','_timestamp','_timestamptz','_timetz',
    'tsvector')`
// Categories of columns that need a custom result parser.
export enum FieldType { JSON, ARRAY, TIME, TSVECTOR }
// Transaction isolation levels accepted by transactionBegin(). (Name keeps the
// historical "Tranzaction" spelling since it is part of the public API.)
export enum TranzactionIsolationLevel {
    serializable = 'SERIALIZABLE',
    repeatableRead = 'REPEATABLE READ',
    readCommitted = 'READ COMMITTED',
    readUncommitted = 'READ UNCOMMITTED'
}
/** LISTEN callback parameter */
export interface Notification {
    processId: number,
    channel: string,
    payload?: string
}
/**
 * Main entry point of the library: a pg connection-pool wrapper that mirrors
 * the database's schemas/tables/functions as properties, installs type
 * parsers, and provides transactions, LISTEN/NOTIFY and SQL-file execution.
 */
export class PgDb extends QueryAble implements IPgDb {
    /** Singleton cache keyed by a password-less connection string (see getInstance). */
    protected static instances: { [index: string]: Promise<PgDb> };
    /*protected*/
    pool;
    /** Dedicated connection when inside a transaction; null when using the pool. */
    connection;
    /*protected*/
    config: ConnectionOptions;
    /*protected*/
    defaultSchemas; // for this.tables and this.fn
    db:PgDb;
    schemas: { [name: string]: PgSchema };
    tables: { [name: string]: PgTable<any> } = {};
    fn: { [name: string]: (...any) => any } = {};
    [name: string]: any | PgSchema;
    /* protected */
    pgdbTypeParsers:Record<string, (string) => any> = {};
    /* protected */
    knownOids: Record<number, boolean> = {};
    /* protected */
    postProcessResult: PostProcessResultFunc;
    /**
     * Private: instances are obtained via connect()/getInstance(), or cloned
     * internally (dedicatedConnectionBegin) to share pool/config/parsers.
     */
    private constructor(pgdb: { defaultSchemas?, config?, schemas?, pool?, pgdbTypeParsers?, knownOids?, getLogger?: () => any, postProcessResult?: PostProcessResultFunc } = {}) {
        super();
        this.schemas = {};
        this.config = pgdb.config;
        this.pool = pgdb.pool;
        this.postProcessResult = pgdb.postProcessResult;
        this.pgdbTypeParsers = pgdb.pgdbTypeParsers || {};
        this.knownOids = pgdb.knownOids || {};
        this.db = this;
        if (pgdb.getLogger) {
            this.setLogger(pgdb.getLogger());
        }
        // Clone the schema/table structure of the source instance. Schemas and
        // tables are also exposed as direct properties unless the name collides.
        for (let schemaName in pgdb.schemas) {
            let schema = new PgSchema(this, schemaName);
            this.schemas[schemaName] = schema;
            if (!(schemaName in this))
                this[schemaName] = schema;
            for (let tableName in pgdb.schemas[schemaName].tables) {
                schema.tables[tableName] = new PgTable(schema, pgdb.schemas[schemaName][tableName].desc, pgdb.schemas[schemaName][tableName].fieldTypes);
                if (!(tableName in schema))
                    schema[tableName] = schema.tables[tableName];
            }
        }
        this.defaultSchemas = pgdb.defaultSchemas;
        this.setDefaultTablesAndFunctions();
    }
    /** Install a hook that is run on every query result (e.g. for auditing/conversion). */
    setPostProcessResult(f: (res: any[], fields: ResultFieldType[], logger: PgDbLogger) => void) {
        this.postProcessResult = f;
    }
    /** If planned to used as a static singleton */
    static async getInstance(config: ConnectionOptions): Promise<PgDb> {
        if (config.connectionString) {
            let res = CONNECTION_URL_REGEXP.exec(config.connectionString);
            if (res) {
                config.user = res[1];
                config.password = res[2] ? res[2] : '';
                config.host = res[3] ? res[3] : 'localhost';
                config.port = res[4] ? +res[4] : 5432;
                config.database = res[5];
            }
        }
        let connectionString = `postgres://${config.user}@${config.host}:${config.port}/${config.database}`; // without password!
        if (!PgDb.instances) {
            PgDb.instances = {};
        }
        if (PgDb.instances[connectionString]) {
            return PgDb.instances[connectionString];
        } else {
            let pgdb = new PgDb({ config: config });
            PgDb.instances[connectionString] = pgdb.init();
            return PgDb.instances[connectionString];
        }
    }
    /** Close the pool and drop any singleton entries that reference it. */
    async close() {
        for (let cs in PgDb.instances) {
            let db = await PgDb.instances[cs];
            if (db.pool == this.pool) {
                delete PgDb.instances[cs];
            }
        }
        await this.pool.end((err: Error) => { });
    }
    /** Create a new (non-singleton) connected instance. */
    static async connect(config: ConnectionOptions): Promise<PgDb> {
        if (config.connectionString) {
            let res = CONNECTION_URL_REGEXP.exec(config.connectionString);
            if (res) {
                config.user = res[1];
                if (res[2]) config.password = res[2];
                config.host = res[3] ? res[3] : 'localhost';
                config.port = res[4] ? +res[4] : 5432;
                config.database = res[5];
            }
        }
        let pgdb = new PgDb({ config: config });
        return pgdb.init();
    }
    /** Create the pg.Pool and load schema/table/type metadata. */
    private async init(): Promise<PgDb> {
        this.pool = new pg.Pool(_.omit(this.config, ['logger', 'skipUndefined']));
        if (this.config.logger)
            this.setLogger(this.config.logger);
        this.pool.on('error', (e, client) => {
            // if a client is idle in the pool
            // and receives an error - for example when your PostgreSQL server restarts
            // the pool will catch the error & let you handle it here
            this.getLogger(true).error('pool error', e);
        });
        await this.reload();
        this.getLogger().log('Successfully connected to Db');
        return this;
    }
    /** Re-read schemas, tables, functions and field types (e.g. after a migration). */
    async reload() {
        await this.initSchemasAndTables();
        await this.initFieldTypes();
    }
    private async initSchemasAndTables() {
        let schemasAndTables = await this.query(LIST_SCHEMAS_TABLES);
        let functions = await this.query(GET_SCHEMAS_PROCEDURES);
        this.defaultSchemas = await this.queryOneField(GET_CURRENT_SCHEMAS);
        // Remove direct schema properties from the previous load before rebuilding.
        let oldSchemaNames = Object.keys(this.schemas);
        for (let sc of oldSchemaNames) {
            if (this[sc] === this.schemas[sc])
                delete this[sc];
        }
        this.schemas = {};
        for (let r of schemasAndTables) {
            let schema = this.schemas[r.schema] = this.schemas[r.schema] || new PgSchema(this, r.schema);
            if (!(r.schema in this))
                this[r.schema] = schema;
            schema.tables[r.name] = new PgTable(schema, r);
            if (!(r.name in schema))
                schema[r.name] = schema.tables[r.name];
        }
        for (let r of functions) {
            let schema = this.schemas[r.schema] = this.schemas[r.schema] || new PgSchema(this, r.schema);
            if (!(r.schema in this))
                this[r.schema] = schema;
            schema.fn[r.name] = pgUtils.createFunctionCaller(schema, r);
        }
        // this.getLogger(true).log('defaultSchemas: ' + defaultSchemas);
        this.setDefaultTablesAndFunctions();
    }
    /** Populate this.tables/this.fn from the search-path schemas (first match wins). */
    private setDefaultTablesAndFunctions() {
        this.tables = {};
        this.fn = {};
        if (!this.defaultSchemas) return;
        for (let sc of this.defaultSchemas) {
            let schema = this.schemas[sc];
            // this.getLogger(true).log('copy schame to default', sc, schema && Object.keys(schema.tables), schema && Object.keys(schema.fn));
            if (!schema)
                continue;
            for (let table in schema.tables)
                this.tables[table] = this.tables[table] || schema.tables[table];
            for (let fn in schema.fn)
                this.fn[fn] = this.fn[fn] || schema.fn[fn];
        }
    }
    private async initFieldTypes() {
        //--- init field types -------------------------------------------
        let schemaNames = "'" + Object.keys(this.schemas).join("', '") + "'";
        if (schemaNames == "''") {
            this.getLogger(true).error("No readable schema found!");
            return;
        }
        let type2oid = {};
        let res = await this.query(TYPE2OID);
        for (let tt of res || []) {
            type2oid[tt.typname] = +tt.oid;
        }
        // Classify each special-typed column (json/time/tsvector/array) per table.
        let specialTypeFields: { schema_name: string, table_name: string, column_name: string, typid: number }[]
            = await this.query(LIST_SPECIAL_TYPE_FIELDS + ' AND c.nspname in (' + schemaNames + ')');
        for (let r of specialTypeFields) {
            if (this.schemas[r.schema_name][r.table_name]) {
                this.schemas[r.schema_name][r.table_name].fieldTypes[r.column_name] =
                    ([type2oid['json'], type2oid['jsonb']].indexOf(r.typid) > -1) ? FieldType.JSON :
                        ([type2oid['tsvector']].indexOf(r.typid) > -1) ? FieldType.TSVECTOR :
                            ([type2oid['date'],type2oid['time'],type2oid['timestamp'],type2oid['timestamptz'],type2oid['timetz']].indexOf(r.typid) > -1) ? FieldType.TIME :
                                FieldType.ARRAY;
            }
        }
        // https://web.archive.org/web/20160613215445/https://doxygen.postgresql.org/include_2catalog_2pg__type_8h_source.html
        // https://github.com/lib/pq/blob/master/oid/types.go
        let builtInArrayTypeParsers: { oidList: number[], parser: (string) => any }[] = [
            {
                oidList: [
                    type2oid['_bool'] // bool[]
                ],
                parser: PgConverters.arraySplitToBool
            },
            {
                oidList: [
                    type2oid['_int2'], // smallInt[] int2[]
                    type2oid['_int4'], // integer[] int4[]
                    type2oid['_float4'], // real[] float4[]
                ],
                parser: PgConverters.arraySplitToNum
            },
            {
                oidList: [
                    type2oid['_text'], // text[]
                    type2oid['_varchar'] // varchar[]
                ],
                parser: PgConverters.arraySplit
            },
            {
                oidList: [
                    type2oid['_json'], // json[]
                    type2oid['_jsonb'] // jsonb[]
                ],
                parser: PgConverters.arraySplitToJson
            },
            {
                oidList: [
                    type2oid['_date'], // date[]
                    type2oid['_time'],
                    type2oid['_timetz'],
                    type2oid['_timestamp'], //timestamp[]
                    type2oid['_timestamptz'],
                ],
                parser: PgConverters.arraySplitToDate
            }
        ];
        builtInArrayTypeParsers.forEach(parserObj => {
            parserObj.oidList.forEach(oid => {
                pg.types.setTypeParser(oid, parserObj.parser);
                delete this.pgdbTypeParsers[oid];
                this.knownOids[oid] = true;
            });
        });
        for (let r of specialTypeFields) {
            if (this.knownOids[r.typid] && !this.pgdbTypeParsers[r.typid]) {
                continue;
            }
            switch (r.typid) {
                case type2oid['json']:
                case type2oid['jsonb']:
                case type2oid['date']: // date
                case type2oid['time']: // time
                case type2oid['timetz']: // timetz
                case type2oid['timestamp']: // timestamp
                case type2oid['timestamptz']: // timestamptz
                case type2oid['tsvector']: // tsvector
                case type2oid['_int8']: // bigInt[] int8[]
                case type2oid['_float8']: // double[] float8[]
                    break;
                default:
                    //best guess otherwise user need to specify
                    pg.types.setTypeParser(r.typid, PgConverters.arraySplit);
                    delete this.pgdbTypeParsers[r.typid];
            }
        }
        // int8/float8 (and their arrays) can exceed Number precision; the
        // validating parsers throw instead of silently losing digits.
        this.pgdbTypeParsers[type2oid['int8']] = PgConverters.numWithValidation;
        this.pgdbTypeParsers[type2oid['float8']] = PgConverters.numWithValidation;
        this.pgdbTypeParsers[type2oid['_int8']] = PgConverters.stringArrayToNumWithValidation;
        this.pgdbTypeParsers[type2oid['_float8']] = PgConverters.stringArrayToNumWithValidation;
        this.knownOids[type2oid['int8']] = true;
        this.knownOids[type2oid['float8']] = true;
        this.knownOids[type2oid['_int8']] = true;
        this.knownOids[type2oid['_float8']] = true;
        let allUsedTypeFields = await this.queryOneColumn(`
            SELECT a.atttypid as typid
            FROM pg_attribute a
             JOIN pg_class b ON (a.attrelid = b.oid)
             JOIN pg_type t ON (a.atttypid = t.oid)
             JOIN pg_namespace c ON (b.relnamespace=c.oid)
            WHERE
             reltype>0 AND
             c.nspname in (${schemaNames})`
        );
        allUsedTypeFields.forEach(oid => this.knownOids[oid] = true);
    }
    /**
     * if schemaName is null, it will be applied for all schemas
     */
    async setTypeParser(typeName: string, parser: (string) => any, schemaName?: string): Promise<void> {
        try {
            if (schemaName) {
                let oid = await this.queryOneField(GET_OID_FOR_COLUMN_TYPE_FOR_SCHEMA, { typeName, schemaName });
                pg.types.setTypeParser(oid, parser);
                delete this.pgdbTypeParsers[oid];
                this.knownOids[oid] = true;
            } else {
                let list = await this.queryOneColumn(GET_OID_FOR_COLUMN_TYPE, { typeName });
                list.forEach(oid => {
                    pg.types.setTypeParser(oid, parser);
                    delete this.pgdbTypeParsers[oid];
                    this.knownOids[oid] = true;
                });
            }
        } catch (e) {
            throw Error('Not existing type: ' + typeName);
        }
    }
    /** Like setTypeParser, but scoped to this PgDb instance instead of the global pg driver. */
    async setPgDbTypeParser(typeName: string, parser: (string) => any, schemaName?: string): Promise<void> {
        try {
            if (schemaName) {
                let oid = await this.queryOneField(GET_OID_FOR_COLUMN_TYPE_FOR_SCHEMA, { typeName, schemaName });
                this.pgdbTypeParsers[oid] = parser;
                this.knownOids[oid] = true;
            } else {
                let list = await this.queryOneColumn(GET_OID_FOR_COLUMN_TYPE, { typeName });
                list.forEach(oid => {
                    this.pgdbTypeParsers[oid] = parser;
                    this.knownOids[oid] = true;
                });
            }
        } catch (e) {
            throw Error('Not existing type: ' + typeName);
        }
    }
    /** Register a best-guess parser for any result oids we have never seen before. */
    async resetMissingParsers(connection, oidList: number[]): Promise<void> {
        let unknownOids = oidList.filter(oid => !this.knownOids[oid]);
        if (unknownOids.length) {
            let fieldsData = await connection.query(
                `select oid, typcategory from pg_type where oid = ANY($1)`,
                [unknownOids]
            );
            fieldsData.rows.forEach(fieldData => {
                if (fieldData.typcategory == 'A') {
                    this.pgdbTypeParsers[fieldData.oid] = PgConverters.arraySplit;
                }
                this.knownOids[fieldData.oid] = true;
            });
        }
    }
    /** Returns a PgDb clone bound to one pooled connection (used for transactions). */
    async dedicatedConnectionBegin(): Promise<PgDb> {
        if (this.needToFixConnectionForListen()) {
            await this.runRestartConnectionForListen();
        }
        let pgDb = new PgDb(this);
        pgDb.connection = await this.pool.connect();
        pgDb.connection.on('error', QueryAble.connectionErrorListener); //When there is an error, then the actual called query will throw exception
        return pgDb;
    }
    /** Releases the dedicated connection (if any) back to the pool. */
    async dedicatedConnectionEnd(): Promise<PgDb> {
        if (this.connection) {
            this.connection.off('error', QueryAble.connectionErrorListener);
            try {
                await this.connection.release();
            } catch (err) {
                this.getLogger().error('Error while dedicated connection end.', err);
            }
            this.connection = null;
        }
        return this;
    }
    /**
     * transaction save point
     * https://www.postgresql.org/docs/current/sql-savepoint.html
     */
    async savePoint(name: string): Promise<PgDb> {
        if (this.isTransactionActive()) {
            name = (name || '').replace(/"/g, ''); // strip quotes so the identifier stays valid
            await this.query(`SAVEPOINT "${name}"`);
        } else {
            throw Error('No active transaction');
        }
        return this;
    }
    /**
     * "RELEASE SAVEPOINT" - remove transaction save point
     * https://www.postgresql.org/docs/current/sql-savepoint.html
     */
    async savePointRelease(name: string): Promise<PgDb> {
        if (this.isTransactionActive()) {
            name = (name || '').replace(/"/g, '');
            await this.query(`RELEASE SAVEPOINT "${name}"`);
        } else {
            throw Error('No active transaction');
        }
        return this;
    }
    /** Starts a transaction on a dedicated connection and returns the bound PgDb. */
    async transactionBegin(options?: { isolationLevel?: TranzactionIsolationLevel, deferrable?: boolean, readOnly?: boolean }): Promise<PgDb> {
        let pgDb = this.connection ? this : await this.dedicatedConnectionBegin();
        let q = 'BEGIN'
        if (options?.isolationLevel) {
            q += ' ISOLATION LEVEL ' + options.isolationLevel;
        }
        if (options?.readOnly) {
            q += ' READ ONLY';
        }
        if (options?.deferrable) {
            q += ' DEFERRABLE ';
        }
        await pgDb.query(q);
        return pgDb;
    }
    /** COMMIT and release the dedicated connection. */
    async transactionCommit(): Promise<PgDb> {
        await this.query('COMMIT');
        return this.dedicatedConnectionEnd();
    }
    /**
     * ROLLBACK (releasing the connection), or roll back only to a save point
     * while keeping the transaction open.
     */
    async transactionRollback(options?: { savePoint?: string }): Promise<PgDb> {
        if (options?.savePoint) {
            let name = (options.savePoint || '').replace(/"/g, '');
            await this.query(`ROLLBACK TO SAVEPOINT "${name}"`);
            return this;
        } else {
            await this.query('ROLLBACK');
            return this.dedicatedConnectionEnd();
        }
    }
    /** A dedicated connection is only held while a transaction is open. */
    isTransactionActive(): boolean {
        return this.connection != null;
    }
    /**
     * Execute the statements of an SQL file one by one on a dedicated connection.
     * The file is streamed line by line and tokenized (quotes, dollar-quoting and
     * comments are respected); an optional transformer can rewrite each statement.
     */
    async execute(fileName: string, statementTransformerFunction?: (string) => string): Promise<void> {
        let isTransactionInPlace = this.isTransactionActive();
        // statements must be run in a dedicated connection
        let pgdb = isTransactionInPlace ? this : await this.dedicatedConnectionBegin();
        /** run statements one after the other */
        let runStatementList = (statementList) => {
            //this.getLogger(true).log('consumer start', commands.length);
            return new Promise((resolve, reject) => {
                let currentStatement = 0;
                let runStatement = () => {
                    //this.getLogger(true).log('commnads length', commands.length, i);
                    if (statementList.length == currentStatement) {
                        resolve(undefined);
                    } else {
                        let statement = statementList[currentStatement++];
                        if (statementTransformerFunction) {
                            statement = statementTransformerFunction(statement);
                        }
                        this.getLogger(true).log('run', statementList[currentStatement - 1]);
                        pgdb.query(statement)
                            .then(() => runStatement(), reject)
                            .catch(reject);
                    }
                };
                runStatement();
            }).catch((e) => {
                this.getLogger(true).error(e);
                throw e;
            });
        };
        let lineCounter = 0;
        let promise = new Promise<void>((resolve, reject) => {
            let statementList = [];
            let tmp = '', t: RegExpExecArray;
            let consumer;
            let inQuotedString: string;
            let rl = readline.createInterface({
                input: fs.createReadStream(fileName),
                terminal: false
            }).on('line', (line) => {
                lineCounter++;
                try {
                    //console.log('Line: ' + line);
                    while (t = SQL_TOKENIZER_REGEXP.exec(line)) {
                        if (!inQuotedString && (t[0] == '"' || t[0] == "'") || inQuotedString == '"' || inQuotedString == "'") {
                            // Inside (or entering/leaving) a single/double quoted string.
                            if (!inQuotedString) {
                                inQuotedString = t[0];
                            } else if (inQuotedString == t[0]) {
                                inQuotedString = null;
                            }
                            tmp += t[0];
                        } else if (!inQuotedString && t[0] == '$' || inQuotedString && inQuotedString[0] == '$') {
                            // Dollar-quoted string ($tag$ ... $tag$).
                            if (!inQuotedString) {
                                let s = line.slice(SQL_TOKENIZER_REGEXP.lastIndex - 1);
                                let token = s.match(SQL_$_ESCAPE_REGEXP);
                                if (!token) {
                                    throw Error('Invalid sql in line: ' + line);
                                }
                                inQuotedString = token[0];
                                SQL_TOKENIZER_REGEXP.lastIndex += inQuotedString.length - 1;
                                tmp += inQuotedString;
                            } else {
                                tmp += t[0];
                                if (tmp.endsWith(inQuotedString)) {
                                    inQuotedString = null;
                                }
                            }
                        } else if (!inQuotedString && t[0] == '/*' || inQuotedString == '/*') {
                            // Block comment: swallow tokens until '*/'.
                            if (!inQuotedString) {
                                inQuotedString = t[0];
                            } else if (t[0] == '*/') {
                                inQuotedString = null;
                            }
                        } else if (!inQuotedString && t[0] == '--') {
                            // Line comment: ignore the rest of the line.
                            line = '';
                        } else if (!inQuotedString && t[0] == ';') {
                            //console.log('push ' + tmp);
                            if (tmp.trim() != '') {
                                statementList.push(tmp);
                                if (!consumer) {
                                    // Pause reading while the queued statements run.
                                    consumer = runStatementList(statementList).then(() => {
                                        // console.log('consumer done');
                                        consumer = null;
                                        statementList.length = 0;
                                        rl.resume();
                                    }, reject);
                                    rl.pause();
                                }
                            }
                            tmp = '';
                        } else {
                            tmp += t[0];
                        }
                    }
                    if (tmp && line) {
                        tmp += '\n';
                    }
                } catch (e) {
                    reject(e);
                }
            }).on('close', () => {
                if (inQuotedString) {
                    reject(Error('Invalid SQL, unterminated string'));
                }
                //if the last statement did't have ';'
                if (tmp.trim() != '') {
                    statementList.push(tmp);
                }
                if (!consumer) {
                    if (statementList.length) {
                        consumer = runStatementList(statementList).catch(reject);
                    } else {
                        resolve();
                    }
                }
                if (consumer) {
                    consumer = consumer.then(resolve, reject);
                }
            });
        });
        let error;
        return promise
            .catch((e) => {
                error = e;
                this.getLogger(true).error('Error at line ' + lineCounter + ' in ' + fileName + '. ' + e);
            })
            .then(() => {
                // finally
                //if transaction was in place, don't touch it
                if (!isTransactionInPlace) {
                    return pgdb.dedicatedConnectionEnd();
                }
            }).catch((e) => {
                this.getLogger(true).error(e);
            }).then(() => {
                if (error) {
                    throw error;
                }
                // console.log('connection released');
            });
    }
    private listeners = new EventEmitter();
    private connectionForListen;
    private _needToRestartConnectionForListen = false;
    private restartConnectionForListen: Promise<Error> = null;
    /**
     * LISTEN to a channel for a NOTIFY (https://www.postgresql.org/docs/current/sql-listen.html)
     * One connection will be dedicated for listening if there are any listeners.
     * When there is no other callback for a channel, LISTEN command is executed
     */
    async listen(channel: string, callback: (notification: Notification) => void) {
        let restartConnectionError: Error = null;
        if (this.needToFixConnectionForListen()) {
            restartConnectionError = await this.runRestartConnectionForListen();
        }
        if (this.listeners.listenerCount(channel)) {
            // Channel already LISTENed - just add the callback.
            this.listeners.on(channel, callback);
        } else {
            if (restartConnectionError) {
                throw restartConnectionError;
            }
            try {
                if (!this.connectionForListen) {
                    await this.initConnectionForListen();
                }
                await this.connectionForListen.query(`LISTEN "${channel}"`);
            } catch (err) {
                this._needToRestartConnectionForListen = true;
                throw err;
            }
            this.listeners.on(channel, callback);
        }
    }
    /**
     * Remove a callback which listening on a channel
     * When all callback is removed from a channel UNLISTEN command is executed
     * When all callback is removed from all channel, dedicated connection is released
     */
    async unlisten(channel: string, callback?: (Notification) => void) {
        let restartConnectionError: Error = null;
        if (this.needToFixConnectionForListen()) {
            restartConnectionError = await this.runRestartConnectionForListen();
        }
        if (callback && this.listeners.listenerCount(channel) > 1) {
            this.listeners.removeListener(channel, callback);
        } else {
            if (restartConnectionError) {
                throw restartConnectionError;
            }
            try {
                await this.internalQuery({ connection: this.connectionForListen, sql: `UNLISTEN "${channel}"` });
                if (this.listeners.eventNames().length == 1) {
                    // Last channel gone - release the dedicated listening connection.
                    this.connectionForListen.removeAllListeners('notification');
                    this.connectionForListen.release();
                    this.connectionForListen = null;
                }
            } catch (err) {
                this._needToRestartConnectionForListen = true;
                throw err;
            }
            this.listeners.removeAllListeners(channel);
        }
    }
    /**
     * Notify a channel (https://www.postgresql.org/docs/current/sql-notify.html)
     */
    async notify(channel: string, payload?: string) {
        if (this.needToFixConnectionForListen()) {
            let restartConnectionError = await this.runRestartConnectionForListen();
            if (restartConnectionError) {
                throw restartConnectionError;
            }
        }
        let hasConnectionForListen = !!this.connectionForListen;
        let connection = this.connectionForListen || this.connection;
        //let sql = 'NOTIFY ' + channel + ', :payload';
        let sql = 'SELECT pg_notify(:channel, :payload)';
        let params = { channel, payload };
        try {
            return this.internalQuery({ connection, sql, params });
        } catch (err) {
            if (hasConnectionForListen) {
                this._needToRestartConnectionForListen = true;
            }
            throw err;
        }
    }
    /**
     * Replace the broken listening connection with a fresh one and re-LISTEN
     * every channel. Concurrent callers share the same restart promise.
     * Returns the error if restarting failed, null on success.
     */
    async runRestartConnectionForListen(): Promise<Error> {
        if (!this._needToRestartConnectionForListen) {
            return null;
        }
        let errorResult: Error = null;
        if (!this.restartConnectionForListen) {
            this.restartConnectionForListen = (async () => {
                let eventNames = this.listeners.eventNames();
                try {
                    // BUGFIX: detach our handlers from the broken connection before
                    // releasing it (the original code called .on() here, which
                    // re-registered them on a client about to be discarded).
                    this.connectionForListen.off('notification', this.notificationListener );
                    this.connectionForListen.off('error',this.errorListener);
                } catch (e) {}
                try{
                    await this.connectionForListen.release();
                } catch (e) {}
                this.connectionForListen = null;
                let error: Error;
                if (eventNames.length) {
                    try {
                        await this.initConnectionForListen();
                        for (let channel of eventNames) {
                            await this.connectionForListen.query(`LISTEN "${channel as string}"`);
                        }
                    } catch (err) {
                        error = err;
                    }
                }
                return error;
            })();
            errorResult = await this.restartConnectionForListen;
            this.restartConnectionForListen = null;
        } else {
            errorResult = await this.restartConnectionForListen;
        }
        if (!errorResult) {
            this._needToRestartConnectionForListen = false;
        }
        return errorResult;
    }
    needToFixConnectionForListen(): boolean {
        return this._needToRestartConnectionForListen;
    }
    // Retries the restart every second until it succeeds (recursion = retry loop).
    private async tryToFixConnectionForListenActively() {
        await new Promise(r => setTimeout(r, 1000));
        let error = await this.runRestartConnectionForListen();
        if (error) {
            await this.tryToFixConnectionForListenActively();
        }
    }
    // Fan a pg 'notification' event out to the callbacks registered for its channel.
    notificationListener = (notification: Notification) => this.listeners.emit(notification.channel, notification)
    errorListener = (e) => {
        this._needToRestartConnectionForListen = true;
        this.tryToFixConnectionForListenActively();
    };
    private async initConnectionForListen() {
        this.connectionForListen = await this.pool.connect();
        this.connectionForListen.on('notification', this.notificationListener );
        this.connectionForListen.on('error',this.errorListener);
    }
}
export default PgDb; | the_stack |
import type ws from 'ws'
import type { webpack5 as webpack } from 'next/dist/compiled/webpack/webpack'
import type { NextConfigComplete } from '../config-shared'
import { EventEmitter } from 'events'
import { findPageFile } from '../lib/find-page-file'
import { runDependingOnPageType } from '../../build/entries'
import { join, posix } from 'path'
import { normalizePathSep } from '../../shared/lib/page-path/normalize-path-sep'
import { normalizePagePath } from '../../shared/lib/page-path/normalize-page-path'
import { ensureLeadingSlash } from '../../shared/lib/page-path/ensure-leading-slash'
import { removePagePathTail } from '../../shared/lib/page-path/remove-page-path-tail'
import { pageNotFoundError } from '../require'
import { reportTrigger } from '../../build/output'
import getRouteFromEntrypoint from '../get-route-from-entrypoint'
import { serverComponentRegex } from '../../build/webpack/loaders/utils'
import { MIDDLEWARE_FILE, MIDDLEWARE_FILENAME } from '../../lib/constants'
import { getPageStaticInfo } from '../../build/analysis/get-page-static-info'
// Build-status markers for on-demand entries (symbols so they can't collide
// with any string status).
export const ADDED = Symbol('added')
export const BUILDING = Symbol('building')
export const BUILT = Symbol('built')
// Module-level registry of all pages known to the on-demand entry handler.
export const entries: {
  /**
   * The key composed of the compiler name and the page. For example:
   * `edge-server/about`
   */
  [page: string]: {
    /**
     * The absolute page to the page file. For example:
     * `/Users/Rick/project/pages/about/index.js`
     */
    absolutePagePath: string
    /**
     * Path to the page file relative to the dist folder with no extension.
     * For example: `pages/about/index`
     */
    bundlePath: string
    /**
     * Client entry loader and query parameters when RSC is enabled.
     */
    clientLoader?: string
    /**
     * Tells if a page is scheduled to be disposed.
     */
    dispose?: boolean
    /**
     * Timestamp with the last time the page was active.
     */
    lastActiveTime?: number
    /**
     * Page build status.
     */
    status?: typeof ADDED | typeof BUILDING | typeof BUILT
  }
} = {}
// Singleton set by onDemandEntryHandler(); undefined until the handler runs.
let invalidator: Invalidator
export const getInvalidator = () => invalidator
/**
 * Wires up on-demand entry compilation for the dev server: tracks which pages
 * are being built, answers client "ping" messages over HMR, and disposes
 * entries that have been inactive for longer than `maxInactiveAge`.
 *
 * Returns an object with `ensurePage` (add/build a page entry on demand) and
 * `onHMR` (attach the ping/pong protocol to a websocket client).
 */
export function onDemandEntryHandler({
  maxInactiveAge,
  multiCompiler,
  nextConfig,
  pagesBufferLength,
  pagesDir,
  rootDir,
  appDir,
  watcher,
}: {
  maxInactiveAge: number
  multiCompiler: webpack.MultiCompiler
  nextConfig: NextConfigComplete
  pagesBufferLength: number
  pagesDir: string
  rootDir: string
  appDir?: string
  watcher: any
}) {
  invalidator = new Invalidator(watcher)
  const doneCallbacks: EventEmitter | null = new EventEmitter()
  // Most-recently-pinged page keys, newest first, capped at pagesBufferLength.
  const lastClientAccessPages = ['']

  // Mark the invalidator as "building" whenever any compiler starts a pass.
  const startBuilding = (_compilation: webpack.Compilation) => {
    invalidator.startBuilding()
  }
  for (const compiler of multiCompiler.compilers) {
    compiler.hooks.make.tap('NextJsOnDemandEntries', startBuilding)
  }

  // Converts a compiler's entrypoints into `<type><route>` keys matching the
  // shape used in the `entries` registry (e.g. `client/about`).
  function getPagePathsFromEntrypoints(
    type: 'client' | 'server' | 'edge-server',
    entrypoints: Map<string, { name?: string }>,
    root?: boolean
  ) {
    const pagePaths: string[] = []
    for (const entrypoint of entrypoints.values()) {
      const page = getRouteFromEntrypoint(entrypoint.name!, root)
      if (page) {
        pagePaths.push(`${type}${page}`)
      } else if (root && entrypoint.name === 'root') {
        pagePaths.push(`${type}/${entrypoint.name}`)
      } else if (entrypoint.name === MIDDLEWARE_FILENAME) {
        pagePaths.push(`${type}/${entrypoint.name}`)
      }
    }
    return pagePaths
  }

  // After each multi-compiler pass, mark BUILDING entries as BUILT and notify
  // any `ensurePage` waiters via doneCallbacks.
  multiCompiler.hooks.done.tap('NextJsOnDemandEntries', (multiStats) => {
    // A rebuild is already queued; skip the bookkeeping and let the queued
    // invalidation run.
    if (invalidator.rebuildAgain) {
      return invalidator.doneBuilding()
    }
    const [clientStats, serverStats, edgeServerStats] = multiStats.stats
    const root = !!appDir
    const pagePaths = [
      ...getPagePathsFromEntrypoints(
        'client',
        clientStats.compilation.entrypoints,
        root
      ),
      ...getPagePathsFromEntrypoints(
        'server',
        serverStats.compilation.entrypoints,
        root
      ),
      ...(edgeServerStats
        ? getPagePathsFromEntrypoints(
            'edge-server',
            edgeServerStats.compilation.entrypoints,
            root
          )
        : []),
    ]

    for (const page of pagePaths) {
      const entry = entries[page]
      if (!entry) {
        continue
      }

      if (entry.status !== BUILDING) {
        continue
      }

      entry.status = BUILT
      doneCallbacks!.emit(page)
    }

    invalidator.doneBuilding()
  })

  // Poll for inactive entries at a rate derived from maxInactiveAge,
  // clamped to the 1s–5s range.
  const pingIntervalTime = Math.max(1000, Math.min(5000, maxInactiveAge))

  setInterval(function () {
    disposeInactiveEntries(lastClientAccessPages, maxInactiveAge)
  }, pingIntervalTime + 1000).unref()

  // Handles a client "ping" for a page: refreshes the entry's activity
  // timestamp and reports whether the client must re-request the page.
  function handlePing(pg: string) {
    const page = normalizePathSep(pg)
    const pageKey = `client${page}`
    const entryInfo = entries[pageKey]

    // If there's no entry, it may have been invalidated and needs to be re-built.
    if (!entryInfo) {
      // if (page !== lastEntry) client pings, but there's no entry for page
      return { invalid: true }
    }

    // 404 is an on demand entry but when a new page is added we have to refresh the page
    const toSend = page === '/_error' ? { invalid: true } : { success: true }

    // We don't need to maintain active state of anything other than BUILT entries
    // NOTE(review): returning undefined here means the client gets a bare
    // `{ event: 'pong' }` with neither `success` nor `invalid` — presumably
    // intentional; confirm against the HMR client's handling.
    if (entryInfo.status !== BUILT) return

    // If there's an entryInfo
    if (!lastClientAccessPages.includes(pageKey)) {
      lastClientAccessPages.unshift(pageKey)

      // Maintain the buffer max length
      if (lastClientAccessPages.length > pagesBufferLength) {
        lastClientAccessPages.pop()
      }
    }
    entryInfo.lastActiveTime = Date.now()
    entryInfo.dispose = false
    return toSend
  }

  return {
    // Resolves the page's file, registers client/server/edge entries as
    // needed, triggers a compile, and resolves once the entry is built.
    async ensurePage(page: string, clientOnly: boolean) {
      const pagePathData = await findPagePathData(
        rootDir,
        pagesDir,
        page,
        nextConfig.pageExtensions,
        appDir
      )

      let entryAdded = false

      const addPageEntry = (type: 'client' | 'server' | 'edge-server') => {
        return new Promise<void>((resolve, reject) => {
          const isServerComponent = serverComponentRegex.test(
            pagePathData.absolutePagePath
          )

          const pageKey = `${type}${pagePathData.page}`

          if (entries[pageKey]) {
            // Entry already exists: keep it alive and resolve immediately if
            // it has already been built.
            entries[pageKey].dispose = false
            entries[pageKey].lastActiveTime = Date.now()
            if (entries[pageKey].status === BUILT) {
              resolve()
              return
            }
          } else {
            if (type === 'client' && isServerComponent) {
              // Skip adding the client entry here.
            } else {
              entryAdded = true
              entries[pageKey] = {
                absolutePagePath: pagePathData.absolutePagePath,
                bundlePath: pagePathData.bundlePath,
                dispose: false,
                lastActiveTime: Date.now(),
                status: ADDED,
              }
            }
          }

          // Resolved/rejected by the compiler `done` hook above.
          doneCallbacks!.once(pageKey, (err: Error) => {
            if (err) return reject(err)
            resolve()
          })
        })
      }

      const staticInfo = await getPageStaticInfo({
        pageFilePath: pagePathData.absolutePagePath,
        nextConfig,
      })

      const promises = runDependingOnPageType({
        page: pagePathData.page,
        pageRuntime: staticInfo.runtime,
        onClient: () => addPageEntry('client'),
        onServer: () => addPageEntry('server'),
        onEdgeServer: () => addPageEntry('edge-server'),
      })

      if (entryAdded) {
        reportTrigger(
          !clientOnly && promises.length > 1
            ? `${pagePathData.page} (client and server)`
            : pagePathData.page
        )
        invalidator.invalidate()
      }

      return Promise.all(promises)
    },

    // Attaches the ping/pong keep-alive protocol to an HMR websocket client.
    onHMR(client: ws) {
      client.addEventListener('message', ({ data }) => {
        try {
          const parsedData = JSON.parse(
            typeof data !== 'string' ? data.toString() : data
          )

          if (parsedData.event === 'ping') {
            const result = handlePing(parsedData.page)
            client.send(
              JSON.stringify({
                ...result,
                event: 'pong',
              })
            )
          }
        } catch (_) {}
      })
    },
  }
}
function disposeInactiveEntries(
lastClientAccessPages: string[],
maxInactiveAge: number
) {
Object.keys(entries).forEach((page) => {
const { lastActiveTime, status, dispose } = entries[page]
// Skip pages already scheduled for disposing
if (dispose) return
// This means this entry is currently building or just added
// We don't need to dispose those entries.
if (status !== BUILT) return
// We should not build the last accessed page even we didn't get any pings
// Sometimes, it's possible our XHR ping to wait before completing other requests.
// In that case, we should not dispose the current viewing page
if (lastClientAccessPages.includes(page)) return
if (lastActiveTime && Date.now() - lastActiveTime > maxInactiveAge) {
entries[page].dispose = true
}
})
}
// Serializes watcher invalidations so at most one webpack rebuild is in
// flight at a time. Overlapping invalidations would change the webpack hash
// mid-build and force the client into a hard reload.
class Invalidator {
  /** Set when a second invalidation arrives during an active build. */
  public rebuildAgain = false
  /** True while any compiler pass is in progress. */
  private building = false

  constructor(private watcher: any) {}

  invalidate() {
    // Aborting an in-flight build would cause a client-side hard reload, so
    // remember the request instead and replay it once the current build
    // finishes — all queued pages are then rebuilt in one pass.
    if (this.building) {
      this.rebuildAgain = true
      return
    }

    this.building = true
    this.watcher.invalidate()
  }

  startBuilding() {
    this.building = true
  }

  doneBuilding() {
    this.building = false

    if (this.rebuildAgain) {
      this.rebuildAgain = false
      this.invalidate()
    }
  }
}
/**
 * Attempts to find a page file path from the given pages absolute directory,
 * a page and allowed extensions. If the page can't be found it will throw an
 * error. It defaults the `/_error` page to Next.js internal error page.
 *
 * @param rootDir Absolute path to the project root.
 * @param pagesDir Absolute path to the pages folder with trailing `/pages`.
 * @param page The page normalized (it will be denormalized).
 * @param extensions Array of page extensions.
 * @param appDir Optional absolute path to the `app` folder; searched before
 *   `pagesDir` when provided.
 */
async function findPagePathData(
  rootDir: string,
  pagesDir: string,
  page: string,
  extensions: string[],
  appDir?: string
) {
  const normalizedPagePath = tryToNormalizePagePath(page)
  let pagePath: string | null = null

  // Middleware lives at the project root, not inside the pages directory.
  if (normalizedPagePath === MIDDLEWARE_FILE) {
    pagePath = await findPageFile(rootDir, normalizedPagePath, extensions)

    if (!pagePath) {
      throw pageNotFoundError(normalizedPagePath)
    }

    const pageUrl = ensureLeadingSlash(
      removePagePathTail(normalizePathSep(pagePath), {
        extensions,
      })
    )

    return {
      absolutePagePath: join(rootDir, pagePath),
      bundlePath: normalizedPagePath.slice(1),
      page: posix.normalize(pageUrl),
    }
  }

  // Check appDir first falling back to pagesDir
  if (appDir) {
    pagePath = await findPageFile(appDir, normalizedPagePath, extensions)
    if (pagePath) {
      const pageUrl = ensureLeadingSlash(
        removePagePathTail(normalizePathSep(pagePath), {
          keepIndex: true,
          extensions,
        })
      )

      return {
        absolutePagePath: join(appDir, pagePath),
        bundlePath: posix.join('app', normalizePagePath(pageUrl)),
        page: posix.normalize(pageUrl),
      }
    }
  }

  if (!pagePath) {
    pagePath = await findPageFile(pagesDir, normalizedPagePath, extensions)
  }

  if (pagePath !== null) {
    const pageUrl = ensureLeadingSlash(
      removePagePathTail(normalizePathSep(pagePath), {
        extensions,
      })
    )

    return {
      absolutePagePath: join(pagesDir, pagePath),
      bundlePath: posix.join('pages', normalizePagePath(pageUrl)),
      page: posix.normalize(pageUrl),
    }
  }

  // No file on disk: fall back to the built-in error page for `/_error`.
  if (page === '/_error') {
    return {
      absolutePagePath: require.resolve('next/dist/pages/_error'),
      bundlePath: page,
      page: normalizePathSep(page),
    }
  } else {
    throw pageNotFoundError(normalizedPagePath)
  }
}
function tryToNormalizePagePath(page: string) {
try {
return normalizePagePath(page)
} catch (err) {
console.error(err)
throw pageNotFoundError(page)
}
} | the_stack |
import { FabricConnection } from '../src/FabricConnection';
import * as chai from 'chai';
import * as chaiAsPromised from 'chai-as-promised';
import * as sinon from 'sinon';
import * as sinonChai from 'sinon-chai';
import * as fabricClientCA from 'fabric-ca-client';
import { Gateway, Wallet } from 'fabric-network';
import { OutputAdapter, LogType } from 'ibm-blockchain-platform-common';
import { FabricWallet } from 'ibm-blockchain-platform-wallet';
import * as path from 'path';
import { Client, Channel } from 'fabric-common';
import { LifecyclePeer, LifecycleChannel, Lifecycle } from 'ibm-blockchain-platform-fabric-admin';
// Wire up chai's should-style assertions and plugins once for the suite.
chai.should();
chai.use(sinonChai);
chai.use(chaiAsPromised);

// tslint:disable no-unused-expression
// tslint:disable no-use-before-declare
describe('FabricConnection', () => {

    // Minimal singleton OutputAdapter used as the connection's log sink.
    class TestOutputAdapter extends OutputAdapter {

        public static instance(): TestOutputAdapter {
            return TestOutputAdapter._instance;
        }

        private static _instance: TestOutputAdapter = new TestOutputAdapter();

        // Private: instances are only reachable via instance().
        private constructor() {
            super();
        }

        public log(type: LogType, popupMessage: string, outputMessage?: string, stackTrace?: string): void {
            super.log(type, popupMessage, outputMessage, stackTrace);
        }
    }
    // Concrete FabricConnection subclass whose connect() injects the stubbed
    // gateway before delegating to the real connection logic.
    // tslint:disable-next-line: max-classes-per-file
    class TestFabricConnection extends FabricConnection {

        private connectionProfile: any;

        constructor(connectionProfilePath: string, connectionProfile: any, outputAdapter: OutputAdapter) {
            super(connectionProfilePath, outputAdapter);
            this.connectionProfile = connectionProfile;
        }

        async connect(wallet: FabricWallet, identityName: string, requestTimeout: number): Promise<void> {
            // Replace the real gateway with the sinon stub before connecting.
            this['gateway'] = fabricGatewayStub as unknown as Gateway;
            await this.connectInner(this.connectionProfile, wallet.getWallet(), identityName, requestTimeout);
        }
    }
    // Shared fixtures, (re)initialized in beforeEach.
    let mySandBox: sinon.SinonSandbox;
    let fabricClientStub: sinon.SinonStubbedInstance<Client>;
    let fabricGatewayStub: sinon.SinonStubbedInstance<Gateway>;
    let fabricConnection: TestFabricConnection;
    let fabricChannelStub: sinon.SinonStubbedInstance<Channel>;
    let fabricCAStub: sinon.SinonStubbedInstance<fabricClientCA>;
    let mockWallet: sinon.SinonStubbedInstance<Wallet>;
    const mockIdentityName: string = 'admin';
    let fabricWallet: FabricWallet;
    // Commit timeout (seconds) passed to connect() in most tests.
    const timeout: number = 120;
    beforeEach(async () => {
        mySandBox = sinon.createSandbox();

        // Default connection profile: one peer, one orderer, one CA, and two
        // channels (one of which lists no peers).
        const connectionProfile: any = {
            peers: {
                'peer0.org1.example.com': {
                    url: 'grpc://localhost:7051'
                }
            },
            orderers: {
                'orderer.example.com': {
                    url: 'grpc://localhost:7050'
                }
            },
            channels: {
                'channel-from-the-ccp-no-peers': {
                },
                'channel-from-the-ccp': {
                    peers: {
                        'peer0.org1.example.com': {
                        }
                    }
                }
            },
            certificateAuthorities: {
                'ca.org1.example.com': {
                    url: 'https://localhost:7054'
                }
            }
        };

        // Wallet returns a single 'admin' identity.
        mockWallet = mySandBox.createStubInstance(Wallet);
        mockWallet.list.resolves([{ label: 'admin' }]);
        fabricWallet = await FabricWallet.newFabricWallet(path.join(__dirname, 'tmp', 'wallet'));
        mySandBox.stub(fabricWallet, 'getWallet').returns(mockWallet);

        fabricConnection = new TestFabricConnection('/tmp/somepath.json', connectionProfile, null);

        // Client reports two endorsing peers from different orgs; the first
        // carries extra gRPC options.
        fabricClientStub = mySandBox.createStubInstance(Client);
        fabricClientStub.getEndorsers.returns([{
            name: 'peer0.org1.example.com',
            mspid: 'Org1MSP',
            endpoint: {
                url: 'grpc://localhost:7051',
                options: {
                    'grpc.default_authority': 'batman.ibm.com',
                    'somewhere_over_the_rainbow': 'wat'
                }
            }
        }, {
            name: 'peer0.org2.example.com',
            mspid: 'Org2MSP',
            endpoint: {
                url: 'grpc://localhost:8051',
                options: {}
            }
        }]);

        // Gateway, CA, and channel stubs wired together so connect() succeeds
        // without touching a real network.
        fabricGatewayStub = mySandBox.createStubInstance(Gateway);
        fabricCAStub = mySandBox.createStubInstance(fabricClientCA);
        fabricCAStub.enroll.returns({ certificate: 'myCert', key: { toBytes: mySandBox.stub().returns('myKey') } });
        fabricCAStub.register.resolves('its a secret');
        fabricGatewayStub['client'] = fabricClientStub;
        fabricGatewayStub.connect.resolves();
        fabricGatewayStub.getOptions.returns({ wallet: mockWallet, identity: mockIdentityName });

        fabricChannelStub = mySandBox.createStubInstance(Channel);

        const fabricNetworkStub: any = {
            getChannel: mySandBox.stub().returns(fabricChannelStub)
        };
        fabricGatewayStub.getNetwork.returns(fabricNetworkStub);
        fabricGatewayStub.disconnect.returns(null);

        fabricConnection['gateway'] = fabricGatewayStub as unknown as Gateway;
        fabricConnection['outputAdapter'] = TestOutputAdapter.instance();

        // Establish the (stubbed) connection used by most tests.
        await fabricConnection.connect(fabricWallet, mockIdentityName, 120);
    });
    afterEach(() => {
        // Restore all stubs/spies so state never leaks between tests.
        mySandBox.restore();
    });
    describe('constructor', () => {
        it('should set output adapter', async () => {
            const adapter: TestOutputAdapter = TestOutputAdapter.instance();
            const connectionProfile: any = {
                orderers: {
                    'orderer.example.com': {
                        url: 'grpc://localhost:7050'
                    }
                }
            };
            const fabConnection: TestFabricConnection = new TestFabricConnection('/tmp/somepath.json', connectionProfile, adapter);
            fabConnection['outputAdapter'].should.deep.equal(adapter);
        });
    });
    // Each test below varies the connection profile (orderer/peer/CA; with a
    // localhost URL, a remote URL, or a malformed node with no `url` key) and
    // asserts that discovery.asLocalhost is enabled only when every node URL
    // points at localhost.
    describe('connect', () => {
        it('should use discovery as localhost for localhost orderer connections', async () => {
            const connectionProfile: any = {
                orderers: {
                    'orderer.example.com': {
                        url: 'grpc://localhost:7050'
                    }
                }
            };
            fabricConnection = new TestFabricConnection('/tmp/somepath.json', connectionProfile, null);
            await fabricConnection.connect(fabricWallet, mockIdentityName, timeout);
            fabricGatewayStub.connect.should.have.been.calledWith(connectionProfile, {
                wallet: mockWallet,
                identity: mockIdentityName,
                discovery: {
                    asLocalhost: true,
                    enabled: true
                },
                eventHandlerOptions: {
                    commitTimeout: timeout
                },
                queryHandlerOptions: {
                    strategy: sinon.match.any/*queryFactory.createQueryHandler*/
                }
            });
        });

        it('should not use discovery as localhost for remote orderer connections', async () => {
            const connectionProfile: any = {
                orderers: {
                    'orderer.example.com': {
                        url: 'grpc://192.168.1.1:7050'
                    }
                }
            };
            fabricConnection = new TestFabricConnection('/tmp/somepath.json', connectionProfile, null);
            await fabricConnection.connect(fabricWallet, mockIdentityName, timeout);
            fabricGatewayStub.connect.should.have.been.calledWith(connectionProfile, {
                wallet: mockWallet,
                identity: mockIdentityName,
                discovery: {
                    asLocalhost: false,
                    enabled: true
                },
                eventHandlerOptions: {
                    commitTimeout: timeout
                },
                queryHandlerOptions: {
                    strategy: sinon.match.any/*queryFactory.createQueryHandler*/
                }
            });
        });

        it('should not use discovery as localhost for dodgy orderer connections', async () => {
            // Orderer entry has no `url` property at all.
            const connectionProfile: any = {
                orderers: {
                    'orderer.example.com': {
                        missingUrl: 'grpc://192.168.1.1:7050'
                    }
                }
            };
            fabricConnection = new TestFabricConnection('/tmp/somepath.json', connectionProfile, null);
            await fabricConnection.connect(fabricWallet, mockIdentityName, timeout);
            fabricGatewayStub.connect.should.have.been.calledWith(connectionProfile, {
                wallet: mockWallet,
                identity: mockIdentityName,
                discovery: {
                    asLocalhost: false,
                    enabled: true
                },
                eventHandlerOptions: {
                    commitTimeout: timeout
                },
                queryHandlerOptions: {
                    strategy: sinon.match.any/*queryFactory.createQueryHandler*/
                }
            });
        });

        it('should use discovery as localhost for localhost peer connections', async () => {
            const connectionProfile: any = {
                peers: {
                    'peer0.org1.example.com': {
                        url: 'grpc://localhost:7051'
                    }
                }
            };
            fabricConnection = new TestFabricConnection('/tmp/somepath.json', connectionProfile, null);
            await fabricConnection.connect(fabricWallet, mockIdentityName, timeout);
            fabricGatewayStub.connect.should.have.been.calledWith(connectionProfile, {
                wallet: mockWallet,
                identity: mockIdentityName,
                discovery: {
                    asLocalhost: true,
                    enabled: true
                },
                eventHandlerOptions: {
                    commitTimeout: timeout
                },
                queryHandlerOptions: {
                    strategy: sinon.match.any/*queryFactory.createQueryHandler*/
                }
            });
        });

        it('should not use discovery as localhost for remote peer connections', async () => {
            const connectionProfile: any = {
                peers: {
                    'peer0.org1.example.com': {
                        url: 'grpc://192.168.1.1:7051'
                    }
                }
            };
            fabricConnection = new TestFabricConnection('/tmp/somepath.json', connectionProfile, null);
            await fabricConnection.connect(fabricWallet, mockIdentityName, timeout);
            fabricGatewayStub.connect.should.have.been.calledWith(connectionProfile, {
                wallet: mockWallet,
                identity: mockIdentityName,
                discovery: {
                    asLocalhost: false,
                    enabled: true
                },
                eventHandlerOptions: {
                    commitTimeout: timeout
                },
                queryHandlerOptions: {
                    strategy: sinon.match.any/*queryFactory.createQueryHandler*/
                }
            });
        });

        it('should not use discovery as localhost for dodgy peer connections', async () => {
            // Peer entry has no `url` property at all.
            const connectionProfile: any = {
                peers: {
                    'peer0.org1.example.com': {
                        missingUrl: 'grpc://192.168.1.1:7051'
                    }
                }
            };
            fabricConnection = new TestFabricConnection('/tmp/somepath.json', connectionProfile, null);
            await fabricConnection.connect(fabricWallet, mockIdentityName, timeout);
            fabricGatewayStub.connect.should.have.been.calledWith(connectionProfile, {
                wallet: mockWallet,
                identity: mockIdentityName,
                discovery: {
                    asLocalhost: false,
                    enabled: true
                },
                eventHandlerOptions: {
                    commitTimeout: timeout
                },
                queryHandlerOptions: {
                    strategy: sinon.match.any/*queryFactory.createQueryHandler*/
                }
            });
        });

        it('should use discovery as localhost for localhost certificate authority connections', async () => {
            const connectionProfile: any = {
                certificateAuthorities: {
                    'ca.org1.example.com': {
                        url: 'http://localhost:7054'
                    }
                }
            };
            fabricConnection = new TestFabricConnection('/tmp/somepath.json', connectionProfile, null);
            await fabricConnection.connect(fabricWallet, mockIdentityName, timeout);
            fabricGatewayStub.connect.should.have.been.calledWith(connectionProfile, {
                wallet: mockWallet,
                identity: mockIdentityName,
                discovery: {
                    asLocalhost: true,
                    enabled: true
                },
                eventHandlerOptions: {
                    commitTimeout: timeout
                },
                queryHandlerOptions: {
                    strategy: sinon.match.any/*queryFactory.createQueryHandler*/
                }
            });
        });

        it('should not use discovery as localhost for remote certificate authority connections', async () => {
            const connectionProfile: any = {
                certificateAuthorities: {
                    'ca.org1.example.com': {
                        url: 'http://192.168.1.1:7054'
                    }
                }
            };
            fabricConnection = new TestFabricConnection('/tmp/somepath.json', connectionProfile, null);
            await fabricConnection.connect(fabricWallet, mockIdentityName, timeout);
            fabricGatewayStub.connect.should.have.been.calledWith(connectionProfile, {
                wallet: mockWallet,
                identity: mockIdentityName,
                discovery: {
                    asLocalhost: false,
                    enabled: true
                },
                eventHandlerOptions: {
                    commitTimeout: timeout
                },
                queryHandlerOptions: {
                    strategy: sinon.match.any/*queryFactory.createQueryHandler*/
                }
            });
        });

        it('should not use discovery as localhost for dodgy certificate authority connections', async () => {
            // CA entry has no `url` property at all.
            const connectionProfile: any = {
                certificateAuthorities: {
                    'ca.org1.example.com': {
                        missingUrl: 'http://192.168.1.1:7054'
                    }
                }
            };
            fabricConnection = new TestFabricConnection('/tmp/somepath.json', connectionProfile, null);
            await fabricConnection.connect(fabricWallet, mockIdentityName, timeout);
            fabricGatewayStub.connect.should.have.been.calledWith(connectionProfile, {
                wallet: mockWallet,
                identity: mockIdentityName,
                discovery: {
                    asLocalhost: false,
                    enabled: true
                },
                eventHandlerOptions: {
                    commitTimeout: timeout
                },
                queryHandlerOptions: {
                    strategy: sinon.match.any/*queryFactory.createQueryHandler*/
                }
            });
        });
    });
    describe('getAllPeerNames', () => {
        it('should get all the names of the peer', async () => {
            // Names come from the two endorsers stubbed in beforeEach.
            const peerNames: Array<string> = fabricConnection.getAllPeerNames();
            peerNames.should.deep.equal(['peer0.org1.example.com', 'peer0.org2.example.com']);
        });
    });
    describe('getAllChannelsForPeer', () => {
        it('should get all the channels a peer has joined', async () => {
            mySandBox.stub(LifecyclePeer.prototype, 'getAllChannelNames').resolves(['channel-one', 'channel-two']);
            await fabricConnection.connect(fabricWallet, mockIdentityName, timeout);
            const channelNames: Array<string> = await fabricConnection.getAllChannelsForPeer('peer0.org1.example.com');
            channelNames.should.deep.equal(['channel-one', 'channel-two']);
        });

        it('should use the connection profile for the list of channels if the peer says access is denied', async () => {
            // peer0.org1 appears under 'channel-from-the-ccp' in the profile.
            mySandBox.stub(LifecyclePeer.prototype, 'getAllChannelNames').rejects(new Error('blah access denied blah'));
            await fabricConnection.connect(fabricWallet, mockIdentityName, timeout);
            const channelNames: Array<string> = await fabricConnection.getAllChannelsForPeer('peer0.org1.example.com');
            channelNames.should.deep.equal(['channel-from-the-ccp']);
        });

        it('should rethrow the error if the peer says access is denied and there are no matching channels in the connection profile', async () => {
            // peer0.org2 is not listed under any channel in the profile.
            mySandBox.stub(LifecyclePeer.prototype, 'getAllChannelNames').rejects(new Error('blah access denied blah'));
            await fabricConnection.connect(fabricWallet, mockIdentityName, timeout);
            await fabricConnection.getAllChannelsForPeer('peer0.org2.example.com')
                .should.be.rejectedWith(/blah access denied blah/);
        });

        it('should rethrow any other error', async () => {
            mySandBox.stub(LifecyclePeer.prototype, 'getAllChannelNames').rejects(new Error('such error'));
            await fabricConnection.connect(fabricWallet, mockIdentityName, timeout);
            await fabricConnection.getAllChannelsForPeer('peer0.org1.example.com')
                .should.be.rejectedWith(/such error/);
        });
    });
    describe('getInstantiatedChaincode', () => {
        it('should get the instantiated chaincode from a v2 channel in the connection profile', async () => {
            // V2 channels use the committed-smart-contracts lifecycle API.
            const getChannelCapabilitiesStub: sinon.SinonStub = mySandBox.stub(LifecyclePeer.prototype, 'getChannelCapabilities');
            getChannelCapabilitiesStub.resolves(['V2_0']);
            mySandBox.stub(Lifecycle.prototype, 'getPeer').returns({
                getChannelCapabilities: getChannelCapabilitiesStub
            });

            // this is needed for creating a channel map
            const getAllChannelsForPeerStub: sinon.SinonStub = mySandBox.stub(fabricConnection, 'getAllChannelsForPeer');
            getAllChannelsForPeerStub.withArgs('peerOne').returns(['channel1']);
            getAllChannelsForPeerStub.withArgs('peerTwo').returns(['channel2']);
            mySandBox.stub(fabricConnection, 'getAllPeerNames').returns(['peerOne', 'peerTwo']);

            mySandBox.stub(LifecycleChannel.prototype, 'getAllCommittedSmartContracts').resolves([{ smartContractName: 'biscuit-network', smartContractVersion: '0.7', sequence: 2 }, { smartContractName: 'cake-network', smartContractVersion: '0.8', sequence: 3 }]);

            await fabricConnection.connect(fabricWallet, mockIdentityName, timeout);
            const instantiatedChaincodes: Array<any> = await fabricConnection.getInstantiatedChaincode('channel1');

            instantiatedChaincodes.should.deep.equal([{ name: 'biscuit-network', version: '0.7', sequence: 2 }, {
                name: 'cake-network',
                version: '0.8',
                sequence: 3
            }]);
        });

        it('should get the instantiated chaincode from a v1 channel in the connection profile', async () => {
            // V1 channels fall back to the legacy instantiated-contracts API,
            // reported with sequence -1.
            const getChannelCapabilitiesStub: sinon.SinonStub = mySandBox.stub(LifecyclePeer.prototype, 'getChannelCapabilities');
            getChannelCapabilitiesStub.resolves(['V1_4']);
            mySandBox.stub(Lifecycle.prototype, 'getPeer').returns({
                getChannelCapabilities: getChannelCapabilitiesStub
            });

            // this is needed for creating a channel map
            const getAllChannelsForPeerStub: sinon.SinonStub = mySandBox.stub(fabricConnection, 'getAllChannelsForPeer');
            getAllChannelsForPeerStub.withArgs('peerOne').returns(['channel1']);
            getAllChannelsForPeerStub.withArgs('peerTwo').returns(['channel2']);
            mySandBox.stub(fabricConnection, 'getAllPeerNames').returns(['peerOne', 'peerTwo']);

            mySandBox.stub(LifecycleChannel.prototype, 'getAllInstantiatedSmartContracts').resolves([{ smartContractName: 'grape-network', smartContractVersion: '0.8', sequence: -1 }, { smartContractName: 'fruit-network', smartContractVersion: '0.9', sequence: -1 }]);

            await fabricConnection.connect(fabricWallet, mockIdentityName, timeout);
            const instantiatedChaincodes: Array<any> = await fabricConnection.getInstantiatedChaincode('channel1');

            instantiatedChaincodes.should.deep.equal([
                {
                    name: 'grape-network',
                    version: '0.8',
                    sequence: -1
                },
                {
                    name: 'fruit-network',
                    version: '0.9',
                    sequence: -1
                }
            ]);
        });
    });
    describe('disconnect', () => {
        it('should disconnect from gateway', async () => {
            fabricConnection.disconnect();
            fabricGatewayStub.disconnect.should.have.been.called;
        });
    });
    describe('createChannelMap', () => {
        // TODO remove/modify this when a design for displaying v1 and v2 tree elements is decided
        it.skip('should create channel map', async () => {
            mySandBox.stub(fabricConnection, 'getAllPeerNames').returns(['peerOne', 'peerTwo']);
            const getChannelCapabilitiesStub: sinon.SinonStub = mySandBox.stub(LifecyclePeer.prototype, 'getChannelCapabilities');
            getChannelCapabilitiesStub.resolves(['V2_0']);
            mySandBox.stub(Lifecycle.prototype, 'getPeer').returns({
                getChannelCapabilities: getChannelCapabilitiesStub
            });
            const getAllChannelsForPeerStub: sinon.SinonStub = mySandBox.stub(fabricConnection, 'getAllChannelsForPeer');
            getAllChannelsForPeerStub.withArgs('peerOne').returns(['channel1']);
            getAllChannelsForPeerStub.withArgs('peerTwo').returns(['channel2']);

            const _map: Map<string, Array<string>> = new Map<string, Array<string>>();
            _map.set('channel1', ['peerOne']);
            _map.set('channel2', ['peerTwo']);

            const map: Map<string, Array<string>> = await fabricConnection.createChannelMap();
            getChannelCapabilitiesStub.should.have.been.calledTwice;
            map.should.deep.equal(_map);
        });

        it('should add any peers to channel map if the channel already exists', async () => {
            // peerOne joins both channels; channel2's peer list must contain
            // both peers.
            mySandBox.stub(fabricConnection, 'getAllPeerNames').returns(['peerOne', 'peerTwo']);
            const getChannelCapabilitiesStub: sinon.SinonStub = mySandBox.stub(LifecyclePeer.prototype, 'getChannelCapabilities');
            getChannelCapabilitiesStub.resolves(['V2_0']);
            mySandBox.stub(Lifecycle.prototype, 'getPeer').returns({
                getChannelCapabilities: getChannelCapabilitiesStub
            });
            const getAllChannelsForPeerStub: sinon.SinonStub = mySandBox.stub(fabricConnection, 'getAllChannelsForPeer');
            getAllChannelsForPeerStub.withArgs('peerOne').returns(['channel1', 'channel2']);
            getAllChannelsForPeerStub.withArgs('peerTwo').returns(['channel2']);

            const _map: Map<string, Array<string>> = new Map<string, Array<string>>();
            _map.set('channel1', ['peerOne']);
            _map.set('channel2', ['peerOne', 'peerTwo']);

            const map: Map<string, Array<string>> = await fabricConnection.createChannelMap();
            map.should.deep.equal(_map);
        });

        it('should handle gRPC connection errors', async () => {
            mySandBox.stub(fabricConnection, 'getAllPeerNames').returns(['peerOne', 'peerTwo']);
            const error: Error = new Error('Received http2 header with status: 503');
            mySandBox.stub(fabricConnection, 'getAllChannelsForPeer').throws(error);
            await fabricConnection.createChannelMap().should.be.rejectedWith(`Cannot connect to Fabric: ${error.message}`);
        });

        it('should handle no peers', async () => {
            mySandBox.stub(fabricConnection, 'getAllPeerNames').returns([]);
            await fabricConnection.createChannelMap().should.be.rejectedWith('Error querying channel list: Could not find any peers to query the list of channels from');
        });

        // TODO remove/modify this when a design for displaying v1 and v2 tree elements is decided
        it.skip('should throw error if channel is not using v2 capabilities ', async () => {
            mySandBox.stub(fabricConnection, 'getAllPeerNames').returns(['peerOne', 'peerTwo']);
            const getChannelCapabilitiesStub: sinon.SinonStub = mySandBox.stub(LifecyclePeer.prototype, 'getChannelCapabilities');
            getChannelCapabilitiesStub.resolves(['V1_4_3']);
            mySandBox.stub(Lifecycle.prototype, 'getPeer').returns({
                getChannelCapabilities: getChannelCapabilitiesStub
            });
            const getAllChannelsForPeerStub: sinon.SinonStub = mySandBox.stub(fabricConnection, 'getAllChannelsForPeer');
            getAllChannelsForPeerStub.withArgs('peerOne').returns(['channel1']);
            getAllChannelsForPeerStub.withArgs('peerTwo').returns(['channel2']);

            await fabricConnection.createChannelMap().should.be.rejectedWith(`Unable to connect to network, channel 'channel1' does not have V2_0 capabilities enabled.`);
            getChannelCapabilitiesStub.should.have.been.calledOnce;
        });

        it('should handle any other errors', async () => {
            const error: Error = new Error('some error');
            mySandBox.stub(fabricConnection, 'getAllPeerNames').throws(error);
            await fabricConnection.createChannelMap().should.be.rejectedWith(`Error querying channel list: ${error.message}`);
        });
    });
describe('getChannelPeersInfo', () => {
it('should get the channel peer names', async () => {
const channelName: string = 'theChannelName';
const peerNames: string[] = ['peerName1', 'peerName2'];
const mspIds: string[] = ['Org1MSP', 'Org2MSP'];
fabricChannelStub.getEndorsers.returns([{ name: peerNames[0], mspid: mspIds[0] }, { name: peerNames[1], mspid: mspIds[1] }]);
const channelPeersInfo: { name: string, mspID: string }[] = await fabricConnection.getChannelPeersInfo(channelName);
fabricChannelStub.getEndorsers.should.have.been.calledOnce;
channelPeersInfo.length.should.equal(2);
channelPeersInfo.should.deep.equal([{ name: peerNames[0], mspID: mspIds[0] }, { name: peerNames[1], mspID: mspIds[1] }]);
});
it('should handle any errors', async () => {
const channelName: string = 'theChannelName';
const error: Error = new Error('Could not get channel');
fabricChannelStub.getEndorsers.throws(error);
await fabricConnection.getChannelPeersInfo(channelName).should.be.rejectedWith(`Unable to get channel peers info: ${error.message}`);
});
});
    describe('getChannelCapabilityFromPeer', () => {
        let mockPeer: sinon.SinonStubbedInstance<LifecyclePeer>;
        let channelName: string;
        let peerName: string;

        beforeEach(() => {
            channelName = 'mychannel';
            peerName = 'Org1 Peer1';
            mockPeer = mySandBox.createStubInstance(LifecyclePeer);
            // Replace the lifecycle's peer registry with our single stub.
            fabricConnection['lifecycle']['peers'].clear();
            fabricConnection['lifecycle']['peers'].set(peerName, mockPeer as unknown as LifecyclePeer);
            mockPeer.getChannelCapabilities.resolves(['V2_0']);
        });

        it('should get the capabilities of channel', async () => {
            const result: string[] = await fabricConnection.getChannelCapabilityFromPeer(channelName, peerName);
            result.should.deep.equal(['V2_0']);
            mockPeer.getChannelCapabilities.should.have.been.calledOnce;
        });

        it('should handle errors', async () => {
            const error: Error = new Error('some error');
            mockPeer.getChannelCapabilities.rejects(error);
            await fabricConnection.getChannelCapabilityFromPeer(channelName, peerName).should.be.rejectedWith(`Unable to determine channel capabilities of channel ${channelName}: ${error.message}`);
            mockPeer.getChannelCapabilities.should.have.been.calledOnce;
        });
    });
});
import * as Yup from 'yup'
import { getAllLoadNames, getAllDisplayNames } from '../definitions'
import { getDefaultLoadName, getDefaultDisplayName } from './formSelectors'
import {
labwareTypeOptions,
wellBottomShapeOptions,
wellShapeOptions,
DEFAULT_RACK_BRAND,
IRREGULAR_LABWARE_ERROR,
LABWARE_TOO_SMALL_ERROR,
LABWARE_TOO_LARGE_ERROR,
LABELS,
LOOSE_TIP_FIT_ERROR,
MAX_X_DIMENSION,
MAX_Y_DIMENSION,
MAX_Z_DIMENSION,
MIN_X_DIMENSION,
MIN_Y_DIMENSION,
REQUIRED_FIELD_ERROR,
MUST_BE_A_NUMBER_ERROR,
LabwareFields,
} from './fields'
import type { ProcessedLabwareFields } from './fields'
// global overrides for Yup's default error messages.
Yup.setLocale({
  mixed: {
    required: REQUIRED_FIELD_ERROR,
  },
})

// Lowercased, trimmed set of every display name already in the Opentrons
// labware library, used to reject duplicate display names.
const ALL_DISPLAY_NAMES = new Set(
  getAllDisplayNames().map(n => n.toLowerCase().trim())
)
// Required string field; a non-string value surfaces the "required field"
// message instead of Yup's default type error.
const requiredString = (label: string): Yup.StringSchema =>
  Yup.string().label(label).typeError(REQUIRED_FIELD_ERROR).required()
// Maps NaN to `undefined` inside a Yup transform so that an empty numeric
// input shows "this field is required" instead of "must be a number".
// See https://github.com/jquense/yup/issues/971
const nanToUndefined = (value: number): number | undefined => {
  if (isNaN(value)) {
    return undefined
  }
  return value
}
// Required number strictly greater than 0. NaN is transformed to undefined
// first (see nanToUndefined) so an empty field reports "required" rather
// than the number type error.
const requiredPositiveNumber = (label: string): Yup.NumberSchema =>
  Yup.number()
    .label(label)
    .transform(nanToUndefined)
    .typeError(MUST_BE_A_NUMBER_ERROR)
    .moreThan(0)
    .required()
// Like requiredPositiveNumber, but additionally constrained to integers.
const requiredPositiveInteger = (label: string): Yup.NumberSchema => {
  const base = Yup.number()
    .transform(nanToUndefined)
    .label(label)
    .typeError(MUST_BE_A_NUMBER_ERROR)
  return base.moreThan(0).integer().required()
}
// Boolean schema that only accepts `true`; a `false` answer means the labware
// cannot be modeled by the creator, so it surfaces IRREGULAR_LABWARE_ERROR.
const unsupportedLabwareIfFalse = (label: string): Yup.BooleanSchema => {
  const base = Yup.boolean()
    .default(false)
    .label(label)
    .typeError(REQUIRED_FIELD_ERROR)
  return base.oneOf([true], IRREGULAR_LABWARE_ERROR).required()
}
// Build the "name already taken" validation message for a given name kind
// (e.g. 'load name' or 'display name').
const nameExistsError = (nameName: string): string => {
  return (
    `This ${nameName} already exists in the Opentrons default labware library. ` +
    `Please edit the ${nameName} to make it unique.`
  )
}
// NOTE: all IRREGULAR_LABWARE_ERROR messages will be converted to a special 'error' Alert
// NOTE: same as getIsCustomTubeRack util, but different args type.
// A "custom" tube rack is a tube rack whose insert is the special
// 'customTubeRack' sentinel value.
const matchCustomTubeRack = (
  labwareType: LabwareFields['labwareType'],
  tubeRackInsertLoadName: LabwareFields['tubeRackInsertLoadName']
): boolean => {
  if (labwareType !== 'tubeRack') {
    return false
  }
  return tubeRackInsertLoadName === 'customTubeRack'
}
// NOTE: same as getIsOpentronsTubeRack util, but different args type.
// An "Opentrons" tube rack is a tube rack using any non-custom insert.
const matchOpentronsTubeRack = (
  labwareType: LabwareFields['labwareType'],
  tubeRackInsertLoadName: LabwareFields['tubeRackInsertLoadName']
): boolean => {
  if (labwareType !== 'tubeRack') {
    return false
  }
  return tubeRackInsertLoadName !== 'customTubeRack'
}
// Base Yup schema for the labware-creator form. `.when()` clauses make fields
// conditionally required depending on labware type / well shape / grid size.
// Several fields are `mixed()` rather than their natural type (see inline TODOs).
export const labwareFormSchemaBaseObject = Yup.object({
  labwareType: requiredString(LABELS.labwareType).oneOf(
    labwareTypeOptions.map(o => o.value)
  ),
  // only meaningful for tube racks; nullable otherwise
  tubeRackInsertLoadName: Yup.mixed().when('labwareType', {
    is: 'tubeRack',
    then: requiredString(LABELS.tubeRackInsertLoadName),
    otherwise: Yup.mixed().nullable(),
  }),
  // TODO(mc, 2020-06-02): should this be Yup.string() instead of mixed?
  aluminumBlockType: Yup.mixed().when('labwareType', {
    is: 'aluminumBlock',
    then: requiredString(LABELS.aluminumBlockType),
    otherwise: Yup.mixed().nullable(),
  }),
  // TODO(mc, 2020-06-02): should this be Yup.string() instead of mixed?
  aluminumBlockChildType: Yup.mixed().when(
    ['labwareType', 'aluminumBlockType'],
    {
      // only required for 96-well aluminum block
      is: (labwareType: string, aluminumBlockType: string): boolean =>
        labwareType === 'aluminumBlock' && aluminumBlockType === '96well',
      then: requiredString(LABELS.aluminumBlockChildType),
      otherwise: Yup.mixed().nullable(),
    }
  ),
  // tip racks must report a snug fit; any other answer is a hard error
  handPlacedTipFit: Yup.string().when('labwareType', {
    is: 'tipRack',
    then: requiredString(LABELS.handPlacedTipFit).oneOf(
      ['snug'],
      LOOSE_TIP_FIT_ERROR
    ),
    otherwise: Yup.string().nullable(),
  }),
  // tubeRackSides: Array<string>
  footprintXDimension: Yup.number()
    .transform(nanToUndefined)
    .label(LABELS.footprintXDimension)
    .typeError(MUST_BE_A_NUMBER_ERROR)
    .min(MIN_X_DIMENSION, LABWARE_TOO_SMALL_ERROR)
    .max(MAX_X_DIMENSION, LABWARE_TOO_LARGE_ERROR)
    .nullable()
    .required(),
  footprintYDimension: Yup.number()
    .transform(nanToUndefined)
    .label(LABELS.footprintYDimension)
    .typeError(MUST_BE_A_NUMBER_ERROR)
    .min(MIN_Y_DIMENSION, LABWARE_TOO_SMALL_ERROR)
    .max(MAX_Y_DIMENSION, LABWARE_TOO_LARGE_ERROR)
    .nullable()
    .required(),
  labwareZDimension: requiredPositiveNumber(LABELS.labwareZDimension).max(
    MAX_Z_DIMENSION,
    IRREGULAR_LABWARE_ERROR
  ),
  gridRows: requiredPositiveInteger(LABELS.gridRows),
  gridColumns: requiredPositiveInteger(LABELS.gridColumns),
  // spacing is irrelevant with a single column/row, so default it to 0
  // TODO(mc, 2020-06-02): should this be number() instead of mixed?
  gridSpacingX: Yup.mixed().when('gridColumns', {
    is: 1,
    then: Yup.mixed().default(0),
    otherwise: requiredPositiveNumber(LABELS.gridSpacingX),
  }),
  // TODO(mc, 2020-06-02): should this be number() instead of mixed()?
  gridSpacingY: Yup.mixed().when('gridRows', {
    is: 1,
    then: Yup.mixed().default(0),
    otherwise: requiredPositiveNumber(LABELS.gridSpacingY),
  }),
  gridOffsetX: requiredPositiveNumber(LABELS.gridOffsetX),
  gridOffsetY: requiredPositiveNumber(LABELS.gridOffsetY),
  homogeneousWells: unsupportedLabwareIfFalse(LABELS.homogeneousWells),
  // single-row/column labware is trivially regular, so default to true
  regularRowSpacing: Yup.mixed().when('gridRows', {
    is: 1,
    then: Yup.mixed().default(true),
    otherwise: unsupportedLabwareIfFalse(LABELS.regularRowSpacing),
  }),
  regularColumnSpacing: Yup.mixed().when('gridColumns', {
    is: 1,
    then: Yup.mixed().default(true),
    otherwise: unsupportedLabwareIfFalse(LABELS.regularColumnSpacing),
  }),
  wellVolume: requiredPositiveNumber(LABELS.wellVolume),
  // bottom shape does not apply to tip racks
  wellBottomShape: Yup.string().when('labwareType', {
    is: 'tipRack',
    then: Yup.string().nullable(),
    otherwise: requiredString(LABELS.wellBottomShape).oneOf(
      wellBottomShapeOptions.map(o => o.value)
    ),
  }),
  // for tip racks, "well depth" is relabeled as tip "Length"; in both cases
  // it must not exceed the labware's Z dimension
  wellDepth: Yup.number().when('labwareType', {
    is: 'tipRack',
    then: Yup.number()
      .transform(nanToUndefined)
      .label('Length')
      .typeError(MUST_BE_A_NUMBER_ERROR)
      .moreThan(0)
      .max(
        Yup.ref('labwareZDimension'),
        'Tip Length cannot exceed labware height'
      )
      .required(),
    otherwise: Yup.number()
      .transform(nanToUndefined)
      .label(LABELS.wellDepth)
      .typeError(MUST_BE_A_NUMBER_ERROR)
      .moreThan(0)
      .max(
        Yup.ref('labwareZDimension'),
        'Well depth cannot exceed labware height'
      )
      .required(),
  }),
  wellShape: requiredString(LABELS.wellShape).oneOf(
    wellShapeOptions.map(o => o.value)
  ),
  // used with circular well shape only
  wellDiameter: Yup.mixed().when('wellShape', {
    is: 'circular',
    then: requiredPositiveNumber(LABELS.wellDiameter),
    otherwise: Yup.mixed().nullable(),
  }),
  // used with rectangular well shape only
  wellXDimension: Yup.mixed().when('wellShape', {
    is: 'rectangular',
    then: requiredPositiveNumber(LABELS.wellXDimension),
    otherwise: Yup.mixed().nullable(),
  }),
  wellYDimension: Yup.mixed().when('wellShape', {
    is: 'rectangular',
    then: requiredPositiveNumber(LABELS.wellYDimension),
    otherwise: Yup.mixed().nullable(),
  }),
  // non-custom tuberacks (aka Opentrons tube racks) default to "Opentrons" brand,
  // and user cannot see or override brand (aka "rack brand")
  brand: Yup.mixed().when(['labwareType', 'tubeRackInsertLoadName'], {
    is: matchOpentronsTubeRack,
    then: Yup.mixed().nullable(), // defaulted to DEFAULT_RACK_BRAND in form-level transform below
    otherwise: requiredString(LABELS.brand),
  }),
  // comma-separated string -> trimmed, non-empty string array
  // TODO(mc, 2020-06-02): should this be Yup.array() instead of mixed?
  brandId: Yup.mixed()
    .nullable()
    .transform(
      (
        currentValue: string | null | undefined,
        originalValue: string | null | undefined
      ): ProcessedLabwareFields['brandId'] =>
        (currentValue || '')
          .trim()
          .split(',')
          .map(s => s.trim())
          .filter(Boolean)
    ),
  // group brand (aka "tube brand") is only required for custom tube racks
  groupBrand: Yup.mixed().when(['labwareType', 'tubeRackInsertLoadName'], {
    is: matchCustomTubeRack,
    then: requiredString(LABELS.groupBrand),
    otherwise: Yup.mixed().nullable(),
  }),
  // comma-separated string -> trimmed, non-empty string array
  // TODO(mc, 2020-06-02): should this be Yup.array() instead of mixed?
  groupBrandId: Yup.mixed()
    .nullable()
    .transform(
      (
        currentValue: string | null | undefined,
        originalValue: string | null | undefined
      ): ProcessedLabwareFields['groupBrandId'] =>
        (currentValue || '')
          .trim()
          .split(',')
          .map(s => s.trim())
          .filter(Boolean)
    ),
  // nullable here: blank values are defaulted via getDefaultLoadName in the
  // form-level transform below
  loadName: Yup.string()
    .nullable()
    .label(LABELS.loadName)
    .notOneOf(getAllLoadNames(), nameExistsError('load name'))
    .matches(
      /^[a-z0-9._]+$/,
      '${label} can only contain lowercase letters, numbers, dot (.) and underscore (_). Spaces are not allowed.' // eslint-disable-line no-template-curly-in-string
    ),
  // TODO(mc, 2020-06-02): should this be Yup.string() instead of mixed?
  displayName: Yup.mixed()
    .nullable()
    .label(LABELS.displayName)
    .test(
      'displayNameDoesNotAlreadyExist',
      nameExistsError('display name'),
      (value: string | null | undefined) =>
        !ALL_DISPLAY_NAMES.has(
          (value == null ? '' : value).toLowerCase().trim()
        ) // case-insensitive and trim-insensitive match
    )
    .transform(
      (
        currentValue: string | null | undefined,
        originalValue: string | null | undefined
      ) => (currentValue == null ? currentValue : currentValue.trim())
    ),
  pipetteName: requiredString(LABELS.pipetteName),
})
// @ts-expect-error(IL, 2021-03-25): something(s) about this schema don't match the flow type (labwareType: string problem??)
export const labwareFormSchema: Yup.Schema<ProcessedLabwareFields> = labwareFormSchemaBaseObject.transform(
  (currentValue, originalValue) => {
    // "form-level" transforms
    // NOTE: the currentValue does NOT have field-level transforms applied :(
    // TODO: these results are not validated, ideally I could do these transforms in the fields
    // Yup runs transforms before defaults. In order for brand defaulting to work for displayName/loadName
    // creation, we can't do .default() to the brand field, but need to default it here.
    const brand = matchOpentronsTubeRack(
      currentValue.labwareType,
      currentValue.tubeRackInsertLoadName
    )
      ? DEFAULT_RACK_BRAND
      : currentValue.brand
    const nextValues = { ...currentValue, brand }
    // fall back to generated names when the user left displayName/loadName
    // blank (or whitespace-only, for displayName)
    const displayName =
      currentValue.displayName == null || currentValue.displayName.trim() === ''
        ? getDefaultDisplayName(nextValues)
        : currentValue.displayName
    const loadName = currentValue.loadName || getDefaultLoadName(nextValues)
    // NOTE(review): the defaulted `brand` is used only for name generation and
    // is NOT included in the returned value (we spread `currentValue`, not
    // `nextValues`) — confirm downstream applies the brand default again.
    return {
      ...currentValue,
      loadName,
      displayName,
    }
  }
)
import * as os from 'os'
import * as events from 'events'
import * as child from 'child_process'
import * as path from 'path'
import * as stream from 'stream'
import * as im from './interfaces'
import * as io from '@actions/io'
import * as ioUtil from '@actions/io/lib/io-util'
import {setTimeout} from 'timers'
/* eslint-disable @typescript-eslint/unbound-method */

// True when running on Windows; drives the cmd.exe-specific quoting and
// spawn logic throughout this module.
const IS_WINDOWS = process.platform === 'win32'
/*
 * Class for running command line tools. Handles quoting and arg parsing in a
 * platform agnostic way. Construct with a tool path, args, and options, then
 * call exec() to spawn the process and stream its output.
 */
export class ToolRunner extends events.EventEmitter {
  constructor(toolPath: string, args?: string[], options?: im.ExecOptions) {
    super()

    if (!toolPath) {
      throw new Error("Parameter 'toolPath' cannot be null or empty.")
    }

    this.toolPath = toolPath
    this.args = args || []
    this.options = options || {}
  }

  private toolPath: string
  private args: string[]
  private options: im.ExecOptions

  // forward a debug message to the caller-supplied debug listener, if any
  private _debug(message: string): void {
    if (this.options.listeners && this.options.listeners.debug) {
      this.options.listeners.debug(message)
    }
  }

  // build the human-readable command line that is echoed to the output stream
  // before the process runs; quoting mirrors what will actually be spawned
  private _getCommandString(
    options: im.ExecOptions,
    noPrefix?: boolean
  ): string {
    const toolPath = this._getSpawnFileName()
    const args = this._getSpawnArgs(options)
    let cmd = noPrefix ? '' : '[command]' // omit prefix when piped to a second tool
    if (IS_WINDOWS) {
      // Windows + cmd file
      if (this._isCmdFile()) {
        cmd += toolPath
        for (const a of args) {
          cmd += ` ${a}`
        }
      }
      // Windows + verbatim
      else if (options.windowsVerbatimArguments) {
        cmd += `"${toolPath}"`
        for (const a of args) {
          cmd += ` ${a}`
        }
      }
      // Windows (regular)
      else {
        cmd += this._windowsQuoteCmdArg(toolPath)
        for (const a of args) {
          cmd += ` ${this._windowsQuoteCmdArg(a)}`
        }
      }
    } else {
      // OSX/Linux - this can likely be improved with some form of quoting.
      // creating processes on Unix is fundamentally different than Windows.
      // on Unix, execvp() takes an arg array.
      cmd += toolPath
      for (const a of args) {
        cmd += ` ${a}`
      }
    }

    return cmd
  }

  // append `data` to the carried-over partial line `strBuffer`, invoke
  // `onLine` once per complete line (split on os.EOL), and return the
  // remaining partial line for the next call
  private _processLineBuffer(
    data: Buffer,
    strBuffer: string,
    onLine: (line: string) => void
  ): string {
    try {
      let s = strBuffer + data.toString()
      let n = s.indexOf(os.EOL)

      while (n > -1) {
        const line = s.substring(0, n)
        onLine(line)

        // the rest of the string ...
        s = s.substring(n + os.EOL.length)
        n = s.indexOf(os.EOL)
      }

      return s
    } catch (err) {
      // streaming lines to console is best effort. Don't fail a build.
      this._debug(`error processing line. Failed with error ${err}`)

      return ''
    }
  }

  // .cmd/.bat files cannot be spawned directly; run them through cmd.exe
  // (COMSPEC when set). Everything else spawns the tool path as-is.
  private _getSpawnFileName(): string {
    if (IS_WINDOWS) {
      if (this._isCmdFile()) {
        return process.env['COMSPEC'] || 'cmd.exe'
      }
    }

    return this.toolPath
  }

  // when running a .cmd/.bat via cmd.exe, collapse tool + args into a single
  // `/D /S /C "..."` argument line; otherwise pass args through unchanged
  private _getSpawnArgs(options: im.ExecOptions): string[] {
    if (IS_WINDOWS) {
      if (this._isCmdFile()) {
        let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`
        for (const a of this.args) {
          argline += ' '
          argline += options.windowsVerbatimArguments
            ? a
            : this._windowsQuoteCmdArg(a)
        }

        argline += '"'
        return [argline]
      }
    }

    return this.args
  }

  private _endsWith(str: string, end: string): boolean {
    return str.endsWith(end)
  }

  // case-insensitive check for a .cmd or .bat tool path
  private _isCmdFile(): boolean {
    const upperToolPath: string = this.toolPath.toUpperCase()
    return (
      this._endsWith(upperToolPath, '.CMD') ||
      this._endsWith(upperToolPath, '.BAT')
    )
  }

  // quote an argument for a Windows command line, choosing between libuv-style
  // quoting (for .exe) and cmd.exe-specific quoting (for .cmd/.bat)
  private _windowsQuoteCmdArg(arg: string): string {
    // for .exe, apply the normal quoting rules that libuv applies
    if (!this._isCmdFile()) {
      return this._uvQuoteCmdArg(arg)
    }

    // otherwise apply quoting rules specific to the cmd.exe command line parser.
    // the libuv rules are generic and are not designed specifically for cmd.exe
    // command line parser.
    //
    // for a detailed description of the cmd.exe command line parser, refer to
    // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912

    // need quotes for empty arg
    if (!arg) {
      return '""'
    }

    // determine whether the arg needs to be quoted
    const cmdSpecialChars = [
      ' ',
      '\t',
      '&',
      '(',
      ')',
      '[',
      ']',
      '{',
      '}',
      '^',
      '=',
      ';',
      '!',
      "'",
      '+',
      ',',
      '`',
      '~',
      '|',
      '<',
      '>',
      '"'
    ]
    let needsQuotes = false
    for (const char of arg) {
      if (cmdSpecialChars.some(x => x === char)) {
        needsQuotes = true
        break
      }
    }

    // short-circuit if quotes not needed
    if (!needsQuotes) {
      return arg
    }

    // the following quoting rules are very similar to the rules that by libuv applies.
    //
    // 1) wrap the string in quotes
    //
    // 2) double-up quotes - i.e. " => ""
    //
    //    this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately
    //    doesn't work well with a cmd.exe command line.
    //
    //    note, replacing " with "" also works well if the arg is passed to a downstream .NET console app.
    //    for example, the command line:
    //          foo.exe "myarg:""my val"""
    //    is parsed by a .NET console app into an arg array:
    //          [ "myarg:\"my val\"" ]
    //    which is the same end result when applying libuv quoting rules. although the actual
    //    command line from libuv quoting rules would look like:
    //          foo.exe "myarg:\"my val\""
    //
    // 3) double-up slashes that precede a quote,
    //    e.g.  hello \world    => "hello \world"
    //          hello\"world    => "hello\\""world"
    //          hello\\"world   => "hello\\\\""world"
    //          hello world\    => "hello world\\"
    //
    //    technically this is not required for a cmd.exe command line, or the batch argument parser.
    //    the reasons for including this as a .cmd quoting rule are:
    //
    //    a) this is optimized for the scenario where the argument is passed from the .cmd file to an
    //       external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.
    //
    //    b) it's what we've been doing previously (by deferring to node default behavior) and we
    //       haven't heard any complaints about that aspect.
    //
    // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be
    // escaped when used on the command line directly - even though within a .cmd file % can be escaped
    // by using %%.
    //
    // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts
    // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.
    //
    // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would
    // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the
    // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args
    // to an external program.
    //
    // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.
    // % can be escaped within a .cmd file.
    let reverse = '"'
    let quoteHit = true
    for (let i = arg.length; i > 0; i--) {
      // walk the string in reverse
      reverse += arg[i - 1]
      if (quoteHit && arg[i - 1] === '\\') {
        reverse += '\\' // double the slash
      } else if (arg[i - 1] === '"') {
        quoteHit = true
        reverse += '"' // double the quote
      } else {
        quoteHit = false
      }
    }

    reverse += '"'
    return reverse
      .split('')
      .reverse()
      .join('')
  }

  private _uvQuoteCmdArg(arg: string): string {
    // Tool runner wraps child_process.spawn() and needs to apply the same quoting as
    // Node in certain cases where the undocumented spawn option windowsVerbatimArguments
    // is used.
    //
    // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,
    // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),
    // pasting copyright notice from Node within this function:
    //
    //      Copyright Joyent, Inc. and other Node contributors. All rights reserved.
    //
    //      Permission is hereby granted, free of charge, to any person obtaining a copy
    //      of this software and associated documentation files (the "Software"), to
    //      deal in the Software without restriction, including without limitation the
    //      rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
    //      sell copies of the Software, and to permit persons to whom the Software is
    //      furnished to do so, subject to the following conditions:
    //
    //      The above copyright notice and this permission notice shall be included in
    //      all copies or substantial portions of the Software.
    //
    //      THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    //      IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    //      FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    //      AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    //      LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
    //      FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
    //      IN THE SOFTWARE.

    if (!arg) {
      // Need double quotation for empty argument
      return '""'
    }

    if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) {
      // No quotation needed
      return arg
    }

    if (!arg.includes('"') && !arg.includes('\\')) {
      // No embedded double quotes or backslashes, so I can just wrap
      // quote marks around the whole thing.
      return `"${arg}"`
    }

    // Expected input/output:
    //   input : hello"world
    //   output: "hello\"world"
    //   input : hello""world
    //   output: "hello\"\"world"
    //   input : hello\world
    //   output: hello\world
    //   input : hello\\world
    //   output: hello\\world
    //   input : hello\"world
    //   output: "hello\\\"world"
    //   input : hello\\"world
    //   output: "hello\\\\\"world"
    //   input : hello world\
    //   output: "hello world\\" - note the comment in libuv actually reads "hello world\"
    //                             but it appears the comment is wrong, it should be "hello world\\"
    let reverse = '"'
    let quoteHit = true
    for (let i = arg.length; i > 0; i--) {
      // walk the string in reverse
      reverse += arg[i - 1]
      if (quoteHit && arg[i - 1] === '\\') {
        reverse += '\\'
      } else if (arg[i - 1] === '"') {
        quoteHit = true
        reverse += '\\'
      } else {
        quoteHit = false
      }
    }

    reverse += '"'
    return reverse
      .split('')
      .reverse()
      .join('')
  }

  // fill in defaults for every option the exec path reads, so downstream code
  // can assume all fields are present
  private _cloneExecOptions(options?: im.ExecOptions): im.ExecOptions {
    options = options || <im.ExecOptions>{}
    const result: im.ExecOptions = <im.ExecOptions>{
      cwd: options.cwd || process.cwd(),
      env: options.env || process.env,
      silent: options.silent || false,
      windowsVerbatimArguments: options.windowsVerbatimArguments || false,
      failOnStdErr: options.failOnStdErr || false,
      ignoreReturnCode: options.ignoreReturnCode || false,
      delay: options.delay || 10000
    }
    result.outStream = options.outStream || <stream.Writable>process.stdout
    result.errStream = options.errStream || <stream.Writable>process.stderr
    return result
  }

  // translate ExecOptions into the subset of child_process.SpawnOptions we use
  private _getSpawnOptions(
    options: im.ExecOptions,
    toolPath: string
  ): child.SpawnOptions {
    options = options || <im.ExecOptions>{}
    const result = <child.SpawnOptions>{}
    result.cwd = options.cwd
    result.env = options.env
    result['windowsVerbatimArguments'] =
      options.windowsVerbatimArguments || this._isCmdFile()
    if (options.windowsVerbatimArguments) {
      result.argv0 = `"${toolPath}"`
    }
    return result
  }

  /**
   * Exec a tool.
   * Output will be streamed to the live console.
   * Returns promise with return code
   *
   * @returns number exit code of the process (rejects on spawn/exec failure)
   */
  async exec(): Promise<number> {
    // root the tool path if it is unrooted and contains relative pathing
    if (
      !ioUtil.isRooted(this.toolPath) &&
      (this.toolPath.includes('/') ||
        (IS_WINDOWS && this.toolPath.includes('\\')))
    ) {
      // prefer options.cwd if it is specified, however options.cwd may also need to be rooted
      this.toolPath = path.resolve(
        process.cwd(),
        this.options.cwd || process.cwd(),
        this.toolPath
      )
    }

    // if the tool is only a file name, then resolve it from the PATH
    // otherwise verify it exists (add extension on Windows if necessary)
    this.toolPath = await io.which(this.toolPath, true)

    // NOTE(review): async promise executor — a synchronous throw inside this
    // callback (e.g. the missing-stdin throw below) rejects the implicit async
    // function rather than this promise; confirm that path is intended.
    return new Promise<number>(async (resolve, reject) => {
      this._debug(`exec tool: ${this.toolPath}`)
      this._debug('arguments:')
      for (const arg of this.args) {
        this._debug(`   ${arg}`)
      }

      const optionsNonNull = this._cloneExecOptions(this.options)
      if (!optionsNonNull.silent && optionsNonNull.outStream) {
        optionsNonNull.outStream.write(
          this._getCommandString(optionsNonNull) + os.EOL
        )
      }

      // ExecState decides when the run is "done" (exit + stdio close, or timeout)
      const state = new ExecState(optionsNonNull, this.toolPath)
      state.on('debug', (message: string) => {
        this._debug(message)
      })

      if (this.options.cwd && !(await ioUtil.exists(this.options.cwd))) {
        return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`))
      }

      const fileName = this._getSpawnFileName()
      const cp = child.spawn(
        fileName,
        this._getSpawnArgs(optionsNonNull),
        this._getSpawnOptions(this.options, fileName)
      )

      let stdbuffer = ''
      if (cp.stdout) {
        cp.stdout.on('data', (data: Buffer) => {
          if (this.options.listeners && this.options.listeners.stdout) {
            this.options.listeners.stdout(data)
          }

          if (!optionsNonNull.silent && optionsNonNull.outStream) {
            optionsNonNull.outStream.write(data)
          }

          stdbuffer = this._processLineBuffer(
            data,
            stdbuffer,
            (line: string) => {
              if (this.options.listeners && this.options.listeners.stdline) {
                this.options.listeners.stdline(line)
              }
            }
          )
        })
      }

      let errbuffer = ''
      if (cp.stderr) {
        cp.stderr.on('data', (data: Buffer) => {
          state.processStderr = true
          if (this.options.listeners && this.options.listeners.stderr) {
            this.options.listeners.stderr(data)
          }

          if (
            !optionsNonNull.silent &&
            optionsNonNull.errStream &&
            optionsNonNull.outStream
          ) {
            // stderr goes to errStream only when failOnStdErr is set
            const s = optionsNonNull.failOnStdErr
              ? optionsNonNull.errStream
              : optionsNonNull.outStream

            s.write(data)
          }

          errbuffer = this._processLineBuffer(
            data,
            errbuffer,
            (line: string) => {
              if (this.options.listeners && this.options.listeners.errline) {
                this.options.listeners.errline(line)
              }
            }
          )
        })
      }

      cp.on('error', (err: Error) => {
        state.processError = err.message
        state.processExited = true
        state.processClosed = true
        state.CheckComplete()
      })

      cp.on('exit', (code: number) => {
        state.processExitCode = code
        state.processExited = true
        this._debug(`Exit code ${code} received from tool '${this.toolPath}'`)
        state.CheckComplete()
      })

      cp.on('close', (code: number) => {
        state.processExitCode = code
        state.processExited = true
        state.processClosed = true
        this._debug(`STDIO streams have closed for tool '${this.toolPath}'`)
        state.CheckComplete()
      })

      state.on('done', (error: Error, exitCode: number) => {
        // flush any trailing partial lines before settling the promise
        if (stdbuffer.length > 0) {
          this.emit('stdline', stdbuffer)
        }

        if (errbuffer.length > 0) {
          this.emit('errline', errbuffer)
        }

        cp.removeAllListeners()

        if (error) {
          reject(error)
        } else {
          resolve(exitCode)
        }
      })

      if (this.options.input) {
        if (!cp.stdin) {
          throw new Error('child process missing stdin')
        }

        cp.stdin.end(this.options.input)
      }
    })
  }
}
/**
 * Convert an arg string to an array of args. Handles escaping.
 *
 * Rules (mirroring the historical behavior):
 * - unquoted whitespace (spaces) separates args
 * - double quotes group text (including spaces) into one arg
 * - inside quotes, a backslash escapes only a following double quote;
 *   before any other character the backslash is kept literally
 * - the final arg is trimmed before being pushed
 *
 * @param argString string of arguments
 * @returns string[] array of arguments
 */
export function argStringToArray(argString: string): string[] {
  const result: string[] = []

  let insideQuotes = false
  let pendingEscape = false
  let current = ''

  // append one character to the arg under construction, restoring a
  // backslash that turned out not to escape a double quote
  const take = (ch: string): void => {
    if (pendingEscape && ch !== '"') {
      current += '\\'
    }
    current += ch
    pendingEscape = false
  }

  for (const ch of argString) {
    if (ch === '"') {
      if (pendingEscape) {
        take(ch) // escaped quote becomes a literal "
      } else {
        insideQuotes = !insideQuotes
      }
    } else if (ch === '\\' && pendingEscape) {
      take(ch)
    } else if (ch === '\\' && insideQuotes) {
      pendingEscape = true
    } else if (ch === ' ' && !insideQuotes) {
      if (current.length > 0) {
        result.push(current)
        current = ''
      }
    } else {
      take(ch)
    }
  }

  if (current.length > 0) {
    result.push(current.trim())
  }

  return result
}
// Tracks the lifecycle of one spawned process and decides when the overall
// exec is "done": either exit + stdio closed, or exit followed by a timeout
// waiting for stdio to close. Emits 'done' (error, exitCode) exactly once.
class ExecState extends events.EventEmitter {
  constructor(options: im.ExecOptions, toolPath: string) {
    super()

    if (!toolPath) {
      throw new Error('toolPath must not be empty')
    }

    this.options = options
    this.toolPath = toolPath
    if (options.delay) {
      this.delay = options.delay
    }
  }

  processClosed = false // tracks whether the process has exited and stdio is closed
  processError = ''
  processExitCode = 0
  processExited = false // tracks whether the process has exited
  processStderr = false // tracks whether stderr was written to
  private delay = 10000 // 10 seconds
  private done = false
  private options: im.ExecOptions
  private timeout: NodeJS.Timer | null = null
  private toolPath: string

  // called after each process event. If stdio is closed we can finish
  // immediately; if only 'exit' has fired, wait up to `delay` ms for 'close'
  // (a child may have inherited the streams) before forcing a result.
  CheckComplete(): void {
    if (this.done) {
      return
    }

    if (this.processClosed) {
      this._setResult()
    } else if (this.processExited) {
      this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this)
    }
  }

  private _debug(message: string): void {
    this.emit('debug', message)
  }

  // compute the final error (if any) from the recorded state, clear the
  // pending timeout, and emit 'done' once
  private _setResult(): void {
    // determine whether there is an error
    let error: Error | undefined
    if (this.processExited) {
      if (this.processError) {
        error = new Error(
          `There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`
        )
      } else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {
        error = new Error(
          `The process '${this.toolPath}' failed with exit code ${this.processExitCode}`
        )
      } else if (this.processStderr && this.options.failOnStdErr) {
        error = new Error(
          `The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`
        )
      }
    }

    // clear the timeout
    if (this.timeout) {
      clearTimeout(this.timeout)
      this.timeout = null
    }

    this.done = true
    this.emit('done', error, this.processExitCode)
  }

  // fired when stdio did not close within `delay` ms of the exit event;
  // logs a diagnostic and settles with whatever state we have
  private static HandleTimeout(state: ExecState): void {
    if (state.done) {
      return
    }

    if (!state.processClosed && state.processExited) {
      const message = `The STDIO streams did not close within ${state.delay /
        1000} seconds of the exit event from process '${
        state.toolPath
      }'. This may indicate a child process inherited the STDIO streams and has not yet exited.`
      state._debug(message)
    }

    state._setResult()
  }
}
import { Bucket } from '@aws-cdk/aws-s3';
import { Aws, Stack } from '@aws-cdk/core';
import * as ec2 from '../lib';
describe('user data', () => {
  // Windows user data wraps commands in a single <powershell> block.
  test('can create Windows user data', () => {
    // GIVEN

    // WHEN
    const userData = ec2.UserData.forWindows();
    userData.addCommands('command1', 'command2');

    // THEN
    const rendered = userData.render();
    expect(rendered).toEqual('<powershell>command1\ncommand2</powershell>');
  });
  // On-exit commands render inside a PowerShell `trap` handler that records
  // success via the thrown "Success" sentinel.
  test('can create Windows user data with commands on exit', () => {
    // GIVEN
    const userData = ec2.UserData.forWindows();

    // WHEN
    userData.addCommands('command1', 'command2');
    userData.addOnExitCommands('onexit1', 'onexit2');

    // THEN
    const rendered = userData.render();
    expect(rendered).toEqual('<powershell>trap {\n' +
      '$success=($PSItem.Exception.Message -eq "Success")\n' +
      'onexit1\n' +
      'onexit2\n' +
      'break\n' +
      '}\n' +
      'command1\n' +
      'command2\n' +
      'throw "Success"</powershell>');
  });
  // addSignalOnExitCommand renders a cfn-signal call (with the resource's
  // logical ID) inside the trap handler.
  test('can create Windows with Signal Command', () => {
    // GIVEN
    const stack = new Stack();
    const resource = new ec2.Vpc(stack, 'RESOURCE');
    const userData = ec2.UserData.forWindows();

    // WHEN
    userData.addSignalOnExitCommand( resource );
    userData.addCommands('command1');

    // THEN
    const rendered = userData.render();
    expect(rendered).toEqual('<powershell>trap {\n' +
      '$success=($PSItem.Exception.Message -eq "Success")\n' +
      `cfn-signal --stack Default --resource RESOURCE1989552F --region ${Aws.REGION} --success ($success.ToString().ToLower())\n` +
      'break\n' +
      '}\n' +
      'command1\n' +
      'throw "Success"</powershell>',
    );
  });
  // S3 downloads render mkdir + Read-S3Object; the default local path is
  // C:/temp/<bucketKey> unless localFile is given.
  test('can windows userdata download S3 files', () => {
    // GIVEN
    const stack = new Stack();
    const userData = ec2.UserData.forWindows();
    const bucket = Bucket.fromBucketName( stack, 'testBucket', 'test' );
    const bucket2 = Bucket.fromBucketName( stack, 'testBucket2', 'test2' );

    // WHEN
    userData.addS3DownloadCommand({
      bucket,
      bucketKey: 'filename.bat',
    } );
    userData.addS3DownloadCommand({
      bucket: bucket2,
      bucketKey: 'filename2.bat',
      localFile: 'c:\\test\\location\\otherScript.bat',
    } );

    // THEN
    const rendered = userData.render();
    expect(rendered).toEqual('<powershell>mkdir (Split-Path -Path \'C:/temp/filename.bat\' ) -ea 0\n' +
      'Read-S3Object -BucketName \'test\' -key \'filename.bat\' -file \'C:/temp/filename.bat\' -ErrorAction Stop\n' +
      'mkdir (Split-Path -Path \'c:\\test\\location\\otherScript.bat\' ) -ea 0\n' +
      'Read-S3Object -BucketName \'test2\' -key \'filename2.bat\' -file \'c:\\test\\location\\otherScript.bat\' -ErrorAction Stop</powershell>',
    );
  });
  // Same as above, but an explicit region appends `-Region <region>` to each
  // Read-S3Object call.
  test('can windows userdata download S3 files with given region', () => {
    // GIVEN
    const stack = new Stack();
    const userData = ec2.UserData.forWindows();
    const bucket = Bucket.fromBucketName( stack, 'testBucket', 'test' );
    const bucket2 = Bucket.fromBucketName( stack, 'testBucket2', 'test2' );

    // WHEN
    userData.addS3DownloadCommand({
      bucket,
      bucketKey: 'filename.bat',
      region: 'us-east-1',
    } );
    userData.addS3DownloadCommand({
      bucket: bucket2,
      bucketKey: 'filename2.bat',
      localFile: 'c:\\test\\location\\otherScript.bat',
      region: 'us-east-1',
    } );

    // THEN
    const rendered = userData.render();
    expect(rendered).toEqual('<powershell>mkdir (Split-Path -Path \'C:/temp/filename.bat\' ) -ea 0\n' +
      'Read-S3Object -BucketName \'test\' -key \'filename.bat\' -file \'C:/temp/filename.bat\' -ErrorAction Stop -Region us-east-1\n' +
      'mkdir (Split-Path -Path \'c:\\test\\location\\otherScript.bat\' ) -ea 0\n' +
      'Read-S3Object -BucketName \'test2\' -key \'filename2.bat\' -file \'c:\\test\\location\\otherScript.bat\' -ErrorAction Stop -Region us-east-1</powershell>',
    );
  });
  // Executed files are invoked with the call operator (&) followed by a
  // $? check that stops on failure.
  test('can windows userdata execute files', () => {
    // GIVEN
    const userData = ec2.UserData.forWindows();

    // WHEN
    userData.addExecuteFileCommand({
      filePath: 'C:\\test\\filename.bat',
    } );
    userData.addExecuteFileCommand({
      filePath: 'C:\\test\\filename2.bat',
      arguments: 'arg1 arg2 -arg $variable',
    } );

    // THEN
    const rendered = userData.render();
    expect(rendered).toEqual('<powershell>&\'C:\\test\\filename.bat\'\n' +
      'if (!$?) { Write-Error \'Failed to execute the file "C:\\test\\filename.bat"\' -ErrorAction Stop }\n' +
      '&\'C:\\test\\filename2.bat\' arg1 arg2 -arg $variable\n' +
      'if (!$?) { Write-Error \'Failed to execute the file "C:\\test\\filename2.bat"\' -ErrorAction Stop }</powershell>',
    );
  });
  // Linux user data is a #!/bin/bash script with one command per line.
  test('can create Linux user data', () => {
    // GIVEN

    // WHEN
    const userData = ec2.UserData.forLinux();
    userData.addCommands('command1', 'command2');

    // THEN
    const rendered = userData.render();
    expect(rendered).toEqual('#!/bin/bash\ncommand1\ncommand2');
  });
  // On-exit commands render in an exitTrap function wired to `trap ... EXIT`.
  test('can create Linux user data with commands on exit', () => {
    // GIVEN
    const userData = ec2.UserData.forLinux();

    // WHEN
    userData.addCommands('command1', 'command2');
    userData.addOnExitCommands('onexit1', 'onexit2');

    // THEN
    const rendered = userData.render();
    expect(rendered).toEqual('#!/bin/bash\n' +
      'function exitTrap(){\n' +
      'exitCode=$?\n' +
      'onexit1\n' +
      'onexit2\n' +
      '}\n' +
      'trap exitTrap EXIT\n' +
      'command1\n' +
      'command2');
  });
  // addSignalOnExitCommand renders a cfn-signal call (propagating $exitCode)
  // inside the exit trap.
  test('can create Linux with Signal Command', () => {
    // GIVEN
    const stack = new Stack();
    const resource = new ec2.Vpc(stack, 'RESOURCE');

    // WHEN
    const userData = ec2.UserData.forLinux();
    userData.addCommands('command1');
    userData.addSignalOnExitCommand( resource );

    // THEN
    const rendered = userData.render();
    expect(rendered).toEqual('#!/bin/bash\n' +
      'function exitTrap(){\n' +
      'exitCode=$?\n' +
      `/opt/aws/bin/cfn-signal --stack Default --resource RESOURCE1989552F --region ${Aws.REGION} -e $exitCode || echo \'Failed to send Cloudformation Signal\'\n` +
      '}\n' +
      'trap exitTrap EXIT\n' +
      'command1');
  });
  // S3 downloads render mkdir -p + `aws s3 cp`; the default local path is
  // /tmp/<bucketKey> unless localFile is given.
  test('can linux userdata download S3 files', () => {
    // GIVEN
    const stack = new Stack();
    const userData = ec2.UserData.forLinux();
    const bucket = Bucket.fromBucketName( stack, 'testBucket', 'test' );
    const bucket2 = Bucket.fromBucketName( stack, 'testBucket2', 'test2' );

    // WHEN
    userData.addS3DownloadCommand({
      bucket,
      bucketKey: 'filename.sh',
    } );
    userData.addS3DownloadCommand({
      bucket: bucket2,
      bucketKey: 'filename2.sh',
      localFile: 'c:\\test\\location\\otherScript.sh',
    } );

    // THEN
    const rendered = userData.render();
    expect(rendered).toEqual('#!/bin/bash\n' +
      'mkdir -p $(dirname \'/tmp/filename.sh\')\n' +
      'aws s3 cp \'s3://test/filename.sh\' \'/tmp/filename.sh\'\n' +
      'mkdir -p $(dirname \'c:\\test\\location\\otherScript.sh\')\n' +
      'aws s3 cp \'s3://test2/filename2.sh\' \'c:\\test\\location\\otherScript.sh\'',
    );
  });
test('can linux userdata download S3 files from specific region', () => {
// GIVEN
const stack = new Stack();
const userData = ec2.UserData.forLinux();
const bucket = Bucket.fromBucketName( stack, 'testBucket', 'test' );
const bucket2 = Bucket.fromBucketName( stack, 'testBucket2', 'test2' );
// WHEN
userData.addS3DownloadCommand({
bucket,
bucketKey: 'filename.sh',
region: 'us-east-1',
} );
userData.addS3DownloadCommand({
bucket: bucket2,
bucketKey: 'filename2.sh',
localFile: 'c:\\test\\location\\otherScript.sh',
region: 'us-east-1',
} );
// THEN
const rendered = userData.render();
expect(rendered).toEqual('#!/bin/bash\n' +
'mkdir -p $(dirname \'/tmp/filename.sh\')\n' +
'aws s3 cp \'s3://test/filename.sh\' \'/tmp/filename.sh\' --region us-east-1\n' +
'mkdir -p $(dirname \'c:\\test\\location\\otherScript.sh\')\n' +
'aws s3 cp \'s3://test2/filename2.sh\' \'c:\\test\\location\\otherScript.sh\' --region us-east-1',
);
});
test('can linux userdata execute files', () => {
// GIVEN
const userData = ec2.UserData.forLinux();
// WHEN
userData.addExecuteFileCommand({
filePath: '/tmp/filename.sh',
} );
userData.addExecuteFileCommand({
filePath: '/test/filename2.sh',
arguments: 'arg1 arg2 -arg $variable',
} );
// THEN
const rendered = userData.render();
expect(rendered).toEqual('#!/bin/bash\n' +
'set -e\n' +
'chmod +x \'/tmp/filename.sh\'\n' +
'\'/tmp/filename.sh\'\n' +
'set -e\n' +
'chmod +x \'/test/filename2.sh\'\n' +
'\'/test/filename2.sh\' arg1 arg2 -arg $variable',
);
});
// Custom user data renders its content verbatim — no shebang is prepended.
test('can create Custom user data', () => {
// GIVEN
// WHEN
const userData = ec2.UserData.custom('Some\nmultiline\ncontent');
// THEN
const rendered = userData.render();
expect(rendered).toEqual('Some\nmultiline\ncontent');
});
// The next four tests verify that custom user data rejects every structured
// helper (on-exit, signal, S3 download, execute file): since the content is
// opaque, the library cannot safely inject commands into it.
test('Custom user data throws when adding on exit commands', () => {
// GIVEN
// WHEN
const userData = ec2.UserData.custom('');
// THEN
expect(() => userData.addOnExitCommands( 'a command goes here' )).toThrow();
});
test('Custom user data throws when adding signal command', () => {
// GIVEN
const stack = new Stack();
const resource = new ec2.Vpc(stack, 'RESOURCE');
// WHEN
const userData = ec2.UserData.custom('');
// THEN
expect(() => userData.addSignalOnExitCommand( resource )).toThrow();
});
test('Custom user data throws when downloading file', () => {
// GIVEN
const stack = new Stack();
const userData = ec2.UserData.custom('');
const bucket = Bucket.fromBucketName( stack, 'testBucket', 'test' );
// WHEN
// THEN
expect(() => userData.addS3DownloadCommand({
bucket,
bucketKey: 'filename.sh',
})).toThrow();
});
test('Custom user data throws when executing file', () => {
// GIVEN
const userData = ec2.UserData.custom('');
// WHEN
// THEN
expect(() =>
userData.addExecuteFileCommand({
filePath: '/tmp/filename.sh',
})).toThrow();
});
// Verifies MultipartBody.fromUserData body-part rendering: the default
// content type is text/x-shellscript, an explicit content type overrides it,
// and in both cases the script itself is emitted base64-encoded via Fn::Base64.
test('Linux user rendering multipart headers', () => {
// GIVEN
const stack = new Stack();
const linuxUserData = ec2.UserData.forLinux();
linuxUserData.addCommands('echo "Hello world"');
// WHEN
const defaultRender1 = ec2.MultipartBody.fromUserData(linuxUserData);
const defaultRender2 = ec2.MultipartBody.fromUserData(linuxUserData, 'text/cloud-boothook; charset=\"utf-8\"');
// THEN
expect(stack.resolve(defaultRender1.renderBodyPart())).toEqual([
'Content-Type: text/x-shellscript; charset=\"utf-8\"',
'Content-Transfer-Encoding: base64',
'',
{ 'Fn::Base64': '#!/bin/bash\necho \"Hello world\"' },
]);
expect(stack.resolve(defaultRender2.renderBodyPart())).toEqual([
'Content-Type: text/cloud-boothook; charset=\"utf-8\"',
'Content-Transfer-Encoding: base64',
'',
{ 'Fn::Base64': '#!/bin/bash\necho \"Hello world\"' },
]);
});
// Verifies that MultipartUserData falls back to the built-in
// '+AWS+CDK+User+Data+Separator==' MIME boundary when none is configured.
test('Default parts separator used, if not specified', () => {
// GIVEN
const multipart = new ec2.MultipartUserData();
multipart.addPart(ec2.MultipartBody.fromRawBody({
contentType: 'CT',
}));
// WHEN
const out = multipart.render();
// THEN
expect(out).toEqual([
'Content-Type: multipart/mixed; boundary=\"+AWS+CDK+User+Data+Separator==\"',
'MIME-Version: 1.0',
'',
'--+AWS+CDK+User+Data+Separator==',
'Content-Type: CT',
'',
'--+AWS+CDK+User+Data+Separator==--',
'',
].join('\n'));
});
// Verifies that an explicitly configured partsSeparator replaces the default
// MIME boundary in both the Content-Type header and the part delimiters.
// Fix: the test name previously read "if not specified", contradicting the
// setup which DOES specify partsSeparator; the second "// WHEN" marker was
// also corrected to "// THEN".
test('Non-default parts separator used, if specified', () => {
// GIVEN
const multipart = new ec2.MultipartUserData({
partsSeparator: '//',
});
multipart.addPart(ec2.MultipartBody.fromRawBody({
contentType: 'CT',
}));
// WHEN
const out = multipart.render();
// THEN
expect(out).toEqual([
'Content-Type: multipart/mixed; boundary=\"//\"',
'MIME-Version: 1.0',
'',
'--//',
'Content-Type: CT',
'',
'--//--',
'',
].join('\n'));
});
// Verifies boundary validation: RFC-legal boundary characters are accepted,
// while whitespace, control characters, brackets and non-ASCII are rejected.
test('Multipart separator validation', () => {
// Happy path
new ec2.MultipartUserData();
new ec2.MultipartUserData({
partsSeparator: 'a-zA-Z0-9()+,-./:=?',
});
[' ', '\n', '\r', '[', ']', '<', '>', '違う'].forEach(s => expect(() => {
new ec2.MultipartUserData({
partsSeparator: s,
});
}).toThrow(/Invalid characters in separator/));
});
// The next four tests verify that MultipartUserData rejects the structured
// helpers (on-exit, signal, S3 download, execute file) when no default part
// has been attached to receive them.
test('Multipart user data throws when adding on exit commands', () => {
// GIVEN
// WHEN
const userData = new ec2.MultipartUserData();
// THEN
expect(() => userData.addOnExitCommands( 'a command goes here' )).toThrow();
});
test('Multipart user data throws when adding signal command', () => {
// GIVEN
const stack = new Stack();
const resource = new ec2.Vpc(stack, 'RESOURCE');
// WHEN
const userData = new ec2.MultipartUserData();
// THEN
expect(() => userData.addSignalOnExitCommand( resource )).toThrow();
});
test('Multipart user data throws when downloading file', () => {
// GIVEN
const stack = new Stack();
const userData = new ec2.MultipartUserData();
const bucket = Bucket.fromBucketName( stack, 'testBucket', 'test' );
// WHEN
// THEN
expect(() => userData.addS3DownloadCommand({
bucket,
bucketKey: 'filename.sh',
} )).toThrow();
});
test('Multipart user data throws when executing file', () => {
// GIVEN
const userData = new ec2.MultipartUserData();
// WHEN
// THEN
expect(() =>
userData.addExecuteFileCommand({
filePath: '/tmp/filename.sh',
} )).toThrow();
});
// The remaining tests verify delegation: once addUserDataPart(..., true) marks
// an inner UserData as the default part, the structured helpers called on the
// MultipartUserData are forwarded to that inner part, and the multipart render
// wraps the inner script (base64-encoded) in MIME headers via Fn::Join.
test('can add commands to Multipart user data', () => {
// GIVEN
const stack = new Stack();
const innerUserData = ec2.UserData.forLinux();
const userData = new ec2.MultipartUserData();
// WHEN
userData.addUserDataPart(innerUserData, ec2.MultipartBody.SHELL_SCRIPT, true);
userData.addCommands('command1', 'command2');
// THEN
const expectedInner = '#!/bin/bash\ncommand1\ncommand2';
const rendered = innerUserData.render();
expect(rendered).toEqual(expectedInner);
const out = stack.resolve(userData.render());
expect(out).toEqual({
'Fn::Join': [
'',
[
[
'Content-Type: multipart/mixed; boundary="+AWS+CDK+User+Data+Separator=="',
'MIME-Version: 1.0',
'',
'--+AWS+CDK+User+Data+Separator==',
'Content-Type: text/x-shellscript; charset="utf-8"',
'Content-Transfer-Encoding: base64',
'',
'',
].join('\n'),
{
'Fn::Base64': expectedInner,
},
'\n--+AWS+CDK+User+Data+Separator==--\n',
],
],
});
});
// Delegated on-exit commands must appear in the inner script's exit trap.
test('can add commands on exit to Multipart user data', () => {
// GIVEN
const stack = new Stack();
const innerUserData = ec2.UserData.forLinux();
const userData = new ec2.MultipartUserData();
// WHEN
userData.addUserDataPart(innerUserData, ec2.MultipartBody.SHELL_SCRIPT, true);
userData.addCommands('command1', 'command2');
userData.addOnExitCommands('onexit1', 'onexit2');
// THEN
const expectedInner = '#!/bin/bash\n' +
'function exitTrap(){\n' +
'exitCode=$?\n' +
'onexit1\n' +
'onexit2\n' +
'}\n' +
'trap exitTrap EXIT\n' +
'command1\n' +
'command2';
const rendered = stack.resolve(innerUserData.render());
expect(rendered).toEqual(expectedInner);
const out = stack.resolve(userData.render());
expect(out).toEqual({
'Fn::Join': [
'',
[
[
'Content-Type: multipart/mixed; boundary="+AWS+CDK+User+Data+Separator=="',
'MIME-Version: 1.0',
'',
'--+AWS+CDK+User+Data+Separator==',
'Content-Type: text/x-shellscript; charset="utf-8"',
'Content-Transfer-Encoding: base64',
'',
'',
].join('\n'),
{
'Fn::Base64': expectedInner,
},
'\n--+AWS+CDK+User+Data+Separator==--\n',
],
],
});
});
// Delegated signal command must emit cfn-signal inside the inner exit trap.
test('can add Signal Command to Multipart user data', () => {
// GIVEN
const stack = new Stack();
const resource = new ec2.Vpc(stack, 'RESOURCE');
const innerUserData = ec2.UserData.forLinux();
const userData = new ec2.MultipartUserData();
// WHEN
userData.addUserDataPart(innerUserData, ec2.MultipartBody.SHELL_SCRIPT, true);
userData.addCommands('command1');
userData.addSignalOnExitCommand( resource );
// THEN
const expectedInner = stack.resolve('#!/bin/bash\n' +
'function exitTrap(){\n' +
'exitCode=$?\n' +
`/opt/aws/bin/cfn-signal --stack Default --resource RESOURCE1989552F --region ${Aws.REGION} -e $exitCode || echo \'Failed to send Cloudformation Signal\'\n` +
'}\n' +
'trap exitTrap EXIT\n' +
'command1');
const rendered = stack.resolve(innerUserData.render());
expect(rendered).toEqual(expectedInner);
const out = stack.resolve(userData.render());
expect(out).toEqual({
'Fn::Join': [
'',
[
[
'Content-Type: multipart/mixed; boundary="+AWS+CDK+User+Data+Separator=="',
'MIME-Version: 1.0',
'',
'--+AWS+CDK+User+Data+Separator==',
'Content-Type: text/x-shellscript; charset="utf-8"',
'Content-Transfer-Encoding: base64',
'',
'',
].join('\n'),
{
'Fn::Base64': expectedInner,
},
'\n--+AWS+CDK+User+Data+Separator==--\n',
],
],
});
});
// Delegated S3 download commands must land in the inner script.
test('can add download S3 files to Multipart user data', () => {
// GIVEN
const stack = new Stack();
const innerUserData = ec2.UserData.forLinux();
const userData = new ec2.MultipartUserData();
const bucket = Bucket.fromBucketName( stack, 'testBucket', 'test' );
const bucket2 = Bucket.fromBucketName( stack, 'testBucket2', 'test2' );
// WHEN
userData.addUserDataPart(innerUserData, ec2.MultipartBody.SHELL_SCRIPT, true);
userData.addS3DownloadCommand({
bucket,
bucketKey: 'filename.sh',
} );
userData.addS3DownloadCommand({
bucket: bucket2,
bucketKey: 'filename2.sh',
localFile: 'c:\\test\\location\\otherScript.sh',
} );
// THEN
const expectedInner = '#!/bin/bash\n' +
'mkdir -p $(dirname \'/tmp/filename.sh\')\n' +
'aws s3 cp \'s3://test/filename.sh\' \'/tmp/filename.sh\'\n' +
'mkdir -p $(dirname \'c:\\test\\location\\otherScript.sh\')\n' +
'aws s3 cp \'s3://test2/filename2.sh\' \'c:\\test\\location\\otherScript.sh\'';
const rendered = stack.resolve(innerUserData.render());
expect(rendered).toEqual(expectedInner);
const out = stack.resolve(userData.render());
expect(out).toEqual({
'Fn::Join': [
'',
[
[
'Content-Type: multipart/mixed; boundary="+AWS+CDK+User+Data+Separator=="',
'MIME-Version: 1.0',
'',
'--+AWS+CDK+User+Data+Separator==',
'Content-Type: text/x-shellscript; charset="utf-8"',
'Content-Transfer-Encoding: base64',
'',
'',
].join('\n'),
{
'Fn::Base64': expectedInner,
},
'\n--+AWS+CDK+User+Data+Separator==--\n',
],
],
});
});
// Delegated execute-file commands must land in the inner script.
test('can add execute files to Multipart user data', () => {
// GIVEN
const stack = new Stack();
const innerUserData = ec2.UserData.forLinux();
const userData = new ec2.MultipartUserData();
// WHEN
userData.addUserDataPart(innerUserData, ec2.MultipartBody.SHELL_SCRIPT, true);
userData.addExecuteFileCommand({
filePath: '/tmp/filename.sh',
} );
userData.addExecuteFileCommand({
filePath: '/test/filename2.sh',
arguments: 'arg1 arg2 -arg $variable',
} );
// THEN
const expectedInner = '#!/bin/bash\n' +
'set -e\n' +
'chmod +x \'/tmp/filename.sh\'\n' +
'\'/tmp/filename.sh\'\n' +
'set -e\n' +
'chmod +x \'/test/filename2.sh\'\n' +
'\'/test/filename2.sh\' arg1 arg2 -arg $variable';
const rendered = stack.resolve(innerUserData.render());
expect(rendered).toEqual(expectedInner);
const out = stack.resolve(userData.render());
expect(out).toEqual({
'Fn::Join': [
'',
[
[
'Content-Type: multipart/mixed; boundary="+AWS+CDK+User+Data+Separator=="',
'MIME-Version: 1.0',
'',
'--+AWS+CDK+User+Data+Separator==',
'Content-Type: text/x-shellscript; charset="utf-8"',
'Content-Transfer-Encoding: base64',
'',
'',
].join('\n'),
{
'Fn::Base64': expectedInner,
},
'\n--+AWS+CDK+User+Data+Separator==--\n',
],
],
});
});
}); | the_stack |
import { Component, OnInit, OnDestroy, ViewChild, Input, AfterViewInit } from '@angular/core';
import { DatePipe, DecimalPipe } from '@angular/common';
import { Subject } from 'rxjs';
import { takeUntil, take } from 'rxjs/operators';
import { Store } from '@ngrx/store';
import { faHistory } from '@fortawesome/free-solid-svg-icons';
import { MatPaginator, MatPaginatorIntl } from '@angular/material/paginator';
import { MatSort } from '@angular/material/sort';
import { MatTableDataSource } from '@angular/material/table';
import { GetInfo, Payment, PayRequest, PaymentHTLC, Peer, Hop, ListPayments, ListInvoices } from '../../../shared/models/lndModels';
import { PAGE_SIZE, PAGE_SIZE_OPTIONS, getPaginatorLabel, AlertTypeEnum, DataTypeEnum, ScreenSizeEnum, CurrencyUnitEnum, CURRENCY_UNIT_FORMATS, APICallStatusEnum, UI_MESSAGES } from '../../../shared/services/consts-enums-functions';
import { LoggerService } from '../../../shared/services/logger.service';
import { CommonService } from '../../../shared/services/common.service';
import { DataService } from '../../../shared/services/data.service';
import { ApiCallStatusPayload } from '../../../shared/models/apiCallsPayload';
import { SelNodeChild } from '../../../shared/models/RTLconfig';
import { LightningSendPaymentsComponent } from '../send-payment-modal/send-payment.component';
import { LNDEffects } from '../../store/lnd.effects';
import { RTLEffects } from '../../../store/rtl.effects';
import { RTLState } from '../../../store/rtl.state';
import { openAlert, openConfirmation } from '../../../store/rtl.actions';
import { fetchPayments, sendPayment } from '../../store/lnd.actions';
import { lndNodeInformation, lndNodeSettings, payments, peers } from '../../store/lnd.selector';
@Component({
selector: 'rtl-lightning-payments',
templateUrl: './lightning-payments.component.html',
styleUrls: ['./lightning-payments.component.scss'],
providers: [
{ provide: MatPaginatorIntl, useValue: getPaginatorLabel('Payments') }
]
})
export class LightningPaymentsComponent implements OnInit, AfterViewInit, OnDestroy {
@Input() calledFrom = 'transactions'; // Transactions/home
@ViewChild('sendPaymentForm', { static: false }) form; // Static should be false due to ngIf on form element
@ViewChild(MatSort, { static: false }) sort: MatSort | undefined;
@ViewChild(MatPaginator, { static: false }) paginator: MatPaginator | undefined;
public faHistory = faHistory;
public newlyAddedPayment = '';
public selNode: SelNodeChild = {};
public information: GetInfo = {};
public peers: Peer[] = [];
public payments: any;
public totalPayments = 100;
public paymentJSONArr: Payment[] = [];
public displayedColumns: any[] = [];
public htlcColumns = [];
public paymentDecoded: PayRequest = {};
public paymentRequest = '';
public paymentDecodedHint = '';
public flgSticky = false;
private firstOffset = -1;
private lastOffset = -1;
public selFilter = '';
public pageSize = PAGE_SIZE;
public pageSizeOptions = PAGE_SIZE_OPTIONS;
public screenSize = '';
public screenSizeEnum = ScreenSizeEnum;
public errorMessage = '';
public apiCallStatus: ApiCallStatusPayload = null;
public apiCallStatusEnum = APICallStatusEnum;
private unSubs: Array<Subject<void>> = [new Subject(), new Subject(), new Subject(), new Subject(), new Subject(), new Subject(), new Subject(), new Subject(), new Subject(), new Subject()];
constructor(private logger: LoggerService, private commonService: CommonService, private dataService: DataService, private store: Store<RTLState>, private rtlEffects: RTLEffects, private lndEffects: LNDEffects, private decimalPipe: DecimalPipe, private datePipe: DatePipe) {
this.screenSize = this.commonService.getScreenSize();
if (this.screenSize === ScreenSizeEnum.XS) {
this.flgSticky = false;
this.displayedColumns = ['creation_date', 'fee', 'actions'];
this.htlcColumns = ['groupTotal', 'groupFee', 'groupAction'];
} else if (this.screenSize === ScreenSizeEnum.SM) {
this.flgSticky = false;
this.displayedColumns = ['creation_date', 'fee', 'value', 'hops', 'actions'];
this.htlcColumns = ['groupTotal', 'groupFee', 'groupValue', 'groupHops', 'groupAction'];
} else if (this.screenSize === ScreenSizeEnum.MD) {
this.flgSticky = false;
this.displayedColumns = ['creation_date', 'fee', 'value', 'hops', 'actions'];
this.htlcColumns = ['groupTotal', 'groupFee', 'groupValue', 'groupHops', 'groupAction'];
} else {
this.flgSticky = true;
this.displayedColumns = ['creation_date', 'payment_hash', 'fee', 'value', 'hops', 'actions'];
this.htlcColumns = ['groupTotal', 'groupHash', 'groupFee', 'groupValue', 'groupHops', 'groupAction'];
}
}
ngOnInit() {
this.store.select(lndNodeSettings).pipe(takeUntil(this.unSubs[0])).subscribe((nodeSettings: SelNodeChild) => { this.selNode = nodeSettings; });
this.store.select(lndNodeInformation).pipe(takeUntil(this.unSubs[1])).subscribe((nodeInfo: GetInfo) => { this.information = nodeInfo; });
this.store.select(peers).pipe(takeUntil(this.unSubs[2])).
subscribe((peersSelector: { peers: Peer[], apiCallStatus: ApiCallStatusPayload }) => {
this.peers = peersSelector.peers;
});
this.store.select(payments).pipe(takeUntil(this.unSubs[3])).
subscribe((paymentsSelector: { listPayments: ListPayments, apiCallStatus: ApiCallStatusPayload }) => {
this.errorMessage = '';
this.apiCallStatus = paymentsSelector.apiCallStatus;
if (this.apiCallStatus.status === APICallStatusEnum.ERROR) {
this.errorMessage = (typeof (this.apiCallStatus.message) === 'object') ? JSON.stringify(this.apiCallStatus.message) : this.apiCallStatus.message;
}
this.paymentJSONArr = paymentsSelector.listPayments.payments || [];
this.totalPayments = this.paymentJSONArr.length;
this.firstOffset = +paymentsSelector.listPayments.first_index_offset;
this.lastOffset = +paymentsSelector.listPayments.last_index_offset;
if (this.paymentJSONArr && this.paymentJSONArr.length > 0 && this.sort && this.paginator) {
// this.loadPaymentsTable(this.paymentJSONArr);
this.loadPaymentsTable(this.paymentJSONArr.slice(0, this.pageSize));
}
this.logger.info(paymentsSelector);
});
}
ngAfterViewInit() {
if (this.paymentJSONArr && this.paymentJSONArr.length > 0) {
// this.loadPaymentsTable(this.paymentJSONArr);
this.loadPaymentsTable(this.paymentJSONArr.slice(0, this.pageSize));
}
}
onSendPayment(): boolean | void {
if (!this.paymentRequest) {
return true;
}
if (this.paymentDecoded.timestamp) {
this.sendPayment();
} else {
this.dataService.decodePayment(this.paymentRequest, false).
pipe(take(1)).subscribe((decodedPayment: PayRequest) => {
this.paymentDecoded = decodedPayment;
if (this.paymentDecoded.timestamp) {
if (this.paymentDecoded.num_msat && !this.paymentDecoded.num_satoshis) {
this.paymentDecoded.num_satoshis = (+this.paymentDecoded.num_msat / 1000).toString();
} else {
this.paymentDecoded.num_satoshis = '0';
}
this.sendPayment();
} else {
this.resetData();
}
});
}
}
sendPayment() {
this.newlyAddedPayment = this.paymentDecoded.payment_hash;
if (this.paymentDecoded.num_msat && !this.paymentDecoded.num_satoshis) {
this.paymentDecoded.num_satoshis = (+this.paymentDecoded.num_msat / 1000).toString();
}
if (!this.paymentDecoded.num_satoshis || this.paymentDecoded.num_satoshis === '' || this.paymentDecoded.num_satoshis === '0') {
const reorderedPaymentDecoded = [
[{ key: 'payment_hash', value: this.paymentDecoded.payment_hash, title: 'Payment Hash', width: 100 }],
[{ key: 'destination', value: this.paymentDecoded.destination, title: 'Destination', width: 100 }],
[{ key: 'description', value: this.paymentDecoded.description, title: 'Description', width: 100 }],
[{ key: 'timestamp', value: this.paymentDecoded.timestamp, title: 'Creation Date', width: 40, type: DataTypeEnum.DATE_TIME },
{ key: 'expiry', value: this.paymentDecoded.expiry, title: 'Expiry', width: 30, type: DataTypeEnum.NUMBER },
{ key: 'cltv_expiry', value: this.paymentDecoded.cltv_expiry, title: 'CLTV Expiry', width: 30 }]
];
const titleMsg = 'It is a zero amount invoice. Enter the amount (Sats) to pay.';
this.store.dispatch(openConfirmation({
payload: {
data: {
type: AlertTypeEnum.CONFIRM,
alertTitle: 'Enter Amount and Confirm Send Payment',
titleMessage: titleMsg,
message: reorderedPaymentDecoded,
noBtnText: 'Cancel',
yesBtnText: 'Send Payment',
flgShowInput: true,
getInputs: [
{ placeholder: 'Amount (Sats)', inputType: DataTypeEnum.NUMBER.toLowerCase(), inputValue: '', width: 30 }
]
}
}
}));
this.rtlEffects.closeConfirm.
pipe(take(1)).
subscribe((confirmRes) => {
if (confirmRes) {
this.paymentDecoded.num_satoshis = confirmRes[0].inputValue;
this.store.dispatch(sendPayment({ payload: { uiMessage: UI_MESSAGES.SEND_PAYMENT, paymentReq: this.paymentRequest, paymentAmount: confirmRes[0].inputValue, fromDialog: false } }));
this.resetData();
}
});
} else {
const reorderedPaymentDecoded = [
[{ key: 'payment_hash', value: this.paymentDecoded.payment_hash, title: 'Payment Hash', width: 100 }],
[{ key: 'destination', value: this.paymentDecoded.destination, title: 'Destination', width: 100 }],
[{ key: 'description', value: this.paymentDecoded.description, title: 'Description', width: 100 }],
[{ key: 'timestamp', value: this.paymentDecoded.timestamp, title: 'Creation Date', width: 50, type: DataTypeEnum.DATE_TIME },
{ key: 'num_satoshis', value: this.paymentDecoded.num_satoshis, title: 'Amount (Sats)', width: 50, type: DataTypeEnum.NUMBER }],
[{ key: 'expiry', value: this.paymentDecoded.expiry, title: 'Expiry', width: 50, type: DataTypeEnum.NUMBER },
{ key: 'cltv_expiry', value: this.paymentDecoded.cltv_expiry, title: 'CLTV Expiry', width: 50 }]
];
this.store.dispatch(openConfirmation({
payload: {
data: {
type: AlertTypeEnum.CONFIRM,
alertTitle: 'Confirm Send Payment',
noBtnText: 'Cancel',
yesBtnText: 'Send Payment',
message: reorderedPaymentDecoded
}
}
}));
this.rtlEffects.closeConfirm.
pipe(take(1)).
subscribe((confirmRes) => {
if (confirmRes) {
this.store.dispatch(sendPayment({ payload: { uiMessage: UI_MESSAGES.SEND_PAYMENT, paymentReq: this.paymentRequest, fromDialog: false } }));
this.resetData();
}
});
}
}
openSendPaymentModal() {
this.store.dispatch(openAlert({
payload: {
data: {
component: LightningSendPaymentsComponent
}
}
}));
}
onPaymentRequestEntry(event: any) {
this.paymentRequest = event;
this.paymentDecodedHint = '';
if (this.paymentRequest && this.paymentRequest.length > 100) {
this.dataService.decodePayment(this.paymentRequest, false).
pipe(take(1)).subscribe((decodedPayment: PayRequest) => {
this.paymentDecoded = decodedPayment;
if (this.paymentDecoded.num_msat && !this.paymentDecoded.num_satoshis) {
this.paymentDecoded.num_satoshis = (+this.paymentDecoded.num_msat / 1000).toString();
}
if (this.paymentDecoded.num_satoshis) {
if (this.selNode.fiatConversion) {
this.commonService.convertCurrency(+this.paymentDecoded.num_satoshis, CurrencyUnitEnum.SATS, CurrencyUnitEnum.OTHER, this.selNode.currencyUnits[2], this.selNode.fiatConversion).
pipe(takeUntil(this.unSubs[5])).
subscribe({
next: (data) => {
this.paymentDecodedHint = 'Sending: ' + this.decimalPipe.transform(this.paymentDecoded.num_satoshis ? this.paymentDecoded.num_satoshis : 0) + ' Sats (' + data.symbol + this.decimalPipe.transform((data.OTHER ? data.OTHER : 0), CURRENCY_UNIT_FORMATS.OTHER) + ') | Memo: ' + this.paymentDecoded.description;
}, error: (error) => {
this.paymentDecodedHint = 'Sending: ' + this.decimalPipe.transform(this.paymentDecoded.num_satoshis ? this.paymentDecoded.num_satoshis : 0) + ' Sats | Memo: ' + this.paymentDecoded.description + '. Unable to convert currency.';
}
});
} else {
this.paymentDecodedHint = 'Sending: ' + this.decimalPipe.transform(this.paymentDecoded.num_satoshis ? this.paymentDecoded.num_satoshis : 0) + ' Sats | Memo: ' + this.paymentDecoded.description;
}
} else {
this.paymentDecodedHint = 'Zero Amount Invoice | Memo: ' + this.paymentDecoded.description;
}
});
}
}
onPageChange(event: any) {
let reverse = true;
let index_offset = this.lastOffset;
let page_size = event.pageSize;
if (event.pageIndex === 0) {
reverse = true;
index_offset = 0;
} else if (event.pageIndex < event.previousPageIndex) {
reverse = false;
index_offset = this.lastOffset;
} else if (event.pageIndex > event.previousPageIndex && (event.length > ((event.pageIndex + 1) * event.pageSize))) {
reverse = true;
index_offset = this.firstOffset;
} else if (event.length <= ((event.pageIndex + 1) * event.pageSize)) {
reverse = false;
index_offset = 0;
page_size = event.length - (event.pageIndex * event.pageSize);
}
const starting_index = event.pageIndex * this.pageSize;
this.loadPaymentsTable(this.paymentJSONArr.slice(starting_index, (starting_index + this.pageSize)));
// this.store.dispatch(fetchPayments({ payload: { max_payments: page_size, index_offset: index_offset, reversed: reverse } }));
}
is_group(index: number, payment: Payment) {
return payment.htlcs && payment.htlcs.length > 1;
}
resetData() {
this.paymentDecoded = {};
this.paymentRequest = '';
this.form.resetForm();
}
getHopDetails(hops: Hop[]) {
const self = this;
return hops.reduce((accumulator, currentHop) => {
const peerFound = self.peers.find((peer) => peer.pub_key === currentHop.pub_key);
if (peerFound && peerFound.alias) {
accumulator.push('<pre>Channel: ' + peerFound.alias.padEnd(20) + '			Amount (Sats): ' + self.decimalPipe.transform(currentHop.amt_to_forward) + '</pre>');
} else {
self.dataService.getAliasesFromPubkeys(currentHop.pub_key, false).
pipe(takeUntil(self.unSubs[6])).
subscribe((res: any) => {
accumulator.push('<pre>Channel: ' + (res.node && res.node.alias ? res.node.alias.padEnd(20) : (currentHop.pub_key.substring(0, 17) + '...')) + '			Amount (Sats): ' + self.decimalPipe.transform(currentHop.amt_to_forward) + '</pre>');
});
}
return accumulator;
}, []);
}
onHTLCClick(selHtlc: PaymentHTLC, selPayment: Payment) {
if (selPayment.payment_request && selPayment.payment_request.trim() !== '') {
this.dataService.decodePayment(selPayment.payment_request, false).
pipe(take(1)).subscribe({
next: (decodedPayment: PayRequest) => {
setTimeout(() => {
this.showHTLCView(selHtlc, selPayment, decodedPayment);
}, 0);
}, error: (error) => {
this.showHTLCView(selHtlc, selPayment, null);
}
});
} else {
this.showHTLCView(selHtlc, selPayment, null);
}
}
showHTLCView(selHtlc: PaymentHTLC, selPayment: Payment, decodedPayment?: PayRequest) {
const reorderedHTLC = [
[{ key: 'payment_hash', value: selPayment.payment_hash, title: 'Payment Hash', width: 100, type: DataTypeEnum.STRING }],
[{ key: 'preimage', value: selHtlc.preimage, title: 'Preimage', width: 100, type: DataTypeEnum.STRING }],
[{ key: 'payment_request', value: selPayment.payment_request, title: 'Payment Request', width: 100, type: DataTypeEnum.STRING }],
[{ key: 'status', value: selHtlc.status, title: 'Status', width: 33, type: DataTypeEnum.STRING },
{ key: 'attempt_time_ns', value: +selHtlc.attempt_time_ns / 1000000000, title: 'Attempt Time', width: 33, type: DataTypeEnum.DATE_TIME },
{ key: 'resolve_time_ns', value: +selHtlc.resolve_time_ns / 1000000000, title: 'Resolve Time', width: 34, type: DataTypeEnum.DATE_TIME }],
[{ key: 'total_amt', value: selHtlc.route.total_amt, title: 'Amount (Sats)', width: 33, type: DataTypeEnum.NUMBER },
{ key: 'total_fees', value: selHtlc.route.total_fees, title: 'Fee (Sats)', width: 33, type: DataTypeEnum.NUMBER },
{ key: 'total_time_lock', value: selHtlc.route.total_time_lock, title: 'Total Time Lock', width: 34, type: DataTypeEnum.NUMBER }],
[{ key: 'hops', value: this.getHopDetails(selHtlc.route.hops), title: 'Hops', width: 100, type: DataTypeEnum.ARRAY }]
];
if (decodedPayment && decodedPayment.description && decodedPayment.description !== '') {
reorderedHTLC.splice(3, 0, [{ key: 'description', value: decodedPayment.description, title: 'Description', width: 100, type: DataTypeEnum.STRING }]);
}
this.store.dispatch(openAlert({
payload: {
data: {
type: AlertTypeEnum.INFORMATION,
alertTitle: 'HTLC Information',
message: reorderedHTLC,
scrollable: selHtlc.route && selHtlc.route.hops && selHtlc.route.hops.length > 1
}
}
}));
}
onPaymentClick(selPayment: Payment) {
if (selPayment.htlcs && selPayment.htlcs[0] && selPayment.htlcs[0].route && selPayment.htlcs[0].route.hops && selPayment.htlcs[0].route.hops.length > 0) {
const nodePubkeys = selPayment.htlcs[0].route.hops.reduce((pubkeys, hop) => (pubkeys === '' ? hop.pub_key : pubkeys + ',' + hop.pub_key), '');
this.dataService.getAliasesFromPubkeys(nodePubkeys, true).pipe(takeUntil(this.unSubs[7])).
subscribe((nodes: any) => {
this.showPaymentView(selPayment, nodes.reduce((pathAliases, node) => (pathAliases === '' ? node : pathAliases + '\n' + node), ''));
});
} else {
this.showPaymentView(selPayment, '');
}
}
showPaymentView(selPayment: Payment, pathAliases: string) {
const reorderedPayment = [
[{ key: 'payment_hash', value: selPayment.payment_hash, title: 'Payment Hash', width: 100, type: DataTypeEnum.STRING }],
[{ key: 'payment_preimage', value: selPayment.payment_preimage, title: 'Payment Preimage', width: 100, type: DataTypeEnum.STRING }],
[{ key: 'payment_request', value: selPayment.payment_request, title: 'Payment Request', width: 100, type: DataTypeEnum.STRING }],
[{ key: 'status', value: selPayment.status, title: 'Status', width: 50, type: DataTypeEnum.STRING },
{ key: 'creation_date', value: selPayment.creation_date, title: 'Creation Date', width: 50, type: DataTypeEnum.DATE_TIME }],
[{ key: 'value_msat', value: selPayment.value_msat, title: 'Value (mSats)', width: 50, type: DataTypeEnum.NUMBER },
{ key: 'fee_msat', value: selPayment.fee_msat, title: 'Fee (mSats)', width: 50, type: DataTypeEnum.NUMBER }],
[{ key: 'path', value: pathAliases, title: 'Path', width: 100, type: DataTypeEnum.STRING }]
];
if (selPayment.payment_request && selPayment.payment_request.trim() !== '') {
this.dataService.decodePayment(selPayment.payment_request, false).
pipe(take(1)).subscribe((decodedPayment: PayRequest) => {
if (decodedPayment && decodedPayment.description && decodedPayment.description !== '') {
reorderedPayment.splice(3, 0, [{ key: 'description', value: decodedPayment.description, title: 'Description', width: 100, type: DataTypeEnum.STRING }]);
}
setTimeout(() => {
this.openPaymentAlert(reorderedPayment, (selPayment.htlcs && selPayment.htlcs[0] && selPayment.htlcs[0].route && selPayment.htlcs[0].route.hops && selPayment.htlcs[0].route.hops.length > 1));
}, 0);
});
} else {
this.openPaymentAlert(reorderedPayment, false);
}
}
openPaymentAlert(data: any, shouldScroll: boolean) {
this.store.dispatch(openAlert({
payload: {
data: {
type: AlertTypeEnum.INFORMATION,
alertTitle: 'Payment Information',
message: data,
scrollable: shouldScroll
}
}
}));
}
applyFilter() {
this.payments.filter = this.selFilter.trim().toLowerCase();
}
loadPaymentsTable(payms) {
this.payments = payms ? new MatTableDataSource<Payment>([...payms]) : new MatTableDataSource([]);
this.payments.sortingDataAccessor = (data: any, sortHeaderId: string) => {
switch (sortHeaderId) {
case 'hops':
return (data.htlcs.length && data.htlcs[0] && data.htlcs[0].route && data.htlcs[0].route.hops && data.htlcs[0].route.hops.length) ? data.htlcs[0].route.hops.length : 0;
default:
return (data[sortHeaderId] && isNaN(data[sortHeaderId])) ? data[sortHeaderId].toLocaleLowerCase() : data[sortHeaderId] ? +data[sortHeaderId] : null;
}
};
this.payments.sort = this.sort;
this.payments.filterPredicate = (payment: Payment, fltr: string) => {
const newPayment = ((payment.creation_date) ? this.datePipe.transform(new Date(payment.creation_date * 1000), 'dd/MMM/YYYY HH:mm').toLowerCase() : '') + JSON.stringify(payment).toLowerCase();
return newPayment.includes(fltr);
};
this.applyFilter();
}
/**
 * Exports the payments table as a CSV file. Payment descriptions are first
 * resolved by batch-decoding all available payment requests.
 */
onDownloadCSV() {
  if (this.payments.data && this.payments.data.length > 0) {
    // Deep copy so the decoded descriptions do not mutate the live table data.
    const paymentsDataCopy = JSON.parse(JSON.stringify(this.payments.data));
    // Comma-separated list of all non-empty payment requests for batch decoding.
    const paymentRequests = paymentsDataCopy.reduce((paymentReqs, payment) => {
      if (payment.payment_request && payment.payment_request.trim() !== '') {
        paymentReqs = (paymentReqs === '') ? payment.payment_request : paymentReqs + ',' + payment.payment_request;
      }
      return paymentReqs;
    }, '');
    this.dataService.decodePayments(paymentRequests).
      pipe(takeUntil(this.unSubs[8])).
      subscribe((decodedPayments: PayRequest[]) => {
        // Match each decoded invoice back to its payment by payment_hash.
        // The previous index-offset walk ('increament') could read past the end
        // of the array when a hash was not found; a direct lookup is safe and
        // payment hashes are unique per payment.
        decodedPayments.forEach((decodedPayment) => {
          if (decodedPayment) {
            const matchedPayment = paymentsDataCopy.find((payment) => payment.payment_hash === decodedPayment.payment_hash);
            if (matchedPayment) {
              matchedPayment.description = decodedPayment.description;
            }
          }
        });
        // The former reduce/concat was an identity copy of the array; pass the
        // enriched copy directly.
        this.commonService.downloadFile(paymentsDataCopy, 'Payments');
      });
  }
}
// Completes every teardown subject so all takeUntil-bound subscriptions are
// released when the component is destroyed.
ngOnDestroy() {
  for (const completeSub of this.unSubs) {
    completeSub.next(null);
    completeSub.complete();
  }
}
} | the_stack |
import { toQueryString } from "../../utils";
import { SdkClient } from "../common/sdk-client";
import { AnomalyDetectionModels } from "./anomaly-detection-models";
/**
* Service for configuring, reading and managing assets, asset ~ and aspect types.
*
* @export
* @class AssetManagementClient
* @extends {SdkClient}
*/
export class AnomalyDetectionClient extends SdkClient {
private _baseUrl: string = "/api/anomalydetection/v3";
/**
* Creates new model based on given timeseries data. Analytic Model Management service is used to store created model.
* @summary Train model
* @param {Array<Timeseries>} ioTTimeSeriesItems An array containing the time series items. Data to train a model. Data must contain 10 variables at max. Each timeseries item must have equal number of variables.
* @param {number} epsilon Threshold for the distance to check if point belongs to cluster.
* @param {number} minPointsPerCluster Minimum cluster size. Positive. Minimum is 2.
* @param {'EUCLIDEAN' | 'MANHATTAN' | 'CHEBYSHEV'} [distanceMeasureAlgorithm] Name of the distance measure algorithm.
* @param {string} [name] Human-friendly name of the model. If a name is provided, it must not be an empty string. Maximum length is 255 characters. Only ASCII characters are allowed. Example 'Test Model
* @returns {Array<AnomalyDetectionModels.Timeseries>}
* @throws {AnomalyDetectionModels.RequiredError}
* @memberof AnomalyDetectionClient
*/
public async PostModel(
ioTTimeSeriesItems: Array<AnomalyDetectionModels.Timeseries>,
epsilon: number,
minPointsPerCluster: number,
distanceMeasureAlgorithm?: "EUCLIDEAN" | "MANHATTAN" | "CHEBYSHEV",
name?: string
): Promise<AnomalyDetectionModels.Model> {
// verify required parameter 'ioTTimeSeriesItems' is not null or undefined
if (ioTTimeSeriesItems === null || ioTTimeSeriesItems === undefined) {
throw new AnomalyDetectionModels.RequiredError(
"ioTTimeSeriesItems",
"Required parameter ioTTimeSeriesItems was null or undefined when calling modelsPost."
);
}
// verify required parameter 'epsilon' is not null or undefined
if (epsilon === null || epsilon === undefined) {
throw new AnomalyDetectionModels.RequiredError(
"epsilon",
"Required parameter epsilon was null or undefined when calling modelsPost."
);
}
// verify required parameter 'minPointsPerCluster' is not null or undefined
if (minPointsPerCluster === null || minPointsPerCluster === undefined) {
throw new AnomalyDetectionModels.RequiredError(
"minPointsPerCluster",
"Required parameter minPointsPerCluster was null or undefined when calling modelsPost."
);
}
const result = await this.HttpAction({
verb: "POST",
gateway: this.GetGateway(),
authorization: await this.GetToken(),
baseUrl: `${this._baseUrl}/models?${toQueryString({
epsilon,
minPointsPerCluster,
distanceMeasureAlgorithm,
name,
})}`,
body: ioTTimeSeriesItems || {},
additionalHeaders: { "Content-Type": "application/json" },
});
return result as AnomalyDetectionModels.Model;
}
/**
* Performs anomaly detection for given timeseries data against specified model.
* @summary Anomaly detection
* @param {Array<AnomalyDetectionModels.Timeseries>} ioTTimeSeriesItems An array containing the time series items. Data to performs detection on. Data must contain 10 variables at max. Each timeseries item must have equal number of variables. Variables must be the same as the ones used to train the model (the same number of variables and the same names).
* @param {string} modelID ID of the model to use.
* @returns {Promise<Array<AnomalyDetectionModels.Anomaly>>}
* @throws {AnomalyDetectionModels.RequiredError}
* @memberof AnomalyDetectionClient
*/
public async DetectAnomalies(
ioTTimeSeriesItems: Array<AnomalyDetectionModels.Timeseries>,
modelID: string
): Promise<Array<AnomalyDetectionModels.Anomaly>> {
// verify required parameter 'ioTTimeSeriesItems' is not null or undefined
if (ioTTimeSeriesItems === null || ioTTimeSeriesItems === undefined) {
throw new AnomalyDetectionModels.RequiredError(
"ioTTimeSeriesItems",
"Required parameter ioTTimeSeriesItems was null or undefined when calling detectanomaliesPost."
);
}
// verify required parameter 'modelID' is not null or undefined
if (modelID === null || modelID === undefined) {
throw new AnomalyDetectionModels.RequiredError(
"modelID",
"Required parameter modelID was null or undefined when calling detectanomaliesPost."
);
}
const result = await this.HttpAction({
verb: "POST",
gateway: this.GetGateway(),
authorization: await this.GetToken(),
baseUrl: `${this._baseUrl}/detectanomalies?${toQueryString({ modelID })}`,
body: ioTTimeSeriesItems || {},
additionalHeaders: { "Content-Type": "application/json" },
});
return result as Array<AnomalyDetectionModels.Anomaly>;
}
/**
* Creates new model based on given asset details. Analytic Model Management service is used to store created model.
* @summary Train model in direct integration with IoT time series
* @param {number} epsilon Threshold for the distance to check if point belongs to cluster.
* @param {number} minPointsPerCluster Minimum cluster size. Positive. Minimum is 2.
* @param {string} assetId unique identifier of the asset (entity)
* @param {string} aspectName Name of the aspect (property set).
* @param {Date} from Beginning of the time range to be retrieved (exclusive).
* @param {Date} to End of the time range to be retrieved (exclusive).
* @param {'EUCLIDEAN' | 'MANHATTAN' | 'CHEBYSHEV'} [distanceMeasureAlgorithm] Name of the distance measure algorithm.
* @param {string} [name] Human-friendly name of the model. If a name is provided, it must not be an empty string. Maximum length is 255 characters. Only ASCII characters are allowed. Example 'Test Model
* @returns {Promise<AnomalyDetectionModels.Model>}
* @throws {AnomalyDetectionModels.RequiredError}
* @memberof AnomalyDetectionClient
*/
public async PostModelDirect(
epsilon: number,
minPointsPerCluster: number,
assetId: string,
aspectName: string,
from: Date,
to: Date,
distanceMeasureAlgorithm?: "EUCLIDEAN" | "MANHATTAN" | "CHEBYSHEV",
name?: string
): Promise<AnomalyDetectionModels.Model> {
// verify required parameter 'epsilon' is not null or undefined
if (epsilon === null || epsilon === undefined) {
throw new AnomalyDetectionModels.RequiredError(
"epsilon",
"Required parameter epsilon was null or undefined when calling modelsDirectPost."
);
}
// verify required parameter 'minPointsPerCluster' is not null or undefined
if (minPointsPerCluster === null || minPointsPerCluster === undefined) {
throw new AnomalyDetectionModels.RequiredError(
"minPointsPerCluster",
"Required parameter minPointsPerCluster was null or undefined when calling modelsDirectPost."
);
}
// verify required parameter 'assetId' is not null or undefined
if (assetId === null || assetId === undefined) {
throw new AnomalyDetectionModels.RequiredError(
"assetId",
"Required parameter assetId was null or undefined when calling modelsDirectPost."
);
}
// verify required parameter 'aspectName' is not null or undefined
if (aspectName === null || aspectName === undefined) {
throw new AnomalyDetectionModels.RequiredError(
"aspectName",
"Required parameter aspectName was null or undefined when calling modelsDirectPost."
);
}
// verify required parameter 'from' is not null or undefined
if (from === null || from === undefined) {
throw new AnomalyDetectionModels.RequiredError(
"from",
"Required parameter from was null or undefined when calling modelsDirectPost."
);
}
// verify required parameter 'to' is not null or undefined
if (to === null || to === undefined) {
throw new AnomalyDetectionModels.RequiredError(
"to",
"Required parameter to was null or undefined when calling modelsDirectPost."
);
}
const result = await this.HttpAction({
verb: "POST",
gateway: this.GetGateway(),
authorization: await this.GetToken(),
baseUrl: `${this._baseUrl}/modelsDirect?${toQueryString({
epsilon,
minPointsPerCluster,
assetId,
aspectName,
from,
to,
distanceMeasureAlgorithm,
name,
})}`,
additionalHeaders: { "Content-Type": "application/json" },
});
return result as AnomalyDetectionModels.Model;
}
/**
* Performs anomaly detection for given asset details against specified model.
* @summary Anomaly detection in direct integration with IoT time series
* @param {string} modelID ID of the model to use.
* @param {string} assetId unique identifier of the asset (entity)
* @param {string} aspectName Name of the aspect (property set).
* @param {Date} from Beginning of the time range to be retrieved (exclusive).
* @param {Date} to End of the time range to be retrieved (exclusive).
* @returns {Promise<Array<AnomalyDetectionModels.Anomaly>>}
* @throws {AnomalyDetectionModels.RequiredError}
* @memberof AnomalyDetectionClient
*/
public async DetectAnomaliesDirect(
modelID: string,
assetId: string,
aspectName: string,
from: Date,
to: Date
): Promise<Array<AnomalyDetectionModels.Anomaly>> {
// verify required parameter 'modelID' is not null or undefined
if (modelID === null || modelID === undefined) {
throw new AnomalyDetectionModels.RequiredError(
"modelID",
"Required parameter modelID was null or undefined when calling detectanomaliesDirectPost."
);
}
// verify required parameter 'assetId' is not null or undefined
if (assetId === null || assetId === undefined) {
throw new AnomalyDetectionModels.RequiredError(
"assetId",
"Required parameter assetId was null or undefined when calling detectanomaliesDirectPost."
);
}
// verify required parameter 'aspectName' is not null or undefined
if (aspectName === null || aspectName === undefined) {
throw new AnomalyDetectionModels.RequiredError(
"aspectName",
"Required parameter aspectName was null or undefined when calling detectanomaliesDirectPost."
);
}
// verify required parameter 'from' is not null or undefined
if (from === null || from === undefined) {
throw new AnomalyDetectionModels.RequiredError(
"from",
"Required parameter from was null or undefined when calling detectanomaliesDirectPost."
);
}
// verify required parameter 'to' is not null or undefined
if (to === null || to === undefined) {
throw new AnomalyDetectionModels.RequiredError(
"to",
"Required parameter to was null or undefined when calling detectanomaliesDirectPost."
);
}
const result = await this.HttpAction({
verb: "POST",
gateway: this.GetGateway(),
authorization: await this.GetToken(),
baseUrl: `${this._baseUrl}/detectanomaliesDirect?${toQueryString({
modelID,
assetId,
aspectName,
from,
to,
})}`,
additionalHeaders: { "Content-Type": "application/json" },
});
return result as Array<AnomalyDetectionModels.Anomaly>;
}
/**
* Cancels specified job.
* @summary Cancel jod.
* @param {string} id ID of the job to get status for.
* @throws {AnomalyDetectionModels.RequiredError}
* @memberof AnomalyDetectionClient
*/
public async CancelDetectAnomaliesJob(id: string) {
// verify required parameter 'id' is not null or undefined
if (id === null || id === undefined) {
throw new AnomalyDetectionModels.RequiredError(
"id",
"Required parameter id was null or undefined when calling detectAnomaliesJobsIdCancelPost."
);
}
await this.HttpAction({
verb: "POST",
gateway: this.GetGateway(),
authorization: await this.GetToken(),
baseUrl: `${this._baseUrl}/detectAnomaliesJobs/${id}/cancel`,
additionalHeaders: { "Content-Type": "application/json" },
noResponse: true,
});
}
/**
* Returns current status for specified jod.
* @summary Get job status.
* @param {string} id ID of the job to get status for.
* @returns {Promise<AnomalyDetectionModels.ReasoningJobInfo>}
* @throws {AnomalyDetectionModels.RequiredError}
* @memberof AnomalyDetectionClient
*/
public async GetDetectAnomaliesJob(id: string): Promise<AnomalyDetectionModels.ReasoningJobInfo> {
// verify required parameter 'id' is not null or undefined
if (id === null || id === undefined) {
throw new AnomalyDetectionModels.RequiredError(
"id",
"Required parameter id was null or undefined when calling detectAnomaliesJobsIdGet."
);
}
const result = await this.HttpAction({
verb: "GET",
gateway: this.GetGateway(),
authorization: await this.GetToken(),
baseUrl: `${this._baseUrl}/detectAnomaliesJobs/${id}`,
additionalHeaders: { "Content-Type": "application/json" },
});
return result as AnomalyDetectionModels.ReasoningJobInfo;
}
/**
* Performs long running reasoning (anomaly detection) for given timeseries data.
* @summary Anomaly Detection batch reasoning
* @param {AnomalyDetectionModels.SubmitReasoningRequest} [submitReasoningRequest] Request data to launch reasoning job.
* @returns {Promise<AnomalyDetectionModels.ReasoningJobInfo>}
* @throws {AnomalyDetectionModels.RequiredError}
* @memberof AnomalyDetectionClient
*/
public async PostDetectAnomaliesJob(
submitReasoningRequest?: AnomalyDetectionModels.SubmitReasoningRequest
): Promise<AnomalyDetectionModels.ReasoningJobInfo> {
const result = await this.HttpAction({
verb: "POST",
gateway: this.GetGateway(),
authorization: await this.GetToken(),
baseUrl: `${this._baseUrl}/detectAnomaliesJobs`,
body: submitReasoningRequest || {},
additionalHeaders: { "Content-Type": "application/json" },
});
return result as AnomalyDetectionModels.ReasoningJobInfo;
}
/**
* Performs long running model training for given timeseries data. The number of timeseries items to process is limited by 1000000, this parameter can be changed in the future without notice.
* @summary Anomaly Detection batch model training
* @param {AnomalyDetectionModels.SubmitTrainingRequest} [submitTrainingRequest] Request data to launch training job.
* @returns {Promise<AnomalyDetectionModels.TrainingJobInfo>}
* @throws {AnomalyDetectionModels.RequiredError}
* @memberof AnomalyDetectionClient
*/
public async PostTrainModelJob(
submitTrainingRequest?: AnomalyDetectionModels.SubmitTrainingRequest
): Promise<AnomalyDetectionModels.TrainingJobInfo> {
const result = await this.HttpAction({
verb: "POST",
gateway: this.GetGateway(),
authorization: await this.GetToken(),
baseUrl: `${this._baseUrl}/trainModelJobs`,
body: submitTrainingRequest || {},
additionalHeaders: { "Content-Type": "application/json" },
});
return result as AnomalyDetectionModels.TrainingJobInfo;
}
/**
* Returns current status for specified jod.
* @summary Get job status.
* @param {string} id ID of the job to get status for.
* @returns {Promise<AnomalyDetectionModels.TrainingJobInfo>}
* @throws {AnomalyDetectionModels.RequiredError}
* @memberof AnomalyDetectionClient
*/
public async GetTrainModelJob(id: string): Promise<AnomalyDetectionModels.TrainingJobInfo> {
// verify required parameter 'id' is not null or undefined
if (id === null || id === undefined) {
throw new AnomalyDetectionModels.RequiredError(
"id",
"Required parameter id was null or undefined when calling trainModelJobsIdGet."
);
}
const result = await this.HttpAction({
verb: "GET",
gateway: this.GetGateway(),
authorization: await this.GetToken(),
baseUrl: `${this._baseUrl}/trainModelJobs/${id}`,
additionalHeaders: { "Content-Type": "application/json" },
noResponse: true,
});
return result as AnomalyDetectionModels.TrainingJobInfo;
}
/**
* Cancels specified job.
* @summary Cancel job.
* @param {string} id ID of the job to get status for.
* @throws {RequiredError}
* @memberof AnomalyDetectionClient
*/
public async CancelTrainModelJob(id: string) {
// verify required parameter 'id' is not null or undefined
if (id === null || id === undefined) {
throw new AnomalyDetectionModels.RequiredError(
"id",
"Required parameter id was null or undefined when calling trainModelJobsIdCancelPost."
);
}
await this.HttpAction({
verb: "POST",
gateway: this.GetGateway(),
authorization: await this.GetToken(),
baseUrl: `${this._baseUrl}/trainModelJobs/${id}/cancel`,
additionalHeaders: { "Content-Type": "application/json" },
noResponse: true,
});
}
} | the_stack |
import {EventAggregator, Subscription} from 'aurelia-event-aggregator';
import {bindable, computedFrom, inject, observable} from 'aurelia-framework';
import {Router} from 'aurelia-router';
import {IDiagram} from '@process-engine/solutionexplorer.contracts';
import {IResponse} from '@essential-projects/http_contracts';
import {
AuthenticationStateEvent,
IFile,
IInputEvent,
ISolutionEntry,
ISolutionService,
NotificationType,
StudioVersion,
} from '../../../contracts/index';
import environment from '../../../environment';
import {NotificationService} from '../../../services/notification-service/notification.service';
import {SolutionExplorerList} from '../solution-explorer-list/solution-explorer-list';
import {getPortListByVersion} from '../../../services/default-ports-module/default-ports.module';
import {HttpFetchClient} from '../../fetch-http-client/http-fetch-client';
import {solutionIsRemoteSolution} from '../../../services/solution-is-remote-solution-module/solution-is-remote-solution.module';
import {isRunningInElectron} from '../../../services/is-running-in-electron-module/is-running-in-electron.module';
// Entry in the remote-solution suggestion/history list shown in the
// "open remote solution" modal.
type RemoteSolutionListEntry = {
// URI of the remote solution, including protocol.
uri: string;
// true when the remote solution currently responds to a reachability probe.
status: boolean;
// Studio version a default remote solution belongs to; unset for plain history entries.
version?: StudioVersion;
};
// Protocols the remote-solution URI input accepts; values include the "://"
// separator so they can be prepended to the host part directly.
enum SupportedProtocols {
HTTPS = 'https://',
HTTP = 'http://',
}
/**
 * This component handles:
 * - Opening files via drag and drop
 * - Opening files via double click
 * - Opening solutions/diagrams via an input field
 * - Refreshing all opened solutions via a button
 * - Refreshing on login/logout
 * - Updating the remote ProcessEngine URI if needed
 */
@inject(EventAggregator, 'NotificationService', Router, 'SolutionService', 'HttpFetchClient')
export class SolutionExplorerPanel {
// Protocol currently selected in the modal's dropdown; observable so changes trigger re-evaluation.
@observable public selectedProtocol: string = 'http://';
// Fields below are bound from the html view.
public solutionExplorerList: SolutionExplorerList;
public solutionInput: HTMLInputElement;
public openDiagramInput: HTMLInputElement;
// Controls visibility of the "open remote solution" modal.
public showOpenRemoteSolutionModal: boolean = false;
@bindable public uriOfRemoteSolutionWithoutProtocol: string;
// Self reference for the view's binding expressions.
public solutionExplorerPanel: SolutionExplorerPanel = this;
// Reachability status per remote-solution URI, refreshed by the polling loop.
public remoteSolutionHistoryStatus: Map<string, boolean> = new Map<string, boolean>();
public availableDefaultRemoteSolutions: Array<RemoteSolutionListEntry> = [];
// true while a connection attempt to a remote solution is in flight.
public isConnecting: boolean = false;
// Error message of the last failed connection attempt; undefined when none.
public connectionError: string;
// Exposes the enum to the view for protocol selection.
public supportedProtocols: typeof SupportedProtocols = SupportedProtocols;
private eventAggregator: EventAggregator;
private notificationService: NotificationService;
private router: Router;
// TODO: Add typings
private ipcRenderer: any | null = null;
private subscriptions: Array<Subscription> = [];
private solutionService: ISolutionService;
// Timer handle for the self-rescheduling status polling loop.
private remoteSolutionHistoryStatusPollingTimer: NodeJS.Timer;
private remoteSolutionHistoryStatusIsPolling: boolean;
private httpFetchClient: HttpFetchClient;
/**
 * @param eventAggregator Aurelia event bus used for cross-component events.
 * @param notificationService Shows user-facing notifications.
 * @param router Aurelia router.
 * @param solutionService Persists and resolves opened solutions.
 * @param httpFetchClient HTTP client used for reachability probes.
 */
constructor(
  eventAggregator: EventAggregator,
  notificationService: NotificationService,
  router: Router,
  solutionService: ISolutionService,
  httpFetchClient: HttpFetchClient,
) {
  this.eventAggregator = eventAggregator;
  this.notificationService = notificationService;
  this.router = router;
  this.solutionService = solutionService;
  this.httpFetchClient = httpFetchClient;
  if (isRunningInElectron()) {
    // ipcRenderer only exists inside Electron; loaded lazily via nodeRequire.
    this.ipcRenderer = (window as any).nodeRequire('electron').ipcRenderer;
  }
}
// True when the Studio runs inside Electron and can therefore access the file system.
public get canReadFromFileSystem(): boolean {
  return isRunningInElectron();
}

// True once a connection attempt has recorded an error message.
public get connectionErrorExists(): boolean {
  const noErrorRecorded: boolean = this.connectionError === undefined;
  return !noErrorRecorded;
}
public get remoteSolutionHistoryWithStatus(): Array<RemoteSolutionListEntry> {
return this.loadRemoteSolutionHistory()
.reverse()
.map((solutionUri: string) => {
return {
uri: solutionUri,
status: this.remoteSolutionHistoryStatus.get(solutionUri),
};
});
}
@computedFrom('availableDefaultRemoteSolutions.length', 'remoteSolutionHistoryWithStatus.length')
public get suggestedRemoteSolutions(): Array<RemoteSolutionListEntry> {
  // Default remote solutions come first; history entries that duplicate a
  // default solution are dropped so each suggestion appears only once.
  const defaultSolutions: Array<RemoteSolutionListEntry> = this.availableDefaultRemoteSolutions;
  const historyWithoutDefaults: Array<RemoteSolutionListEntry> = this.remoteSolutionHistoryWithStatus.filter(
    (historyEntry: RemoteSolutionListEntry) => {
      return defaultSolutions.every((defaultSolution: RemoteSolutionListEntry) => {
        return defaultSolution.uri !== historyEntry.uri;
      });
    },
  );
  return [...defaultSolutions, ...historyWithoutDefaults];
}
@computedFrom('suggestedRemoteSolutions.length')
public get unconnectedSuggestedRemoteSolutions(): Array<RemoteSolutionListEntry> {
  // Hide suggestions whose URI is already an open (connected) solution.
  const connectedUris: Array<string> = this.solutionService
    .getAllSolutionEntries()
    .map((connectedSolution: ISolutionEntry) => connectedSolution.uri);
  return this.suggestedRemoteSolutions.filter((remoteSolution: RemoteSolutionListEntry) => {
    return !connectedUris.includes(remoteSolution.uri);
  });
}

@computedFrom('unconnectedSuggestedRemoteSolutions.length')
public get unconnectedSuggestedRemoteSolutionsExist(): boolean {
  const suggestions: Array<RemoteSolutionListEntry> = this.unconnectedSuggestedRemoteSolutions;
  return suggestions.length > 0;
}
// Full URI composed of the selected protocol and the user-entered host part.
public get uriOfRemoteSolution(): string {
  return this.selectedProtocol + this.uriOfRemoteSolutionWithoutProtocol;
}

// True while the user has not entered a host part yet.
public get uriIsEmpty(): boolean {
  const enteredUri: string = this.uriOfRemoteSolutionWithoutProtocol;
  return enteredUri === undefined || enteredUri.length === 0;
}

// Validates the composed remote-solution URI.
public get uriIsValid(): boolean {
  /**
   * This RegEx checks if the entered URI is valid or not.
   */
  // TODO Check if this still works
  const urlRegEx: RegExp = /^(?:http(s)?:\/\/)+[\w.-]?[\w\-._~:/?#[\]@!$&'()*+,;=.]+$/g;
  return urlRegEx.test(this.uriOfRemoteSolution);
}
/**
 * Aurelia lifecycle hook. Restores the panel's state on startup: opens the
 * solution of the configured internal ProcessEngine, re-opens all previously
 * opened solutions and diagrams, and registers Electron hooks.
 */
public async bind(): Promise<void> {
  // Open the solution of the currently configured processengine instance on startup.
  const uriOfProcessEngine: string = window.localStorage.getItem('InternalProcessEngineRoute');
  const persistedInternalSolution: ISolutionEntry = this.solutionService.getSolutionEntryForUri(uriOfProcessEngine);
  const internalSolutionWasPersisted: boolean = persistedInternalSolution !== undefined;
  // NOTE(review): 'nodeRequire' is only set in the Electron shell, so this guard
  // presumably means "running in Electron" — confirm why isRunningInElectron() is not used here.
  if ((window as any).nodeRequire) {
    try {
      if (internalSolutionWasPersisted) {
        // Re-use the persisted identity so the user stays authenticated.
        this.solutionExplorerList.openSolution(uriOfProcessEngine, false, persistedInternalSolution.identity);
      } else {
        this.solutionExplorerList.openSolution(uriOfProcessEngine);
      }
    } catch {
      // openSolution is not awaited here, so only synchronous failures are caught.
      return;
    }
  }
  // Open the previously opened solutions.
  const previouslyOpenedSolutions: Array<ISolutionEntry> = this.solutionService.getPersistedEntries();
  previouslyOpenedSolutions.forEach((entry: ISolutionEntry) => {
    // We are not adding the solution of the connect PE here again since that happened above.
    const entryIsNotConnectedProcessEngine: boolean = entry.uri !== uriOfProcessEngine;
    if (entryIsNotConnectedProcessEngine) {
      /**
       * Since we can't distinguish if the persisted ProcessEngine was an
       * internal or external one yet, we consume any connection error
       * produced by the openSolution method.
       */
      try {
        this.solutionExplorerList.openSolution(entry.uri, false, entry.identity);
      } catch (error) {
        // Do nothing
      }
    }
  });
  // Re-open diagrams sequentially; a single failure must not abort the rest.
  const persistedOpenDiagrams: Array<IDiagram> = this.solutionService.getOpenDiagrams();
  for (const persistedOpenDiagram of persistedOpenDiagrams) {
    try {
      await this.solutionExplorerList.openDiagram(persistedOpenDiagram.uri);
    } catch {
      // Do nothing
    }
  }
  if (isRunningInElectron()) {
    this.registerElectronHooks();
  }
}
/**
 * Aurelia lifecycle hook. Wires up drag-and-drop (Electron only) and all
 * event-aggregator subscriptions; the matching teardown happens in detached().
 */
public async attached(): Promise<void> {
  if (isRunningInElectron()) {
    document.addEventListener('drop', this.openDiagramOnDropBehaviour);
  }
  const studioEvents = environment.events;
  this.subscriptions = [
    this.eventAggregator.subscribe(studioEvents.diagramDetail.onDiagramDeployed, (): void => {
      this.refreshSolutions();
    }),
    this.eventAggregator.subscribe(studioEvents.startPage.openLocalSolution, (): void => {
      this.openSolution();
    }),
    this.eventAggregator.subscribe(studioEvents.startPage.openDiagram, (): void => {
      this.openDiagram();
    }),
    this.eventAggregator.subscribe(studioEvents.startPage.createDiagram, (): void => {
      this.createNewDiagram();
    }),
    this.eventAggregator.subscribe(AuthenticationStateEvent.LOGOUT, (): void => {
      this.solutionExplorerList.refreshSolutions();
    }),
  ];
}
/**
 * Aurelia lifecycle hook. Removes Electron file-opening hooks and disposes
 * every subscription created in attached().
 */
public detached(): void {
  if (isRunningInElectron()) {
    this.removeElectronFileOpeningHooks();
    document.removeEventListener('drop', this.openDiagramOnDropBehaviour);
  }
  this.subscriptions.forEach((subscription: Subscription) => subscription.dispose());
}
/**
 * Opens the "open remote solution" modal, refreshes the reachability status
 * of all known remote solutions and starts the polling loop.
 */
public async openRemoteSolutionModal(): Promise<void> {
  this.showOpenRemoteSolutionModal = true;
  await this.updateRemoteSolutionHistoryStatus();
  this.startPollingOfRemoteSolutionHistoryStatus();
  // Not awaited: default-solution discovery probes several ports and should
  // presumably not delay the modal — confirm this is intentional.
  this.updateDefaultRemoteSolutions();
}
/**
 * Removes the given solution URI from the stored connection history.
 * @param solutionUri URI to remove.
 * @param event Optional DOM event; propagation is stopped so the click does
 *              not also select the list entry.
 */
public removeSolutionFromHistory(solutionUri: string, event?: Event): void {
  const eventProvided: boolean = Boolean(event);
  if (eventProvided) {
    event.stopPropagation();
  }
  this.removeSolutionFromSolutionHistroy(solutionUri);
}

// Selects the protocol ("http://" / "https://") used to build the remote URI.
public selectProtocol(protocol: SupportedProtocols): void {
  this.selectedProtocol = protocol;
}
/**
 * Closes the "open remote solution" modal and resets its state: stops status
 * polling, cancels a connection attempt that may still be in flight, and
 * clears the input and error message.
 */
public closeRemoteSolutionModal(): void {
  this.stopPollingOfRemoteSolutionHistoryStatus();
  this.solutionExplorerList.cancelOpeningSolution(this.uriOfRemoteSolution);
  this.isConnecting = false;
  this.showOpenRemoteSolutionModal = false;
  this.uriOfRemoteSolutionWithoutProtocol = undefined;
  this.connectionError = undefined;
}
public uriOfRemoteSolutionWithoutProtocolChanged(): void {
if (this.uriOfRemoteSolutionWithoutProtocol === undefined) {
return;
}
for (const protocol of Object.values(SupportedProtocols)) {
if (this.uriOfRemoteSolutionWithoutProtocol.startsWith(protocol)) {
this.uriOfRemoteSolutionWithoutProtocol = this.uriOfRemoteSolutionWithoutProtocol.replace(protocol, '');
this.selectProtocol(protocol);
}
}
}
/**
 * Connects to the remote solution entered in the modal. Validates the URI,
 * stores it in the connection history and opens it via the solution explorer
 * list. On failure the error is shown inside the modal; on success the modal
 * is closed.
 */
public async openRemoteSolution(): Promise<void> {
  if (!this.uriIsValid || this.uriIsEmpty) {
    return;
  }
  this.connectionError = undefined;
  try {
    // Normalize away a trailing slash so history entries stay unique.
    const lastCharacterIsASlash: boolean = this.uriOfRemoteSolutionWithoutProtocol.endsWith('/');
    if (lastCharacterIsASlash) {
      this.uriOfRemoteSolutionWithoutProtocol = this.uriOfRemoteSolutionWithoutProtocol.slice(0, -1);
    }
    await this.addSolutionToRemoteSolutionHistory(this.uriOfRemoteSolution);
    this.isConnecting = true;
    await this.solutionExplorerList.openSolution(this.uriOfRemoteSolution);
    this.isConnecting = false;
  } catch (error) {
    // Render the failure inside the modal and keep it open for a retry.
    this.isConnecting = false;
    const genericMessage: string = `Unable to connect to ProcessEngine on: ${this.uriOfRemoteSolution}`;
    const cause: string = error.message ? error.message : '';
    this.connectionError = `${genericMessage}\n${cause}`;
    return;
  }
  this.closeRemoteSolutionModal();
}
/**
 * Handles the change event of the hidden solution file input.
 * @param event Input event carrying the files selected by the user;
 *              no dedicated type exists for this kind of event yet.
 */
public async onSolutionInputChange(event: IInputEvent): Promise<void> {
  // Read the path before resetting the input, since clearing the element
  // invalidates its file list.
  const selectedSolutionUri: string = event.target.files[0].path;
  this.solutionInput.value = '';
  this.openSolutionOrDisplayError(selectedSolutionUri);
}
/**
 * Handles the change event of the hidden "open diagram" file input.
 * @param event Input event carrying the files selected by the user;
 *              no dedicated type exists for this kind of event yet.
 */
public async onOpenDiagramInputChange(event: IInputEvent): Promise<void> {
  // Read the path before resetting the input, since clearing the element
  // invalidates its file list.
  const selectedDiagramUri: string = event.target.files[0].path;
  this.openDiagramInput.value = '';
  return this.openDiagramOrDisplayError(selectedDiagramUri);
}
/**
 * Lets the user pick a diagram to open. In the browser the hidden file input
 * is clicked; in Electron a native open dialog is requested via IPC and the
 * chosen path is handled when the main process answers.
 */
public async openDiagram(): Promise<void> {
  const canNotReadFromFileSystem: boolean = !isRunningInElectron();
  if (canNotReadFromFileSystem) {
    this.openDiagramInput.click();
    return;
  }
  this.ipcRenderer.send('open_diagram');
  this.ipcRenderer.once('import_opened_diagram', async (event: Event, openedFile: File) => {
    const noFileSelected: boolean = openedFile === null;
    if (noFileSelected) {
      return;
    }
    // NOTE(review): despite the 'File' typing, the payload is indexed with [0],
    // so it is presumably an array containing the selected path — confirm
    // against the main process handler.
    const filePath: string = openedFile[0];
    await this.openDiagramOrDisplayError(filePath);
  });
}
/**
 * Lets the user pick a solution folder to open. In the browser the hidden
 * file input is clicked; in Electron a native folder dialog is requested via
 * IPC and the chosen path is handled when the main process answers.
 */
public async openSolution(): Promise<void> {
  const canNotReadFromFileSystem: boolean = !isRunningInElectron();
  if (canNotReadFromFileSystem) {
    this.solutionInput.click();
    return;
  }
  this.ipcRenderer.send('open_solution');
  this.ipcRenderer.once('import_opened_solution', async (event: Event, openedFolder: File) => {
    const noFolderSelected: boolean = openedFolder === null;
    if (noFolderSelected) {
      return;
    }
    // NOTE(review): like openDiagram, the payload is indexed with [0] despite
    // the 'File' typing — presumably an array containing the folder path; confirm.
    const folderPath: string = openedFolder[0];
    await this.openSolutionOrDisplayError(folderPath);
  });
}
/**
 * Maps a Studio version to the CSS class of its remote-solution badge.
 * Unknown versions fall back to the development badge.
 */
public getBadgeForVersion(version: StudioVersion): string {
  const badgeClassByVersion = {
    [StudioVersion.Dev]: 'remote-solution-badge__dev',
    [StudioVersion.Alpha]: 'remote-solution-badge__alpha',
    [StudioVersion.Beta]: 'remote-solution-badge__beta',
    [StudioVersion.Stable]: 'remote-solution-badge__stable',
  };
  const badgeClass: string = badgeClassByVersion[version];
  return badgeClass !== undefined ? badgeClass : 'remote-solution-badge__dev';
}

/**
 * Maps a Studio version to its human-readable display name.
 * Unknown versions fall back to 'Development'.
 */
public getVersionNameForVersion(version: StudioVersion): string {
  const displayNameByVersion = {
    [StudioVersion.Dev]: 'Development',
    [StudioVersion.Alpha]: 'BPMN Studio Alpha',
    [StudioVersion.Beta]: 'BPMN Studio Beta',
    [StudioVersion.Stable]: 'BPMN Studio',
  };
  const displayName: string = displayNameByVersion[version];
  return displayName !== undefined ? displayName : 'Development';
}
/**
 * Pre-fills the modal with an existing remote-solution URI: the protocol part
 * is selected in the dropdown, the remainder goes into the URI input.
 */
public selectRemoteSolution(remoteSolutionUri: string): void {
  // Split at the end of "//" into protocol and host part.
  // tslint:disable-next-line no-magic-numbers
  const protocolEndIndex: number = remoteSolutionUri.indexOf('//') + 2;
  const protocolPart: string = remoteSolutionUri.slice(0, protocolEndIndex);
  const hostPart: string = remoteSolutionUri.slice(protocolEndIndex);
  const matchingProtocolKey = Object.keys(SupportedProtocols).find((supportedProtocolKey) => {
    return SupportedProtocols[supportedProtocolKey] === protocolPart;
  });
  this.selectProtocol(SupportedProtocols[matchingProtocolKey]);
  this.uriOfRemoteSolutionWithoutProtocol = hostPart;
}
/**
 * Starts the recurring reachability check for the remote-solution history.
 */
private startPollingOfRemoteSolutionHistoryStatus(): void {
  this.remoteSolutionHistoryStatusIsPolling = true;
  this.pollRemoteSolutionHistoryStatus();
}

/**
 * Runs one status update after the configured interval and re-arms itself —
 * a self-rescheduling timeout, so updates never overlap (unlike setInterval).
 */
private pollRemoteSolutionHistoryStatus(): void {
  this.remoteSolutionHistoryStatusPollingTimer = setTimeout(async () => {
    await this.updateRemoteSolutionHistoryStatus();
    // Stop re-arming once polling was switched off (e.g. the modal closed).
    if (!this.remoteSolutionHistoryStatusIsPolling) {
      return;
    }
    this.pollRemoteSolutionHistoryStatus();
  }, environment.processengine.updateRemoteSolutionHistoryIntervalInMs);
}

/**
 * Stops the polling loop and clears the pending timer, if any.
 */
private stopPollingOfRemoteSolutionHistoryStatus(): void {
  const noTimerExisting: boolean = this.remoteSolutionHistoryStatusPollingTimer === undefined;
  if (noTimerExisting) {
    return;
  }
  clearTimeout(this.remoteSolutionHistoryStatusPollingTimer);
  this.remoteSolutionHistoryStatusPollingTimer = undefined;
  this.remoteSolutionHistoryStatusIsPolling = false;
}
/**
 * Refreshes the reachability status of every remote solution in the history
 * and records the results in `remoteSolutionHistoryStatus`.
 *
 * The previous implementation used `forEach` with an async callback, so the
 * method's promise resolved before any probe finished and awaiting callers
 * (e.g. openRemoteSolutionModal) saw stale data. `Promise.all` over the
 * mapped probes makes the returned promise actually track completion.
 */
private async updateRemoteSolutionHistoryStatus(): Promise<void> {
  await Promise.all(
    this.remoteSolutionHistoryWithStatus.map(
      async (remoteSolutionWithStatus: RemoteSolutionListEntry): Promise<void> => {
        const remoteSolutionStatus: boolean = await this.isRemoteSolutionActive(remoteSolutionWithStatus.uri);
        this.remoteSolutionHistoryStatus.set(remoteSolutionWithStatus.uri, remoteSolutionStatus);
      },
    ),
  );
}
private async updateDefaultRemoteSolutions(): Promise<void> {
this.availableDefaultRemoteSolutions = [];
const stableRemoteSolution: Promise<RemoteSolutionListEntry | null> = this.searchDefaultRemoteSolutionForVersion(
StudioVersion.Stable,
);
const betaRemoteSolution: Promise<RemoteSolutionListEntry | null> = this.searchDefaultRemoteSolutionForVersion(
StudioVersion.Beta,
);
const alphaRemoteSolution: Promise<RemoteSolutionListEntry | null> = this.searchDefaultRemoteSolutionForVersion(
StudioVersion.Alpha,
);
const devRemoteSolution: Promise<RemoteSolutionListEntry | null> = this.searchDefaultRemoteSolutionForVersion(
StudioVersion.Dev,
);
const availableRemoteSolutions: Array<RemoteSolutionListEntry> = await Promise.all([
stableRemoteSolution,
betaRemoteSolution,
alphaRemoteSolution,
devRemoteSolution,
]);
this.availableDefaultRemoteSolutions = availableRemoteSolutions.filter(
(remoteSolution: RemoteSolutionListEntry | null) => {
return remoteSolution !== null;
},
);
}
private async searchDefaultRemoteSolutionForVersion(version: StudioVersion): Promise<RemoteSolutionListEntry | null> {
const portsToCheck: Array<number> = getPortListByVersion(version);
const processEngineUri: string = await this.getActiveProcessEngineForPortList(portsToCheck);
const noActiveProcessEngineFound: boolean = processEngineUri === null;
if (noActiveProcessEngineFound) {
return null;
}
return {
uri: processEngineUri,
status: true,
version: version,
};
}
private async getActiveProcessEngineForPortList(portsToCheck: Array<number>): Promise<string | null> {
for (const port of portsToCheck) {
const uriToCheck: string = `http://localhost:${port}`;
const processEngineFound: boolean = await this.isRemoteSolutionActive(uriToCheck);
if (processEngineFound) {
return uriToCheck;
}
}
return null;
}
private async isRemoteSolutionActive(remoteSolutionUri: string): Promise<boolean> {
try {
let response: IResponse<JSON>;
try {
response = await this.httpFetchClient.get(`${remoteSolutionUri}/process_engine`);
} catch (error) {
const errorIsNotFoundError: boolean = error.code === 404;
if (errorIsNotFoundError) {
response = await this.httpFetchClient.get(`${remoteSolutionUri}`);
} else {
throw error;
}
}
const isResponseFromProcessEngine: boolean = response.result['name'] === '@process-engine/process_engine_runtime';
if (!isResponseFromProcessEngine) {
throw new Error('The response was not send by a ProcessEngine.');
}
return true;
} catch {
return false;
}
}
private async refreshSolutions(): Promise<void> {
return this.solutionExplorerList.refreshSolutions();
}
private async openSolutionOrDisplayError(uri: string): Promise<void> {
try {
await this.solutionExplorerList.openSolution(uri);
} catch (error) {
this.notificationService.showNotification(NotificationType.ERROR, error.message);
}
}
private loadRemoteSolutionHistory(): Array<string> {
const remoteSolutionHistoryFromLocalStorage: string | null = localStorage.getItem('remoteSolutionHistory');
const noHistoryExisting: boolean = remoteSolutionHistoryFromLocalStorage === null;
const remoteSolutionHistory: Array<string> = noHistoryExisting
? []
: JSON.parse(remoteSolutionHistoryFromLocalStorage);
return remoteSolutionHistory;
}
private saveRemoteSolutionHistory(remoteSolutionHistory: Array<string>): void {
const remoteSolutionHistoryString: string = JSON.stringify(remoteSolutionHistory);
localStorage.setItem('remoteSolutionHistory', remoteSolutionHistoryString);
}
private addSolutionToRemoteSolutionHistory(solutionUri: string): void {
this.removeSolutionFromSolutionHistroy(solutionUri);
const remoteSolutionHistory: Array<string> = this.loadRemoteSolutionHistory();
remoteSolutionHistory.push(solutionUri);
this.saveRemoteSolutionHistory(remoteSolutionHistory);
}
private removeSolutionFromSolutionHistroy(solutionUri: string): void {
const remoteSolutionHistory: Array<string> = this.loadRemoteSolutionHistory();
const uniqueRemoteSolutionHistory: Array<string> = remoteSolutionHistory.filter((remoteSolutionUri: string) => {
return remoteSolutionUri !== solutionUri;
});
this.saveRemoteSolutionHistory(uniqueRemoteSolutionHistory);
}
private async openDiagramOrDisplayError(uri: string): Promise<void> {
try {
const openedDiagram: IDiagram = await this.solutionExplorerList.openDiagram(uri);
const solution: ISolutionEntry = this.solutionExplorerList.getOpenDiagramSolutionEntry();
this.solutionService.addOpenDiagram(openedDiagram);
await this.navigateToDetailView(openedDiagram, solution);
} catch (error) {
// The diagram may already be opened.
const diagram: IDiagram | null = await this.solutionExplorerList.getOpenedDiagramByURI(uri);
const solution: ISolutionEntry = this.solutionExplorerList.getOpenDiagramSolutionEntry();
const diagramWithURIIsAlreadyOpened: boolean = diagram !== null;
if (diagramWithURIIsAlreadyOpened) {
return this.navigateToDetailView(diagram, solution);
}
this.notificationService.showNotification(NotificationType.ERROR, error.message);
}
return undefined;
}
private electronFileOpeningHook = async (_: Event, pathToFile: string): Promise<void> => {
const uri: string = pathToFile;
await this.openDiagramOrDisplayError(uri);
};
private electronOnMenuOpenDiagramHook = async (_: Event): Promise<void> => {
this.openDiagram();
};
private electronOnMenuOpenSolutionHook = async (_: Event): Promise<void> => {
this.openSolution();
};
private electronOnCreateDiagram = async (_: Event): Promise<void> => {
await this.openNewDiagram();
setTimeout(() => {
const openDiagramsEntry = document.getElementsByClassName('open-diagrams-entry')[0];
openDiagramsEntry.scrollTop = openDiagramsEntry.scrollHeight;
}, 0);
};
private openNewDiagram(): Promise<void> {
const uri: string = 'about:open-diagrams';
return this.solutionExplorerList.createDiagram(uri);
}
private createNewDiagram(): void {
const activeSolutionUri: string = this.router.currentInstruction.queryParams.solutionUri;
const activeSolution: ISolutionEntry = this.solutionService.getSolutionEntryForUri(activeSolutionUri);
const activeSolutionCanCreateDiagrams: boolean =
activeSolution !== undefined && !solutionIsRemoteSolution(activeSolution.uri);
const uri: string = activeSolutionCanCreateDiagrams ? activeSolutionUri : 'about:open-diagrams';
this.solutionExplorerList.createDiagram(uri);
}
private async registerElectronHooks(): Promise<void> {
// Register handler for double-click event fired from "electron.js".
this.ipcRenderer.on('double-click-on-file', this.electronFileOpeningHook);
this.ipcRenderer.on('menubar__start_opening_diagram', this.electronOnMenuOpenDiagramHook);
this.ipcRenderer.on('menubar__start_opening_solution', this.electronOnMenuOpenSolutionHook);
this.ipcRenderer.on('menubar__start_create_diagram', this.electronOnCreateDiagram);
// Send event to signal the component is ready to handle the event.
this.ipcRenderer.send('waiting-for-double-file-click');
// Check if there was a double click before BPMN Studio was loaded.
const fileInfo: IFile = this.ipcRenderer.sendSync('get_opened_file');
if (fileInfo.path) {
// There was a file opened before BPMN Studio was loaded, open it.
const uri: string = fileInfo.path;
await this.openDiagramOrDisplayError(uri);
}
}
private removeElectronFileOpeningHooks(): void {
// Register handler for double-click event fired from "electron.js".
this.ipcRenderer.removeListener('double-click-on-file', this.electronFileOpeningHook);
this.ipcRenderer.removeListener('menubar__start_opening_diagram', this.electronOnMenuOpenDiagramHook);
this.ipcRenderer.removeListener('menubar__start_opening_solution', this.electronOnMenuOpenSolutionHook);
this.ipcRenderer.removeListener('menubar__start_create_diagram', this.electronOnCreateDiagram);
}
private openDiagramOnDropBehaviour: EventListener = async (event: DragEvent): Promise<void> => {
event.preventDefault();
const loadedFiles: FileList = event.dataTransfer.files;
const urisToOpen: Array<string> = Array.from(loadedFiles).map((file: IFile): string => {
return file.path;
});
const openingPromises: Array<Promise<void>> = urisToOpen.map(
(uri: string): Promise<void> => {
return this.openDiagramOrDisplayError(uri);
},
);
await Promise.all(openingPromises);
};
// TODO: This method is copied all over the place.
private async navigateToDetailView(diagram: IDiagram, solution: ISolutionEntry): Promise<void> {
await this.router.navigateToRoute('design', {
diagramName: diagram.name,
diagramUri: diagram.uri,
solutionUri: solution.uri,
});
}
} | the_stack |
// The MIT License (MIT)
//
// vs-deploy (https://github.com/mkloubert/vs-deploy)
// Copyright (c) Marcel Joachim Kloubert <marcel.kloubert@gmx.net>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
import * as deploy_contracts from '../contracts';
import * as deploy_globals from '../globals';
import * as deploy_helpers from '../helpers';
import * as deploy_objects from '../objects';
import * as deploy_plugins from '../plugins';
import * as deploy_values from '../values';
import * as deploy_workspace from '../workspace';
import * as FS from 'fs';
import * as i18 from '../i18';
import * as Path from 'path';
import * as Workflows from 'node-workflows';
import * as vscode from 'vscode';
interface DeployTargetPrompt extends deploy_contracts.DeployTarget {
prompts?: PromptEntry | PromptEntry[];
targets: string | string[];
}
interface PromptEntry {
cache?: boolean;
converter?: string;
converterOptions?: any;
defaultValue?: any;
ignoreFocusOut?: boolean;
isPassword?: boolean;
properties?: string | string[];
showAlways?: boolean;
text?: string;
type?: string;
validator?: string;
validatorOptions?: any;
valuePlaceHolder?: string;
handleAs?: string;
}
/**
* A module for converting a (string) value.
*/
export interface ValueConverterModule {
/**
* The converter function / method.
*/
convert: ValueConverterModuleExecutor;
}
/**
* Converts a (string) value.
*
* @param {ValueConverterModuleExecutorArguments} args The arguments for the execution.
*
* @return {any} The result.
*/
export type ValueConverterModuleExecutor = (args: ValueConverterModuleExecutorArguments) => any;
/**
* Arguments for a value converter function / method.
*/
export interface ValueConverterModuleExecutorArguments extends deploy_contracts.ScriptArguments {
/**
* Options for the script.
*/
options: any;
/**
* The name of the properties where to write the value to.
*/
properties: string[];
/**
* The name of the targets where to write the value to.
*/
targets: string[];
/**
* The type from the target settings.
*/
type: string;
/**
* The value to convert.
*/
value: string;
}
/**
* A validator module.
*/
export interface ValueValidatorModule {
/**
* Validates a value.
*/
validate: ValueValidatorModuleExecutor;
}
/**
* A validator function / method.
*
* @param {ValueValidatorModuleExecutorArguments} args The arguments.
*
* @return {boolean|undefined|null} User value is valid or not.
*/
export type ValueValidatorModuleExecutor = (args: ValueValidatorModuleExecutorArguments) => boolean | undefined | null;
/**
* Arguments for a validator function.
*/
export interface ValueValidatorModuleExecutorArguments extends deploy_contracts.ScriptArguments {
/**
* The custom error message.
*/
message?: string;
/**
* Options for the script.
*/
options: any;
/**
* The name of the properties where to write the value to.
*/
properties: string[];
/**
* The name of the targets where to write the value to.
*/
targets: string[];
/**
* The user value to check.
*/
value: string;
}
const NOT_FOUND_IN_CACHE = Symbol('NOT_FOUND_IN_CACHE');
class PromptPlugin extends deploy_objects.MultiFileDeployPluginBase {
public deployWorkspace(files: string[], target: DeployTargetPrompt, opts?: deploy_contracts.DeployWorkspaceOptions): void {
if (!opts) {
opts = {};
}
let hasCancelled = false;
let completedInvoked = false;
let completed = (err: any) => {
if (completedInvoked) {
return;
}
completedInvoked = true;
if (opts.onCompleted) {
opts.onCompleted(me, {
canceled: hasCancelled,
error: err,
target: target,
});
}
};
let me = this;
me.onCancelling(() => hasCancelled = true, opts);
let afterWorkflowsAction = (err: any, ctx: Workflows.WorkflowActionContext) => {
if (ctx && hasCancelled) {
ctx.finish();
}
};
try {
// target.targets
let targets = deploy_helpers.asArray(target.targets)
.map(t => deploy_helpers.normalizeString(t))
.filter(t => '' !== t);
targets = deploy_helpers.distinctArray(targets);
// target.prompts
let prompts = deploy_helpers.asArray(target.prompts)
.filter(x => x);
let wf = Workflows.create();
// prepare workflow
wf.next((wfCtx) => {
wfCtx.value = {
properties: {},
propertiesToCache: [],
};
});
// create an action
// for each entry in
// 'prompts'
prompts.forEach((p, propmptIndex) => {
// collect property names
let propertyNames = deploy_helpers.asArray(p.properties)
.map(x => deploy_helpers.toStringSafe(x).trim())
.filter(x => '' !== x);
propertyNames = deploy_helpers.distinctArray(propertyNames);
let promptValueToString = (v: any, defValue?: string) => {
return deploy_helpers.isNullOrUndefined(v) ? defValue : deploy_helpers.toStringSafe(v);
};
let cacheKey = `prompts::${propmptIndex}`;
let doCache = deploy_helpers.toBooleanSafe(p.cache);
let ignoreFocusOut = deploy_helpers.toBooleanSafe(p.ignoreFocusOut, true);
let showAlways = deploy_helpers.toBooleanSafe(p.showAlways);
// the validator for the input
let validator: ValueValidatorModuleExecutor;
if (!deploy_helpers.isEmptyString(p.validator)) {
let validatorScript = deploy_helpers.toStringSafe(p.validator);
validatorScript = me.context.replaceWithValues(validatorScript);
if (!Path.isAbsolute(validatorScript)) {
validatorScript = Path.join(deploy_workspace.getRootPath(), validatorScript);
}
validatorScript = Path.resolve(validatorScript);
let validatorModule = deploy_helpers.loadModule<ValueValidatorModule>(validatorScript);
if (validatorModule) {
validator = validatorModule.validate;
}
}
if (!validator) {
validator = (args) => {
return true;
};
}
let converter: ValueConverterModuleExecutor;
if (!deploy_helpers.isEmptyString(p.converter)) {
let converterScript = deploy_helpers.toStringSafe(p.converter);
converterScript = me.context.replaceWithValues(converterScript);
if (!Path.isAbsolute(converterScript)) {
converterScript = Path.join(deploy_workspace.getRootPath(), converterScript);
}
converterScript = Path.resolve(converterScript);
let converterModule = deploy_helpers.loadModule<ValueConverterModule>(converterScript);
if (converterModule) {
converter = converterModule.convert;
}
}
// create action
wf.next((wfCtx) => {
let properties = wfCtx.value['properties'];
let propertiesToCache: any[] = wfCtx.value['propertiesToCache'];
let targetType = deploy_helpers.normalizeString(p.type);
let valueConverter = converter;
// define value converter
if (!valueConverter) {
valueConverter = (args) => {
switch (args.type) {
case 'bool':
case 'boolean':
{
switch (deploy_helpers.normalizeString(args.value)) {
case '1':
case 'true':
case 'yes':
case 'y':
return true;
case '':
return undefined;
}
return false;
}
// bool
case 'int':
case 'integer':
return parseInt(deploy_helpers.toStringSafe(args.value).trim());
case 'float':
case 'number':
return parseFloat(deploy_helpers.toStringSafe(args.value).trim());
case 'json':
case 'obj':
case 'object':
return JSON.parse(args.value);
case 'file':
{
let src = deploy_values.replaceWithValues(me.context.values(), args.value);
return new Promise<any>((res, rej) => {
deploy_helpers.loadFrom(src).then((result) => {
try {
let val: any;
if (result.data && result.data.length > 0) {
val = JSON.parse(result.data.toString('utf8'));
}
res(val);
}
catch (e) {
rej(e);
}
}).catch((err) => {
rej(err);
});
});
}
// file
}
return args.value;
};
}
return new Promise<any>((resolve, reject) => {
let completed = (err: any, userValue?: any) => {
if (err) {
reject(err);
}
else {
if ('undefined' !== typeof userValue) {
if (doCache) {
propertiesToCache.push({
'key': cacheKey,
'value': userValue,
});
}
let args: ValueConverterModuleExecutorArguments = {
emitGlobal: function() {
return deploy_globals.EVENTS
.emit
.apply(deploy_globals.EVENTS, arguments);
},
globals: me.context.globals(),
options: deploy_helpers.cloneObject(p.converterOptions),
properties: propertyNames,
require: function(id) {
return me.context.require(id);
},
targets: targets,
type: targetType,
value: userValue,
};
try {
Promise.resolve( valueConverter(args) ).then((valueToSet) => {
propertyNames.forEach(pn => {
properties[pn] = valueToSet;
});
resolve();
}).catch((err) => {
reject(err);
});
}
catch (e) {
reject(e);
}
}
else {
wfCtx.finish();
hasCancelled = true;
resolve();
}
}
};
let cachedValue = me.context.targetCache().get(target, cacheKey, NOT_FOUND_IN_CACHE);
let defaultValue = p.defaultValue;
if (NOT_FOUND_IN_CACHE !== cachedValue) {
defaultValue = cachedValue; // use cached value as default one
}
if (!showAlways && doCache && (NOT_FOUND_IN_CACHE !== cachedValue)) {
// is cached
completed(null,
cachedValue);
}
else {
// ask the user for value
let propertyPlaceHolders: deploy_values.ValueBase[] = [];
for (let pn in properties) {
propertyPlaceHolders.push(new deploy_values.StaticValue({
name: pn,
value: properties[pn],
}));
}
let promptText = deploy_helpers.toStringSafe(p.text);
promptText = deploy_values.replaceWithValues(propertyPlaceHolders, promptText);
promptText = deploy_values.replaceWithValues(me.context.values(), promptText);
promptText = !deploy_helpers.isEmptyString(promptText) ? promptText : undefined;
let placeHolderText = deploy_helpers.toStringSafe(p.valuePlaceHolder);
placeHolderText = deploy_values.replaceWithValues(propertyPlaceHolders, placeHolderText);
placeHolderText = deploy_values.replaceWithValues(me.context.values(), placeHolderText);
placeHolderText = !deploy_helpers.isEmptyString(placeHolderText) ? placeHolderText : undefined;
const CONVERT_INPUT_VALUE = (inputVal: string): any => {
switch (deploy_helpers.normalizeString(p.handleAs)) {
case 'list':
return JSON.parse('[ ' + inputVal + ' ]');
case 'object':
if (deploy_helpers.isEmptyString(inputVal)) {
return undefined;
}
return JSON.parse(inputVal);
}
return inputVal;
};
vscode.window.showInputBox({
ignoreFocusOut: ignoreFocusOut,
password: deploy_helpers.toBooleanSafe(p.isPassword),
placeHolder: placeHolderText,
prompt: promptText,
value: promptValueToString(defaultValue),
validateInput: (v) => {
let errMsg: string;
try {
let args: ValueValidatorModuleExecutorArguments = {
emitGlobal: function() {
return deploy_globals.EVENTS
.emit
.apply(deploy_globals.EVENTS, arguments);
},
globals: me.context.globals(),
options: deploy_helpers.cloneObject(p.validatorOptions),
properties: propertyNames,
require: function(id) {
return me.context.require(id);
},
targets: targets,
value: CONVERT_INPUT_VALUE(v),
};
let isValid = deploy_helpers.toBooleanSafe(validator(args), true);
if (!isValid) {
errMsg = deploy_helpers.toStringSafe(args.message);
if (deploy_helpers.isEmptyString(errMsg)) {
errMsg = i18.t('plugins.prompt.invalidInput');
}
}
}
catch (e) {
errMsg = i18.t('errors.withCategory',
'PromptPlugin.deployWorkspace().showInputBox().validateInput', e);
}
if (deploy_helpers.isEmptyString(errMsg)) {
errMsg = undefined;
}
return errMsg;
},
}).then((userValue) => {
try {
completed(null,
CONVERT_INPUT_VALUE(userValue));
}
catch (e) {
completed(e);
}
}, (err) => {
completed(err);
});
}
});
});
});
// start deployment
wf.next(async (wfCtx) => {
let properties = wfCtx.value['properties'];
let wfTargets = Workflows.create();
// create a sub workflow for
// each target
me.getTargetsWithPlugins(target, targets).forEach(tp => {
let clonedTarget: Object = deploy_helpers.cloneObject(tp.target);
// fill properties of
// current target with data
wfTargets.next(() => {
for (let p in properties) {
clonedTarget[p] = properties[p];
}
});
// execute for each underlying plugin
wfTargets.next(async () => {
let wfPlugins = Workflows.create();
tp.plugins.forEach(pi => {
wfPlugins.next(() => {
return new Promise<any>((resolve, reject) => {
try {
pi.deployWorkspace(files, clonedTarget, {
baseDirectory: opts.baseDirectory,
context: deploy_plugins.createPluginContext(opts.context || me.context),
onBeforeDeployFile: (sender, e) => {
if (opts.onBeforeDeployFile) {
opts.onBeforeDeployFile(me, {
destination: e.destination,
file: e.file,
target: clonedTarget,
});
}
},
onCompleted: (sender, e) => {
if (e.error) {
reject(e.error);
}
else {
resolve();
}
},
onFileCompleted: (sender, e) => {
if (opts.onFileCompleted) {
opts.onFileCompleted(me, {
canceled: e.canceled,
error: e.error,
file: e.file,
target: clonedTarget,
});
}
},
});
}
catch (e) {
reject(e);
}
});
});
});
wfPlugins.on('action.after',
afterWorkflowsAction);
await wfPlugins.start();
});
});
wfTargets.on('action.after',
afterWorkflowsAction);
await wfTargets.start();
});
// cache values
wf.next((wfCtx) => {
let propertiesToCache: any[] = wfCtx.value['propertiesToCache'];
propertiesToCache.forEach(ptc => {
me.context.targetCache()
.set(target, ptc.key, ptc.value);
});
});
wf.on('action.after',
afterWorkflowsAction);
// start the workflow
wf.start().then(() => {
completed(null);
}).catch((err) => {
completed(err);
});
}
catch (e) {
completed(e);
}
}
public info(): deploy_contracts.DeployPluginInfo {
return {
description: i18.t('plugins.prompt.description'),
};
}
}
/**
* Creates a new Plugin.
*
* @param {deploy_contracts.DeployContext} ctx The deploy context.
*
* @returns {deploy_contracts.DeployPlugin} The new instance.
*/
export function createPlugin(ctx: deploy_contracts.DeployContext): deploy_contracts.DeployPlugin {
return new PromptPlugin(ctx);
} | the_stack |
import * as pathLib from "path";
import * as fs from "fs";
import { IConfigUnsupported } from "../api/IChecker";
import { IScanSummary, IScanSummaryCounts } from "./ReportUtil";
declare var after;
/**
* This function is responsible for constructing the aChecker Reporter which will be used to, report
* the scan results, such as writing the page results and the summary to a JSON file. This reporter function
* is registered with Karma server and triggered based on events that are triggered by the karma communication.
*
* @param {Object} baseReporterDecorator - the base karma reporter, which had the base functions which we override
* @param {Object} config - All the Karma configuration, we will extract what we need from this over
* all object, we need the entire object so that we detect any changes in the object
* as other plugins are loaded and the object is updated dynamically.
* @param {Object} logger - logger object which is used to log debug/error/info messages
* @param {Object} emitter - emitter object which allows listening for any event that is triggered, and executing
* custom code to handle the event that was triggered.
*
* @return - N/A
*
* @memberOf this
*/
export class ACReporterJSON {
Config: IConfigUnsupported;
scanSummary: IScanSummary;
constructor(Config: IConfigUnsupported, scanSummary: IScanSummary) {
this.Config = Config;
this.scanSummary = scanSummary;
this.Config.DEBUG && console.log("START ACReporter Constructor");
let myThis = this;
if (typeof(after) !== "undefined") {
after(function(done) {
myThis.onRunComplete();
done && done();
});
} else {
process.on('beforeExit', function() {
myThis.onRunComplete();
});
}
this.Config.DEBUG && console.log("END ACReporter Constructor");
}
// This emitter function is responsible for calling this function when the info event is detected
report(info) {
this.Config.DEBUG && console.log("START 'info' emitter function");
// Save the results of a single scan to a JSON file based on the label provided
this.savePageResults(info);
// Update the overall summary object count object to include the new scan that was performed
this.addToSummaryCount(info.summary.counts);
// Save the summary of this scan into global space of this reporter, to be logged
// once the whole scan is done.
this.addResultsToGlobal(info);
this.Config.DEBUG && console.log("END 'info' emitter function");
};
/**
* This function is responsible for performing any action when the entire karma run is done.
* Overrides onRunComplete function from baseReporterDecorator
*
* @override
*
* @memberOf this
*/
onRunComplete() {
this.Config.DEBUG && console.log("START 'ACReporterJSON:onRunComplete' function");
// Add End time when the whole karma run is done
// End time will be in milliseconds elapsed since 1 January 1970 00:00:00 UTC up until now.
this.scanSummary.endReport = Date.now();
// Save summary object to a JSON file.
this.saveSummary(this.scanSummary);
this.Config.DEBUG && console.log("END 'ACReporterJSON:onRunComplete' function");
};
/**
* This function is responsible for saving a single scans results to a file as JSON. On a side note
* this function will also extract the label which will be the file names where the results will be
* saved.
*
* @param {Object} config - Karma config object, used to extrat the outputFolder from the ACConfig.
* @param {Object} results - Provide the scan results for a single page that should be saved.
*
* @memberOf this
*/
savePageResults(results) {
this.Config.DEBUG && console.log("START 'savePageResults' function");
// Extract the outputFolder from the ACConfig (this is the user config that they provid)
let resultDir = this.Config.outputFolder;
this.Config.DEBUG && console.log("Results are going to be stored under results directory: \"" + resultDir + "\"");
// Build the full file name based on the label provide in the results and also the results dir specified in the
// configuration.
let resultsFileName = pathLib.join(resultDir, results.label + '.json');
/**************************************************** DEBUG INFORMATION ***************************************************************
// Debug example which has label which has unix "/" in them.
let resultsFileName = pathLib.join(resultDir, "dependencies/tools-rules-html/v2/a11y/test/g471/Table-layoutMultiple.html.json");
// Debug example which has a label which has Windows "\" in them.
let resultsFileName = pathLib.join(resultDir, "dependencies\\tools-rules-html\\v2\\a11y\\test\\g471\\Table-layoutMultiple.html.json");
***************************************************************************************************************************************/
// Write the results object as JSON to a file.
this.writeObjectToFileAsJSON(resultsFileName, results);
this.Config.DEBUG && console.log("END 'savePageResults' function");
}
/**
* This function is responsible for converting a javascript object into JSON and then writing that to a
* json file.
*
* @param {String} fileName - Full path of file where the JSON object should be stored
* @param {String} content - The javascript object which should be converted and saved to file as JSON.
*
* @memberOf this
*/
writeObjectToFileAsJSON(fileName, content) {
this.Config.DEBUG && console.log("START 'writeObjectToFileAsJSON' function");
// Extract the parent directory of the file name that is provided
let parentDir = pathLib.dirname(fileName);
this.Config.DEBUG && console.log("Parent Directoy: \"" + parentDir + "\"");
// In the case that the parent directoy does not exist, create the directories
if (!fs.existsSync(parentDir)) {
// Create the parent directory recerseivly if it does not exist.
fs.mkdirSync(parentDir, { recursive: true});
}
this.Config.DEBUG && console.log("Object will be written to file: \"" + fileName + "\"");
// Convert the Object into JSON string and write that to the file
// Make sure to use utf-8 encoding to avoid an issues specify to OS.
// In terms of the JSON string that is constructed use 4 spaces to format the JSON object, before
// writing it to the file.
fs.writeFileSync(fileName, JSON.stringify(content, null, ' '), { encoding: 'utf-8' });
this.Config.DEBUG && console.log("END 'writeObjectToFileAsJSON' function");
}
/**
* This function is responsible for saving the summary object of the while scan to a summary file.
*
* @param {Object} summary - The summary object that needs to be written to the summary file.
*
* @memberOf this
*/
saveSummary(summary) {
if (this.Config.outputFormat.indexOf("json") === -1) {
return;
}
this.Config.DEBUG && console.log("START 'saveSummary' function");
// Fetch the start time of the report from the summary object
let startReportTime = summary.startReport;
// Extract the outputFolder from the ACConfig (this is the user config that they provid)
let resultDir = this.Config.outputFolder;
this.Config.DEBUG && console.log("Converting: " + startReportTime);
// Now we need to take the from epoch format date and convert it to readable data
// Construct a new Data object with the start report time
let formattedData = new Date(startReportTime);
// Extract all the date fields which are needed to construct the filename
let year = this.datePadding(formattedData.getUTCFullYear());
let month = this.datePadding(formattedData.getUTCMonth()+1); // UTC Month is provid in a range of A Number, from 0-11, representing the month
let date = this.datePadding(formattedData.getUTCDate());
let hour = this.datePadding(formattedData.getHours());
let minute = this.datePadding(formattedData.getMinutes());
let seconds = this.datePadding(formattedData.getUTCSeconds());
this.Config.DEBUG && console.log("Year: " + year);
this.Config.DEBUG && console.log("Month: " + month);
this.Config.DEBUG && console.log("Date: " + date);
this.Config.DEBUG && console.log("Hour: " + hour);
this.Config.DEBUG && console.log("Minute: " + minute);
this.Config.DEBUG && console.log("Seconds: " + seconds);
// Build the summary file name based on the following format: summary_2016-06-20-13-26-45GMT.json
// summary_<year>-<month>-<date>-<hour>-<minute>-<seconds>GMT.json
let filename = "summary_" + year + "-" + month + "-" + date + "-" + hour + "-" + minute + "-" + seconds + "GMT.json";
// Add the results dir to the filename so that all the summary files can be saved under the output folder
filename = pathLib.join(resultDir, filename);
this.Config.DEBUG && console.log("Filename Constructed: " + filename);
// Write the summary object as json to the summary file.
this.writeObjectToFileAsJSON(filename, summary);
this.Config.DEBUG && console.log("END 'saveSummary' function");
}
/**
* This function is responsible for checking if a number needs a leading 0, and if it is needed
* add the leading 0 and return that as the new number.
*
* @param {int} number - Provide a number to check if a leading 0 needs to be added or not.
*
* @return {String} number - Return the number with the leading 0 added back
*
* @memberOf this
*/
datePadding(number) {
this.Config.DEBUG && console.log("START 'datePadding' function");
// In the case that the number is less then 10 we need to add the leading '0' to the number.
number = number < 10 ? '0' + number : number;
this.Config.DEBUG && console.log("END 'datePadding' function");
return number;
}
/**
* This function is responsible for indexing the results into global spaces based on label.
*
* @param {Object} results - Results object which will be provided to the user/wroten to the file.
* Refer to aChecker.buildReport function's return to figure out what the object
* will look like.
*
* @return - N/A - Global object is updated with the results
*
* @memberOf this
*/
addResultsToGlobal(results) {
this.Config.DEBUG && console.log("START 'addResultsToGlobal' function");
// Build the single page summary object to follow the following format:
// "label": "dependencies/tools-rules-html/v2/a11y/test/g471/Table-DataNoSummaryARIA.html",
// "counts": {
// "violation": 1,
// "potentialviolation": 0,
// "recommendation": 0,
// "potentialrecommendation": 0,
// "manual": 0
// }
let pageSummaryObject = {
label: results.label,
counts: results.summary.counts
}
this.Config.DEBUG && console.log("Adding following object to scanSummary.pageScanSummary: ");
this.Config.DEBUG && console.log(pageSummaryObject);
// Add the summary count for this scan to the pageScanSummary object which is in the global space
// Index this by the label.
this.scanSummary.pageScanSummary.push(pageSummaryObject);
this.Config.DEBUG && console.log("END 'addResultsToGlobal' function");
}
/**
 * Updates/creates the global violation summary for the engine karma run
 * for the browser that it is running on. Takes the pageCount object (part
 * of the page results) and adds each level's count into the global
 * summary, providing an overall summary of violations for all testcases
 * run and all scans done.
 *
 * @param {Object} pageCount - Page count object mapping violation levels
 *                             (e.g. "violation", "potentialviolation", ...)
 *                             to integer counts.
 *
 * @return N/A - Global summary object is updated with the counts
 *
 * @memberOf this
 */
addToSummaryCount(pageCount) {
    // Variable Declaration
    let ACScanSummary : IScanSummaryCounts = this.scanSummary.counts || {};
    // In the case ACScanSummary is empty this is the first scan: seed the
    // summary with a *copy* of pageCount. (Assigning pageCount directly
    // would alias the caller's object, so subsequent scans would silently
    // mutate the caller's data.)
    if (Object.keys(ACScanSummary).length === 0) {
        ACScanSummary = Object.assign({}, pageCount);
    } else {
        // Not the first scan: add up the summary. Go through the pageCount
        // object and for each of the levels, extract the value and add it
        // to the running violation summary. Guard with `|| 0` so a level
        // missing from the summary (but present in pageCount) starts from
        // 0 instead of producing NaN.
        for (let level in pageCount) {
            ACScanSummary[level] = (ACScanSummary[level] || 0) + pageCount[level];
        }
    }
    // Assign the new violation summary back to the global object
    this.scanSummary.counts = ACScanSummary;
}
}; | the_stack |
/// <reference types="node" />
import * as stream from "stream";
// Markup data
//-----------------------------------------------------------------------------------
declare namespace MarkupData {
interface Location {
/**
* One-based line index
*/
line: number;
/**
* One-based column index
*/
col: number;
/**
* Zero-based first character index
*/
startOffset: number;
/**
* Zero-based last character index
*/
endOffset: number;
}
interface AttributesLocation {
[attributeName: string]: Location;
}
interface StartTagLocation extends Location {
/**
* Start tag attributes' location info
*/
attrs: AttributesLocation
}
interface ElementLocation extends StartTagLocation {
/**
* Element's start tag location info.
*/
startTag: StartTagLocation;
/**
* Element's end tag location info.
*/
endTag: Location;
}
}
// Options
//-----------------------------------------------------------------------------------
declare namespace Options {
export interface ParserOptions {
/**
* Enables source code location information. When enabled, each node (except the root node) will have a `__location` property.
* If the node is not an empty element, `__location` will be a {@link MarkupData.ElementLocation} object, otherwise it will be {@link MarkupData.Location}.
* If the element was implicitly created by the parser (as part of [tree correction](https://html.spec.whatwg.org/multipage/syntax.html#an-introduction-to-error-handling-and-strange-cases-in-the-parser)), its `__location` property will be `undefined`.
*
* **Default:** `false`
*/
locationInfo?: boolean;
/**
* Specifies the resulting tree format.
*
* **Default:** `treeAdapters.default`
*/
treeAdapter?: AST.TreeAdapter;
}
export interface SAXParserOptions {
/**
* Enables source code location information for the tokens.
* When enabled, each token event handler will receive {@link MarkupData.Location} (or {@link MarkupData.StartTagLocation})
* object as its last argument.
*/
locationInfo?: boolean;
}
export interface SerializerOptions {
/**
* Specifies input tree format.
*
* **Default:** `treeAdapters.default`
*/
treeAdapter?: AST.TreeAdapter;
}
}
// AST
//-----------------------------------------------------------------------------------
declare namespace AST {
/**
* [Document mode](https://dom.spec.whatwg.org/#concept-document-limited-quirks).
*/
type DocumentMode = 'no-quirks' | 'quirks' | 'limited-quirks';
// Default tree adapter
namespace Default {
/**
* Element attribute.
*/
interface Attribute {
/**
* The name of the attribute.
*/
name: string;
/**
* The value of the attribute.
*/
value: string;
/**
* The namespace of the attribute.
*/
namespace?: string;
/**
* The namespace-related prefix of the attribute.
*/
prefix?: string;
}
/**
* [Default tree adapter]{@link parse5.treeAdapters} Node interface.
*/
interface Node {
/**
* The name of the node. E.g. {@link Document} will have `nodeName` equal to '#document'`.
*/
nodeName: string;
}
/**
* [Default tree adapter]{@link parse5.treeAdapters} ParentNode interface.
*/
interface ParentNode {
/**
* Child nodes.
*/
childNodes: Node[];
}
/**
* [Default tree adapter]{@link parse5.treeAdapters} DocumentType interface.
*/
export interface DocumentType extends Node {
/**
* The name of the node.
*/
nodeName: '#documentType';
/**
* Document type name.
*/
name: string;
/**
* Document type public identifier.
*/
publicId: string;
/**
* Document type system identifier.
*/
systemId: string;
}
/**
* [Default tree adapter]{@link parse5.treeAdapters} Document interface.
*/
export interface Document extends ParentNode {
/**
* The name of the node.
*/
nodeName: '#document';
/**
* [Document mode](https://dom.spec.whatwg.org/#concept-document-limited-quirks).
*/
mode: DocumentMode;
}
/**
* [Default tree adapter]{@link parse5.treeAdapters} DocumentFragment interface.
*/
export interface DocumentFragment extends ParentNode {
/**
* The name of the node.
*/
nodeName: '#document-fragment';
}
/**
* [Default tree adapter]{@link parse5.treeAdapters} Element interface.
*/
export interface Element extends ParentNode {
/**
* The name of the node. Equals to element {@link tagName}.
*/
nodeName: string;
/**
* Element tag name.
*/
tagName: string;
/**
* Element namespace.
*/
namespaceURI: string;
/**
* List of element attributes.
*/
attrs: Attribute[];
/**
* Parent node.
*/
parentNode: ParentNode;
/**
* Element source code location info. Available if location info is enabled via {@link Options.ParserOptions}.
*/
__location?: MarkupData.ElementLocation;
}
/**
* [Default tree adapter]{@link parse5.treeAdapters} CommentNode interface.
*/
export interface CommentNode extends Node {
/**
* The name of the node.
*/
nodeName: '#comment';
/**
* Comment text.
*/
data: string;
/**
* Parent node.
*/
parentNode: ParentNode;
/**
* Comment source code location info. Available if location info is enabled via {@link Options.ParserOptions}.
*/
__location?: MarkupData.Location;
}
/**
* [Default tree adapter]{@link parse5.treeAdapters} TextNode interface.
*/
export interface TextNode extends Node {
/**
* The name of the node.
*/
nodeName: '#text';
/**
* Text content.
*/
value: string;
/**
* Parent node.
*/
parentNode: ParentNode;
/**
* Text node source code location info. Available if location info is enabled via {@link Options.ParserOptions}.
*/
__location?: MarkupData.Location;
}
}
// htmlparser2 tree adapter
namespace HtmlParser2 {
/**
* [htmlparser2 tree adapter]{@link parse5.treeAdapters} Node interface.
*/
interface Node {
/**
* The type of the node. E.g. {@link Document} will have `type` equal to 'root'`.
*/
type: string;
/**
* [DOM spec](https://dom.spec.whatwg.org/#dom-node-nodetype)-compatible node {@link type}.
*/
nodeType: number;
/**
* Parent node.
*/
parent: ParentNode;
/**
* Same as {@link parent}. [DOM spec](https://dom.spec.whatwg.org)-compatible alias.
*/
parentNode: ParentNode;
/**
* Previous sibling.
*/
prev: Node;
/**
* Same as {@link prev}. [DOM spec](https://dom.spec.whatwg.org)-compatible alias.
*/
previousSibling: Node;
/**
* Next sibling.
*/
next: Node;
/**
* Same as {@link next}. [DOM spec](https://dom.spec.whatwg.org)-compatible alias.
*/
nextSibling: Node;
}
/**
* [htmlparser2 tree adapter]{@link parse5.treeAdapters} ParentNode interface.
*/
interface ParentNode extends Node {
/**
* Child nodes.
*/
children: Node[];
/**
* Same as {@link children}. [DOM spec](https://dom.spec.whatwg.org)-compatible alias.
*/
childNodes: Node[];
/**
* First child of the node.
*/
firstChild: Node;
/**
* Last child of the node.
*/
lastChild: Node;
}
/**
* [htmlparser2 tree adapter]{@link parse5.treeAdapters} DocumentType interface.
*/
export interface DocumentType extends Node {
/**
* The type of the node.
*/
type: 'directive';
/**
* Node name.
*/
name: '!doctype';
/**
* Serialized doctype {@link name}, {@link publicId} and {@link systemId}.
*/
data: string;
/**
* Document type name.
*/
'x-name':string;
/**
* Document type public identifier.
*/
'x-publicId': string;
/**
* Document type system identifier.
*/
'x-systemId': string;
}
/**
* [htmlparser2 tree adapter]{@link parse5.treeAdapters} Document interface.
*/
export interface Document extends ParentNode {
/**
* The type of the node.
*/
type: 'root';
/**
* The name of the node.
*/
name: 'root';
/**
* [Document mode](https://dom.spec.whatwg.org/#concept-document-limited-quirks).
*/
'x-mode': DocumentMode;
}
/**
* [htmlparser2 tree adapter]{@link parse5.treeAdapters} DocumentFragment interface.
*/
export interface DocumentFragment extends ParentNode {
/**
* The type of the node.
*/
type: 'root';
/**
* The name of the node.
*/
name: 'root';
}
/**
* [htmlparser2 tree adapter]{@link parse5.treeAdapters} Element interface.
*/
export interface Element extends ParentNode {
/**
* The name of the node. Equals to element {@link tagName}.
*/
name: string;
/**
* Element tag name.
*/
tagName: string;
/**
* Element namespace.
*/
namespace: string;
/**
* Element attributes.
*/
attribs: { [name: string]: string };
/**
* Element attribute namespaces.
*/
'x-attribsNamespace': { [name: string]: string };
/**
* Element attribute namespace-related prefixes.
*/
'x-attribsPrefix': { [name: string]: string };
/**
* Element source code location info. Available if location info is enabled via {@link Options.ParserOptions}.
*/
__location?: MarkupData.ElementLocation;
}
/**
* [htmlparser2 tree adapter]{@link parse5.treeAdapters} CommentNode interface.
*/
export interface CommentNode extends Node {
/**
* The name of the node.
*/
name: 'comment';
/**
* Comment text.
*/
data: string;
/**
* Same as {@link data}. [DOM spec](https://dom.spec.whatwg.org)-compatible alias.
*/
nodeValue: string;
/**
* Comment source code location info. Available if location info is enabled via {@link Options.ParserOptions}.
*/
__location?: MarkupData.Location;
}
/**
* [htmlparser2 tree adapter]{@link parse5.treeAdapters} TextNode interface.
*/
export interface TextNode extends Node {
/**
* The name of the node.
*/
name: 'text';
/**
* Text content.
*/
data: string;
/**
* Same as {@link data}. [DOM spec](https://dom.spec.whatwg.org)-compatible alias.
*/
nodeValue: string;
/**
* Text node source code location info. Available if location info is enabled via {@link Options.ParserOptions}.
*/
__location?: MarkupData.Location;
}
}
// Unions
// NOTE: we use `Object` in unions to support custom tree adapter implementations.
// TypeScript Handbook suggests to always use `any` instead of `Object`, but in that
// case language service hints `any` as type, instead of actual union name.
/**
* Generic Node interface.
* Cast to the actual AST interface (e.g. {@link parse5.AST.Default.Node}) to get access to the properties.
*/
type Node = Default.Node | HtmlParser2.Node | Object;
/**
* Generic ParentNode interface.
* Cast to the actual AST interface (e.g. {@link parse5.AST.Default.ParentNode}) to get access to the properties.
*/
type ParentNode = Default.ParentNode | HtmlParser2.ParentNode | Object;
/**
* Generic DocumentType interface.
* Cast to the actual AST interface (e.g. {@link parse5.AST.Default.DocumentType}) to get access to the properties.
*/
type DocumentType = Default.DocumentType | HtmlParser2.DocumentType | Object;
/**
* Generic Document interface.
* Cast to the actual AST interface (e.g. {@link parse5.AST.Default.Document}) to get access to the properties.
*/
type Document = Default.Document | HtmlParser2.Document | Object;
/**
* Generic DocumentFragment interface.
* Cast to the actual AST interface (e.g. {@link parse5.AST.Default.DocumentFragment}) to get access to the properties.
*/
type DocumentFragment = Default.DocumentFragment | HtmlParser2.DocumentFragment | Object;
/**
* Generic Element interface.
* Cast to the actual AST interface (e.g. {@link parse5.AST.Default.Element}) to get access to the properties.
*/
type Element = Default.Element | HtmlParser2.Element | Object;
/**
* Generic TextNode interface.
* Cast to the actual AST interface (e.g. {@link parse5.AST.Default.TextNode}) to get access to the properties.
*/
type TextNode = Default.TextNode | HtmlParser2.TextNode | Object;
/**
* Generic CommentNode interface.
* Cast to the actual AST interface (e.g. {@link parse5.AST.Default.CommentNode}) to get access to the properties.
*/
type CommentNode = Default.CommentNode | HtmlParser2.CommentNode | Object;
// Tree adapter interface
//-----------------------------------------------------------------------------------
/**
* Tree adapter is a set of utility functions that provides minimal required abstraction layer between parser and a specific AST format.
* Note that `TreeAdapter` is not designed to be a general purpose AST manipulation library. You can build such library
* on top of existing `TreeAdapter` or use one of the existing libraries from npm.
*
* @see [default implementation](https://github.com/inikulin/parse5/blob/master/lib/tree_adapters/default.js)
*/
export interface TreeAdapter {
/**
* Creates a document node.
*/
createDocument(): AST.Document;
/**
* Creates a document fragment node.
*/
createDocumentFragment(): AST.DocumentFragment;
/**
* Creates an element node.
*
* @param tagName - Tag name of the element.
* @param namespaceURI - Namespace of the element.
* @param attrs - Attribute name-value pair array. Foreign attributes may contain `namespace` and `prefix` fields as well.
*/
createElement(tagName: string, namespaceURI: string, attrs: AST.Default.Attribute[]): AST.Element;
/**
* Creates a comment node.
*
* @param data - Comment text.
*/
createCommentNode(data: string): AST.CommentNode;
/**
* Appends a child node to the given parent node.
*
* @param parentNode - Parent node.
* @param newNode - Child node.
*/
appendChild(parentNode: AST.ParentNode, newNode: AST.Node): void;
/**
* Inserts a child node to the given parent node before the given reference node.
*
* @param parentNode - Parent node.
* @param newNode - Child node.
* @param referenceNode - Reference node.
*/
insertBefore(parentNode: AST.ParentNode, newNode: AST.Node, referenceNode: AST.Node): void;
/**
* Sets the `<template>` element content element.
*
* @param templateElement - `<template>` element.
* @param contentElement - Content element.
*/
setTemplateContent(templateElement: AST.Element, contentElement: AST.DocumentFragment): void;
/**
* Returns the `<template>` element content element.
*
* @param templateElement - `<template>` element.
*/
getTemplateContent(templateElement: AST.Element): AST.DocumentFragment;
/**
* Sets the document type. If the `document` already contains a document type node, the `name`, `publicId` and `systemId`
* properties of this node will be updated with the provided values. Otherwise, creates a new document type node
* with the given properties and inserts it into the `document`.
*
* @param document - Document node.
* @param name - Document type name.
* @param publicId - Document type public identifier.
* @param systemId - Document type system identifier.
*/
setDocumentType(document: AST.Document, name: string, publicId: string, systemId: string): void;
/**
* Sets the [document mode](https://dom.spec.whatwg.org/#concept-document-limited-quirks).
*
* @param document - Document node.
* @param mode - Document mode.
*/
setDocumentMode(document: AST.Document, mode: AST.DocumentMode): void;
/**
* Returns [document mode](https://dom.spec.whatwg.org/#concept-document-limited-quirks).
*
* @param document - Document node.
*/
getDocumentMode(document: AST.Document): AST.DocumentMode;
/**
* Removes a node from its parent.
*
* @param node - Node to remove.
*/
detachNode(node: AST.Node): void;
/**
* Inserts text into a node. If the last child of the node is a text node, the provided text will be appended to the
* text node content. Otherwise, inserts a new text node with the given text.
*
* @param parentNode - Node to insert text into.
* @param text - Text to insert.
*/
insertText(parentNode: AST.ParentNode, text: string): void;
/**
* Inserts text into a sibling node that goes before the reference node. If this sibling node is the text node,
* the provided text will be appended to the text node content. Otherwise, inserts a new sibling text node with
* the given text before the reference node.
*
* @param parentNode - Node to insert text into.
* @param text - Text to insert.
* @param referenceNode - Node to insert text before.
*/
insertTextBefore(parentNode: AST.ParentNode, text: string, referenceNode: AST.Node): void;
/**
* Copies attributes to the given element. Only attributes that are not yet present in the element are copied.
*
* @param recipient - Element to copy attributes into.
* @param attrs - Attributes to copy.
*/
adoptAttributes(recipient: AST.Element, attrs: AST.Default.Attribute[]): void;
/**
* Returns the first child of the given node.
*
* @param node - Node.
*/
getFirstChild(node: AST.ParentNode): AST.Node;
/**
* Returns the given node's children in an array.
*
* @param node - Node.
*/
getChildNodes(node: AST.ParentNode): AST.Node[];
/**
* Returns the given node's parent.
*
* @param node - Node.
*/
getParentNode(node: AST.Node): AST.ParentNode;
/**
* Returns the given element's attributes in an array, in the form of name-value pairs.
* Foreign attributes may contain `namespace` and `prefix` fields as well.
*
* @param element - Element.
*/
getAttrList(element: AST.Element): AST.Default.Attribute[];
/**
* Returns the given element's tag name.
*
* @param element - Element.
*/
getTagName(element: AST.Element): string;
/**
* Returns the given element's namespace.
*
* @param element - Element.
*/
getNamespaceURI(element: AST.Element): string;
/**
* Returns the given text node's content.
*
* @param textNode - Text node.
*/
getTextNodeContent(textNode: AST.TextNode): string;
/**
* Returns the given comment node's content.
*
* @param commentNode - Comment node.
*/
getCommentNodeContent(commentNode: AST.CommentNode): string;
/**
* Returns the given document type node's name.
*
* @param doctypeNode - Document type node.
*/
getDocumentTypeNodeName(doctypeNode: AST.DocumentType): string;
/**
* Returns the given document type node's public identifier.
*
* @param doctypeNode - Document type node.
*/
getDocumentTypeNodePublicId(doctypeNode: AST.DocumentType): string;
/**
* Returns the given document type node's system identifier.
*
* @param doctypeNode - Document type node.
*/
getDocumentTypeNodeSystemId(doctypeNode: AST.DocumentType): string;
/**
* Determines if the given node is a text node.
*
* @param node - Node.
*/
isTextNode(node: AST.Node): boolean;
/**
* Determines if the given node is a comment node.
*
* @param node - Node.
*/
isCommentNode(node: AST.Node): boolean;
/**
* Determines if the given node is a document type node.
*
* @param node - Node.
*/
isDocumentTypeNode(node: AST.Node): boolean;
/**
* Determines if the given node is an element.
*
* @param node - Node.
*/
isElementNode(node: AST.Node): boolean;
}
}
// Included tree adapters
//-----------------------------------------------------------------------------------
/**
* Provides built-in tree adapters that can be used for parsing and serialization.
*
* @example
*```js
*
* const parse5 = require('parse5');
*
* // Uses the default tree adapter for parsing.
* const document = parse5.parse('<div></div>', {
* treeAdapter: parse5.treeAdapters.default
* });
*
* // Uses the htmlparser2 tree adapter with the SerializerStream.
* const serializer = new parse5.SerializerStream(node, {
* treeAdapter: parse5.treeAdapters.htmlparser2
* });
* ```
*/
export var treeAdapters: {
/**
* Default tree format for parse5.
*/
default: AST.TreeAdapter,
/**
* Quite popular [htmlparser2](https://github.com/fb55/htmlparser2) tree format
* (e.g. used by [cheerio](https://github.com/MatthewMueller/cheerio) and [jsdom](https://github.com/tmpvar/jsdom)).
*/
htmlparser2: AST.TreeAdapter
};
// Shorthand methods
//-----------------------------------------------------------------------------------
/**
* Parses an HTML string.
*
* @param html - Input HTML string.
* @param options - Parsing options.
*
* @example
* ```js
*
* const parse5 = require('parse5');
*
* const document = parse5.parse('<!DOCTYPE html><html><head></head><body>Hi there!</body></html>');
*
* console.log(document.childNodes[1].tagName); //> 'html'
* ```
*/
export function parse(html: string, options?: Options.ParserOptions): AST.Document;
/**
* Parses an HTML fragment.
*
* @param fragmentContext - Parsing context element. If specified, given fragment will be parsed as if it was set to the context element's `innerHTML` property.
* @param html - Input HTML fragment string.
* @param options - Parsing options.
*
* @example
* ```js
*
* const parse5 = require('parse5');
*
* const documentFragment = parse5.parseFragment('<table></table>');
*
* console.log(documentFragment.childNodes[0].tagName); //> 'table'
*
* // Parses the html fragment in the context of the parsed <table> element.
* const trFragment = parser.parseFragment(documentFragment.childNodes[0], '<tr><td>Shake it, baby</td></tr>');
*
* console.log(trFragment.childNodes[0].childNodes[0].tagName); //> 'td'
* ```
*/
export function parseFragment(fragmentContext: AST.Element, html: string, options?: Options.ParserOptions): AST.DocumentFragment;
export function parseFragment(html: string, options?: Options.ParserOptions): AST.DocumentFragment;
/**
* Serializes an AST node to an HTML string.
*
* @param node - Node to serialize.
* @param options - Serialization options.
*
* @example
* ```js
*
* const parse5 = require('parse5');
*
* const document = parse5.parse('<!DOCTYPE html><html><head></head><body>Hi there!</body></html>');
*
* // Serializes a document.
* const html = parse5.serialize(document);
*
* // Serializes the <html> element content.
* const str = parse5.serialize(document.childNodes[1]);
*
* console.log(str); //> '<head></head><body>Hi there!</body>'
* ```
*/
export function serialize(node: AST.Node, options?: Options.SerializerOptions): string;
// Parser stream
//-----------------------------------------------------------------------------------
/**
* Streaming HTML parser with scripting support.
* A [writable stream](https://nodejs.org/api/stream.html#stream_class_stream_writable).
*
* @example
* ```js
*
* const parse5 = require('parse5');
* const http = require('http');
*
* // Fetch the page content and obtain its <head> node
* http.get('http://inikulin.github.io/parse5/', res => {
* const parser = new parse5.ParserStream();
*
* parser.once('finish', () => {
* console.log(parser.document.childNodes[1].childNodes[0].tagName); //> 'head'
* });
*
* res.pipe(parser);
* });
* ```
*/
export class ParserStream extends stream.Writable {
/**
* @param options - Parsing options.
*/
constructor(options?: Options.ParserOptions);
/**
* The resulting document node.
*/
document: AST.Document;
/**
* Raised when the parser encounters a `<script>` element.
* If this event has listeners, parsing will be suspended once it is emitted.
* So, if `<script>` has the `src` attribute, you can fetch it, execute and then resume parsing just like browsers do.
*
* @param listener.scriptElement - The script element that caused the event.
* @param listener.documentWrite - Write additional `html` at the current parsing position. Suitable for implementing the DOM `document.write` and `document.writeln` methods.
* @param listener.documentWrite.html - HTML to write.
* @param listener.resume - Resumes parsing.
*
* @example
* ```js
*
* const parse = require('parse5');
* const http = require('http');
*
* const parser = new parse5.ParserStream();
*
* parser.on('script', (scriptElement, documentWrite, resume) => {
* const src = parse5.treeAdapters.default.getAttrList(scriptElement)[0].value;
*
* http.get(src, res => {
* // Fetch the script content, execute it with DOM built around `parser.document` and
* // `document.write` implemented using `documentWrite`.
* ...
* // Then resume parsing.
* resume();
* });
* });
*
* parser.end('<script src="example.com/script.js"></script>');
* ```
*/
on(event: 'script', listener: (scriptElement: AST.Element, documentWrite: (html: string) => void, resume: () => void) => void): this;
/**
* WritableStream events
*/
on(event: string, listener: Function): this;
}
// Plain text conversion stream
//-----------------------------------------------------------------------------------
/**
* Converts plain text files into HTML document as required by [HTML specification](https://html.spec.whatwg.org/#read-text).
* A [writable stream](https://nodejs.org/api/stream.html#stream_class_stream_writable).
*
* @example
* ```js
*
* const parse5 = require('parse5');
* const fs = require('fs');
*
* const file = fs.createReadStream('war_and_peace.txt');
* const converter = new parse5.PlainTextConversionStream();
*
* converter.once('finish', () => {
* console.log(converter.document.childNodes[1].childNodes[0].tagName); //> 'head'
* });
*
* file.pipe(converter);
* ```
*/
export class PlainTextConversionStream extends ParserStream { }
// SAX parser
//-----------------------------------------------------------------------------------
/**
* Streaming [SAX](https://en.wikipedia.org/wiki/Simple_API_for_XML)-style HTML parser.
* A [transform stream](https://nodejs.org/api/stream.html#stream_class_stream_transform)
* (which means you can pipe *through* it, see example).
*
* @example
* ```js
*
* const parse5 = require('parse5');
* const http = require('http');
* const fs = require('fs');
*
* const file = fs.createWriteStream('/home/google.com.html');
* const parser = new parse5.SAXParser();
*
* parser.on('text', text => {
* // Handle page text content
* ...
* });
*
* http.get('http://google.com', res => {
* // SAXParser is the Transform stream, which means you can pipe
* // through it. So, you can analyze page content and, e.g., save it
* // to the file at the same time:
* res.pipe(parser).pipe(file);
* });
* ```
*/
export class SAXParser extends stream.Transform {
/**
* @param options - Parsing options.
*/
constructor(options?: Options.SAXParserOptions);
/**
* Raised when the parser encounters a start tag.
*
* @param listener.name - Tag name.
* @param listener.attrs - List of attributes.
* @param listener.selfClosing - Indicates if the tag is self-closing.
* @param listener.location - Start tag source code location info. Available if location info is enabled via {@link Options.SAXParserOptions}.
*/
on(event: 'startTag', listener: (name: string, attrs: AST.Default.Attribute[], selfClosing: boolean, location?: MarkupData.StartTagLocation) => void): this;
/**
* Raised when the parser encounters an end tag.
*
* @param listener.name - Tag name.
* @param listener.location - End tag source code location info. Available if location info is enabled via {@link Options.SAXParserOptions}.
*/
on(event: 'endTag', listener: (name: string, location?: MarkupData.Location) => void): this;
/**
* Raised when the parser encounters a comment.
*
* @param listener.text - Comment text.
* @param listener.location - Comment source code location info. Available if location info is enabled via {@link Options.SAXParserOptions}.
*/
on(event: 'comment', listener: (text: string, location?: MarkupData.Location) => void): this;
/**
* Raised when the parser encounters text content.
*
* @param listener.text - Text content.
* @param listener.location - Text content code location info. Available if location info is enabled via {@link Options.SAXParserOptions}.
*/
on(event: 'text', listener: (text: string, location?: MarkupData.Location) => void): this;
/**
* Raised when the parser encounters a [document type declaration](https://en.wikipedia.org/wiki/Document_type_declaration).
*
* @param listener.name - Document type name.
* @param listener.publicId - Document type public identifier.
* @param listener.systemId - Document type system identifier.
* @param listener.location - Document type declaration source code location info. Available if location info is enabled via {@link Options.SAXParserOptions}.
*/
on(event: 'doctype', listener: (name: string, publicId: string, systemId: string, location?: MarkupData.Location) => void): this;
/**
* TransformStream events
*/
on(event: string, listener: Function): this;
/**
* Stops parsing. Useful if you want the parser to stop consuming CPU time once you've obtained the desired info
* from the input stream. Doesn't prevent piping, so that data will flow through the parser as usual.
*
* @example
* ```js
*
* const parse5 = require('parse5');
* const http = require('http');
* const fs = require('fs');
*
* const file = fs.createWriteStream('google.com.html');
* const parser = new parse5.SAXParser();
*
* parser.on('doctype', (name, publicId, systemId) => {
* // Process doctype info and stop parsing
* ...
* parser.stop();
* });
*
* http.get('http://google.com', res => {
* // Despite the fact that parser.stop() was called whole
* // content of the page will be written to the file
* res.pipe(parser).pipe(file);
* });
* ```
*/
stop(): void;
}
// Serializer stream
//-----------------------------------------------------------------------------------
/**
* Streaming AST node to an HTML serializer.
* A [readable stream](https://nodejs.org/api/stream.html#stream_class_stream_readable).
*
* @example
* ```js
*
* const parse5 = require('parse5');
* const fs = require('fs');
*
* const file = fs.createWriteStream('/home/index.html');
*
* // Serializes the parsed document to HTML and writes it to the file.
* const document = parse5.parse('<body>Who is John Galt?</body>');
* const serializer = new parse5.SerializerStream(document);
*
* serializer.pipe(file);
* ```
*/
export class SerializerStream extends stream.Readable {
/**
* Streaming AST node to an HTML serializer. A readable stream.
*
* @param node - Node to serialize.
* @param options - Serialization options.
*/
constructor(node: AST.Node, options?: Options.SerializerOptions);
} | the_stack |
import React, { Component } from 'react';
import * as d3 from 'd3';
import ReactFauxDOM from 'react-faux-dom';
import memoize from 'memoizee';
import { createBigNumber } from 'utils/create-big-number';
import { ASKS, BIDS, BUY, SELL, ZERO } from 'modules/common/constants';
import Styles from 'modules/market-charts/components/depth/depth.styles.less';
import { MarketDepth } from 'modules/markets/helpers/order-for-market-depth';
import { ZoomOutIcon, ZoomInIcon } from 'modules/common/icons';
// Props for the market depth chart.
interface DepthChartProps {
  marketDepth: MarketDepth;
  // Order book price boundaries: max/min visible prices and the midpoint.
  orderBookKeys: { max: BigNumber; min: BigNumber; mid: BigNumber };
  // Number of decimal places used when formatting prices.
  pricePrecision: number;
  updateHoveredPrice: Function;
  updateHoveredDepth: Function;
  updateSelectedOrderProperties: Function;
  // Market's absolute minimum/maximum tradeable price.
  marketMin: BigNumber;
  marketMax: BigNumber;
  hasOrders: boolean;
  hoveredPrice?: any;
}
interface DepthChartState {
  // Index into ZOOM_LEVELS; 0 is the widest view (multiplier 1).
  zoom: number;
}
// Zoom multipliers applied to the distance between the mid price and the
// market bounds; index 0 is fully zoomed out, higher indices zoom in.
const ZOOM_LEVELS = [1, 0.8, 0.6, 0.4, 0.2];
// Highest valid index into ZOOM_LEVELS (most zoomed in).
const ZOOM_MAX = ZOOM_LEVELS.length - 1;
// this is important to make sure we don't infinitely redraw the chart / have the container keep growing
const MARGIN_OF_ERROR = 50;
// Static chart margins/offsets, in pixels.
const CHART_DIM = {
  top: 10,
  bottom: 20,
  right: 10,
  left: 10,
  stick: 5,
  tickOffset: 0,
};
// True when the live client size differs from the last rendered container
// size by more than MARGIN_OF_ERROR pixels, i.e. a redraw is warranted.
// Memoized so repeated lifecycle calls with identical sizes stay cheap.
const checkResize = memoize(
  (clientWidth, clientHeight, containerWidth, containerHeight) =>
    Math.abs(clientWidth + clientHeight - (containerWidth + containerHeight)) >
    MARGIN_OF_ERROR
);
/**
 * Pick the initial zoom: the first (widest) zoom level whose candidate
 * x-domain [mid - d, mid + d] still fits inside [marketMin, marketMax],
 * falling back to the tightest level when none fits.
 *
 * @param props - Needs `orderBookKeys` (BigNumber min/mid/max), `marketMin`
 *   and `marketMax` (BigNumbers).
 * @returns Index into ZOOM_LEVELS.
 */
function determineInitialZoom(props) {
  const { orderBookKeys, marketMin, marketMax } = props;
  const midPrice = orderBookKeys.mid;
  const minDistance = midPrice.minus(marketMin);
  const maxDistance = marketMax.minus(midPrice);
  const maxDistanceGreater = maxDistance.gt(minDistance);
  // Use BigNumber.times() instead of the JS `*` operator: `bigNumber * n`
  // silently coerces through valueOf() to a lossy float. This also matches
  // how determineDrawParams() scales the same distances.
  const ZoomLevelArray = ZOOM_LEVELS.map(zoomLevel => {
    const scaledDistance = maxDistanceGreater
      ? maxDistance.times(zoomLevel)
      : minDistance.times(zoomLevel);
    return [midPrice.minus(scaledDistance), midPrice.plus(scaledDistance)];
  });
  const zoom = ZoomLevelArray.findIndex(
    ele => ele[0].gte(marketMin) && ele[1].lte(marketMax)
  );
  return zoom === -1 ? ZOOM_MAX : zoom;
}
/**
 * Market depth chart. Renders cumulative bid/ask depth with d3 into a
 * react-faux-dom element (stored in `depthContainer`), with zoom buttons,
 * hover crosshairs and a tooltip. Redraws on resize, zoom, and when the
 * book or order-book keys change.
 */
export default class DepthChart extends Component<
  DepthChartProps,
  DepthChartState
> {
  static defaultProps = {
    hoveredPrice: null,
  };

  // Container <div> DOM node, captured via ref in render().
  depthChart: any;
  // React element produced from the faux-DOM after each draw.
  depthContainer: any;
  // Result of the most recent determineDrawParams() call.
  drawParams: any;
  // d3 linear scales assigned in drawDepth() and *called* in
  // drawCrosshairs(); previously mistyped as `number = 0`, which made
  // every `xScale(...)` invocation a type error.
  xScale: any = null;
  yScale: any = null;
  // Last rendered container size, compared against the live client size
  // by checkResize() to decide whether a redraw is needed.
  containerHeight: number = 0;
  containerWidth: number = 0;

  constructor(props) {
    super(props);
    this.state = {
      zoom: determineInitialZoom(props),
    };
    this.drawDepth = this.drawDepth.bind(this);
    this.drawCrosshairs = this.drawCrosshairs.bind(this);
    this.handleZoom = this.handleZoom.bind(this);
    this.handleResize = this.handleResize.bind(this);
    this.determineDrawParams = this.determineDrawParams.bind(this);
  }

  componentDidMount() {
    const {
      pricePrecision,
      marketDepth,
      marketMax,
      marketMin,
      orderBookKeys,
      updateHoveredPrice,
      updateSelectedOrderProperties,
      hasOrders,
    } = this.props;
    // Initial draw once the container ref exists.
    this.drawDepth({
      marketDepth,
      orderBookKeys,
      pricePrecision,
      marketMin,
      marketMax,
      updateHoveredPrice,
      updateSelectedOrderProperties,
      hasOrders,
      zoom: this.state.zoom,
    });
    window.addEventListener('resize', this.handleResize);
  }

  componentWillUnmount() {
    window.removeEventListener('resize', this.handleResize);
  }

  UNSAFE_componentWillUpdate(nextProps, nextState) {
    const { hoveredPrice, marketDepth, orderBookKeys } = this.props;
    const oldZoom = this.state.zoom;
    const { zoom } = nextState;
    // Cheap deep-equality via JSON stringify; the book is plain data.
    const curMarketDepth = JSON.stringify(marketDepth);
    const nextMarketDepth = JSON.stringify(nextProps.marketDepth);
    const handleChartDraw = () => this.drawDepth({
      marketDepth: nextProps.marketDepth,
      orderBookKeys: nextProps.orderBookKeys,
      pricePrecision: nextProps.pricePrecision,
      marketMin: nextProps.marketMin,
      marketMax: nextProps.marketMax,
      updateHoveredPrice: nextProps.updateHoveredPrice,
      updateSelectedOrderProperties: nextProps.updateSelectedOrderProperties,
      hasOrders: nextProps.hasOrders,
      zoom,
    });
    // Redraw when the book, the book keys, or the zoom changed...
    if (
      curMarketDepth !== nextMarketDepth ||
      JSON.stringify(orderBookKeys) !==
        JSON.stringify(nextProps.orderBookKeys) ||
      oldZoom !== zoom
    ) {
      handleChartDraw();
    } else if (
      // ...or when the container size drifted beyond the margin of error.
      checkResize(
        this.depthChart.clientWidth,
        this.depthChart.clientHeight,
        this.containerWidth,
        this.containerHeight
      )
    ) {
      handleChartDraw();
    }
    // Crosshairs can move without a full redraw.
    if (
      hoveredPrice !== nextProps.hoveredPrice ||
      curMarketDepth !== nextMarketDepth
    ) {
      this.drawCrosshairs({
        hoveredPrice: nextProps.hoveredPrice,
        pricePrecision: nextProps.pricePrecision,
        marketDepth: nextProps.marketDepth,
        marketMin: nextProps.marketMin,
        marketMax: nextProps.marketMax,
        drawParams: this.drawParams,
      });
    }
  }

  /**
   * Compute everything a draw needs: container size, x/y domains scaled by
   * the current zoom level, d3 scales, and a copy of the book extended with
   * synthetic terminal points at marketMin/marketMax so the depth lines
   * reach the chart edges.
   */
  determineDrawParams(options) {
    const {
      depthChart,
      marketDepth,
      marketMax,
      marketMin,
      orderBookKeys,
      zoom,
    } = options;
    // Keep the previously measured height to avoid the container growing
    // on every redraw; width tracks the live client width.
    const containerHeight = this.containerHeight
      ? this.containerHeight
      : this.depthChart.clientHeight;
    const containerWidth = this.depthChart.clientWidth;
    this.containerWidth = containerWidth;
    this.containerHeight = containerHeight;
    const drawHeight = containerHeight - CHART_DIM.bottom;
    // x-domain: symmetric around the mid price, using the larger of the
    // two distances to a market bound, scaled by the zoom level.
    const midPrice = orderBookKeys.mid;
    const minDistance = midPrice.minus(marketMin);
    const maxDistance = marketMax.minus(midPrice);
    const maxDistanceGreater = maxDistance.gt(minDistance);
    const zoomLevel = ZOOM_LEVELS[zoom];
    const scaledMaxDistance = maxDistance.times(zoomLevel);
    const scaledMinDistance = minDistance.times(zoomLevel);
    const xDomainMin = maxDistanceGreater
      ? midPrice.minus(scaledMaxDistance)
      : midPrice.minus(scaledMinDistance);
    const xDomainMax = maxDistanceGreater
      ? midPrice.plus(scaledMaxDistance)
      : midPrice.plus(scaledMinDistance);
    // y-domain: largest cumulative share amount still visible within the
    // x-domain, checked per book side.
    const yDomainMax = Object.keys(marketDepth)
      .reduce((p, side) => {
        const book = marketDepth[side];
        if (book.length > 0) {
          let firstFailingIndex = null;
          let price = null;
          if (side === BIDS) {
            price = xDomainMin;
            firstFailingIndex = book.findIndex(
              ele => ele[3] && price.gte(createBigNumber(ele[1]))
            );
          } else {
            price = xDomainMax;
            firstFailingIndex = book.findIndex(
              ele => ele[3] && price.lte(createBigNumber(ele[1]))
            );
          }
          const LargestShareAmount = createBigNumber(
            (book[firstFailingIndex - 1] && book[firstFailingIndex - 1][0]) ||
              book[book.length - 1][0] ||
              0
          );
          if (LargestShareAmount.gt(p)) return LargestShareAmount;
        }
        return p;
      }, ZERO)
      // .times(1.05)
      .toNumber();
    const xDomain = [xDomainMin.toNumber(), xDomainMax.toNumber()];
    const yDomain = [0, yDomainMax];
    const xScale = d3
      .scaleLinear()
      .domain(d3.extent(xDomain))
      .range([CHART_DIM.left, containerWidth - CHART_DIM.right]);
    const yScale = d3
      .scaleLinear()
      .clamp(true)
      .domain(d3.extent(yDomain))
      .range([drawHeight, CHART_DIM.top]);
    // Extend each side with a non-fillable terminal point at the market
    // bound so the step lines run to the chart edge.
    const newMarketDepth = {
      asks: [...marketDepth.asks],
      bids: [...marketDepth.bids],
    };
    if (newMarketDepth.asks.length > 0 && marketMax) {
      const askToCopy = newMarketDepth.asks[newMarketDepth.asks.length - 1];
      if (askToCopy[1] !== marketMax.toNumber()) {
        newMarketDepth.asks.push([
          askToCopy[0],
          marketMax.toNumber(),
          askToCopy[2],
          false,
        ]);
      }
    }
    if (newMarketDepth.bids.length > 0 && marketMin) {
      const bidToCopy = newMarketDepth.bids[newMarketDepth.bids.length - 1];
      if (bidToCopy[1] !== marketMin.toNumber()) {
        newMarketDepth.bids.push([
          bidToCopy[0],
          marketMin.toNumber(),
          bidToCopy[2],
          false,
        ]);
      }
    }
    return {
      containerWidth,
      containerHeight,
      drawHeight,
      newMarketDepth,
      xDomain,
      yDomain,
      xScale,
      yScale,
    };
  }

  /**
   * Full redraw: build a fresh faux-DOM svg, draw ticks, depth lines,
   * crosshair scaffolding and hover/click handlers, then store the React
   * element in `depthContainer` (rendered on the next render pass).
   */
  drawDepth(options, cb = null) {
    if (this.depthChart) {
      const {
        marketDepth,
        orderBookKeys,
        pricePrecision,
        marketMin,
        marketMax,
        updateHoveredPrice,
        updateSelectedOrderProperties,
        hasOrders,
        zoom,
      } = options;
      const drawParams = this.determineDrawParams({
        marketDepth,
        orderBookKeys,
        pricePrecision,
        marketMax,
        marketMin,
        zoom,
      });
      this.xScale = drawParams.xScale;
      this.yScale = drawParams.yScale;
      this.drawParams = drawParams;
      const depthContainer = new ReactFauxDOM.Element('div');
      const depthChart = d3
        .select(depthContainer)
        .style('display', 'flex')
        .append('svg')
        .attr('id', 'depth_chart')
        .attr('width', drawParams.containerWidth)
        .attr('height', drawParams.containerHeight);
      drawTicks({
        drawParams,
        depthChart,
        orderBookKeys,
        pricePrecision,
        marketMax,
        marketMin,
        hasOrders,
        marketDepth: drawParams.newMarketDepth,
      });
      drawLines({
        drawParams,
        depthChart,
        marketDepth: drawParams.newMarketDepth,
        hasOrders,
        marketMin,
        marketMax,
      });
      setupCrosshairs({
        drawParams,
        depthChart,
      });
      attachHoverClickHandlers({
        drawParams,
        depthChart,
        marketDepth,
        orderBookKeys,
        pricePrecision,
        marketMin,
        marketMax,
        updateHoveredPrice,
        updateSelectedOrderProperties,
      });
      this.drawCrosshairs({
        hoveredPrice: this.props.hoveredPrice,
        pricePrecision,
        marketDepth,
        marketMin,
        marketMax,
        drawParams,
      });
      this.depthContainer = depthContainer.toReact();
      if (cb) cb();
    }
  }

  /**
   * Position (or hide) the crosshair lines, dot and tooltip for the
   * currently hovered price, snapping to the nearest completely-filling
   * order, and push that order up via `updateHoveredDepth`.
   */
  drawCrosshairs(options) {
    const { updateHoveredDepth } = this.props;
    if (this.depthChart) {
      const {
        hoveredPrice,
        marketDepth,
        marketMin,
        marketMax,
        drawParams,
      } = options;
      const { xScale, yScale, containerHeight, containerWidth } = this;
      if (hoveredPrice == null) {
        d3.select('#crosshairs').style('display', 'none');
        d3.select('#hovered_tooltip_container').style('display', 'none');
        updateHoveredDepth([]);
      } else {
        const nearestFillingOrder = nearestCompletelyFillingOrder(
          hoveredPrice,
          marketDepth,
          marketMin,
          marketMax
        );
        if (nearestFillingOrder === null) return;
        updateHoveredDepth(nearestFillingOrder);
        d3.select('#crosshairs').style('display', null);
        // Vertical line only when the hovered price is inside the market.
        if (
          createBigNumber(hoveredPrice).gte(marketMin) &&
          createBigNumber(hoveredPrice).lte(marketMax)
        ) {
          d3.select('#crosshairX')
            .attr('x1', xScale(nearestFillingOrder[1]))
            .attr('y1', 0)
            .attr('x2', xScale(nearestFillingOrder[1]))
            // @ts-ignore
            .attr('y2', containerHeight - CHART_DIM.bottom)
            .style('display', null);
        } else {
          d3.select('#crosshairX').style('display', 'none');
        }
        // Horizontal line runs from the snapped point toward the edge of
        // the order's side (left edge for bids, right edge for asks).
        d3.select('#crosshairY')
          .attr(
            'x1',
            nearestFillingOrder[4] === BIDS ? 0 : xScale(nearestFillingOrder[1])
          )
          .attr('y1', yScale(nearestFillingOrder[0]))
          .attr(
            'x2',
            nearestFillingOrder[4] === BIDS
              ? xScale(nearestFillingOrder[1])
              : containerWidth
          )
          .attr('y2', yScale(nearestFillingOrder[0]));
        d3.select('#crosshairDot')
          .attr('cx', xScale(nearestFillingOrder[1]))
          .attr('cy', yScale(nearestFillingOrder[0]));
        d3.select('#crosshairDotOutline')
          .attr('cx', xScale(nearestFillingOrder[1]))
          .attr('cy', yScale(nearestFillingOrder[0]));
      }
    }
  }

  /**
   * Step the zoom index by `direction` (+1 zoom in, -1 zoom out),
   * ignoring steps that would leave the valid ZOOM_LEVELS range.
   */
  handleZoom(direction: number) {
    const { zoom } = this.state;
    const newZoom = ZOOM_LEVELS[zoom + direction]
      ? ZOOM_LEVELS[zoom + direction]
      : ZOOM_LEVELS[zoom];
    if (ZOOM_LEVELS[zoom] !== newZoom) {
      this.setState({
        zoom: zoom + direction,
      });
    }
  }

  // Window resize handler: redraw with current props/zoom.
  handleResize() {
    const {
      pricePrecision,
      marketDepth,
      marketMax,
      marketMin,
      orderBookKeys,
      updateHoveredPrice,
      updateSelectedOrderProperties,
      hasOrders,
    } = this.props;
    this.drawDepth({
      marketDepth,
      orderBookKeys,
      pricePrecision,
      marketMin,
      marketMax,
      updateHoveredPrice,
      updateSelectedOrderProperties,
      hasOrders,
      zoom: this.state.zoom,
    });
  }

  render() {
    const { zoom } = this.state;
    return (
      <div
        ref={depthChart => {
          this.depthChart = depthChart;
        }}
        className={Styles.MarketOutcomeDepth__container}
      >
        <button onClick={() => this.handleZoom(-1)} disabled={zoom === 0}>
          {ZoomOutIcon}
        </button>
        <span>mid price</span>
        <span>{`$${this.props.orderBookKeys.mid.toFixed()}`}</span>
        <button onClick={() => this.handleZoom(1)} disabled={zoom === ZOOM_MAX}>
          {ZoomInIcon}
        </button>
        {this.depthContainer}
      </div>
    );
  }
}
// Given a hovered/clicked price, find the book entry (across both sides)
// whose price is numerically closest, and compute the cumulative cost of
// filling from that entry back toward the start of its side's slice.
// Returns the matched entry, extended to
//   [quantity, price, amount, isFillable, side, cumulativeCost]
// or null when there is nothing to snap to, or the snap target would land
// outside the visible x-domain.
// NOTE(review): `drawParams` is effectively optional -- drawCrosshairs()
// calls this with only four arguments.
function nearestCompletelyFillingOrder(
  price,
  { asks = [], bids = [] },
  marketMin,
  marketMax,
  drawParams
) {
  const marketRange = createBigNumber(marketMax).minus(marketMin);
  // const { xDomain } = drawParams;
  const PRICE_INDEX = 1;
  // Only "completely filling" entries (index 3 truthy) are candidates;
  // tag each with its side so the caller can style bids vs asks.
  const items = [
    ...asks.filter(it => it[3]).map(it => [...it, ASKS]),
    ...bids.filter(it => it[3]).map(it => [...it, BIDS]),
  ];
  // Linear scan for the entry minimizing |entry price - price|.
  let closestIndex = -1;
  let closestDistance = Number.MAX_VALUE;
  for (let i = 0; i < items.length; i++) {
    // @ts-ignore
    const dist = Math.abs(items[i][PRICE_INDEX] - price);
    if (dist < closestDistance) {
      closestIndex = i;
      closestDistance = dist;
    }
  }
  if (closestIndex !== -1) {
    // Accumulate trade cost over contiguous same-side entries, walking
    // backwards from the closest entry.
    let cost = ZERO;
    const type = items[closestIndex][4];
    for (let i = closestIndex; items[i] && items[i][4] === type; i--) {
      const scaledPrice = createBigNumber(items[i][1]).minus(marketMin);
      const long = createBigNumber(items[i][2]).times(scaledPrice);
      // For bids, cost is the complement: range * amount - long cost.
      const tradeCost =
        type === ASKS
          ? long
          : marketRange.times(items[i][2].toString()).minus(long.toString());
      cost = cost.plus(tradeCost);
    }
    // @ts-ignore
    items[closestIndex].push(cost);
  } else {
    return null;
  }
  // final check to make sure not to snap to invisible prices
  if (drawParams) {
    const { xDomain } = drawParams;
    // NOTE(review): this local `price` shadows the function parameter.
    const price = Number(items[closestIndex][1]);
    if (items[closestIndex][4] === BIDS && price < xDomain[0]) {
      return null;
    } else if (items[closestIndex][4] === ASKS && price > xDomain[1]) {
      return null;
    }
  }
  return items[closestIndex];
}
// Draw the static chart furniture: top/bottom bounding lines, the midpoint
// marker, left and right y-axes, the x-axis (with market min/max forced in
// as ticks), and the vertical/horizontal grid lines.
function drawTicks(options) {
  const {
    drawParams,
    depthChart,
    orderBookKeys,
    marketMax,
    marketMin,
    hasOrders,
    marketDepth,
  } = options;
  // Chart Bounds
  depthChart
    .append('g')
    .attr('id', 'depth_chart_bounds')
    .selectAll('line')
    .data(new Array(2))
    .enter()
    .append('line')
    .attr('class', 'bounding-line')
    .attr('x1', 0)
    .attr('x2', drawParams.containerWidth)
    .attr('y1', (d, i) => (drawParams.containerHeight - CHART_DIM.bottom) * i)
    .attr('y2', (d, i) => (drawParams.containerHeight - CHART_DIM.bottom) * i);
  // Midpoint line (only when both sides of the book have entries)
  if (hasOrders && marketDepth.bids.length > 0 && marketDepth.asks.length > 0) {
    depthChart
      .append('line')
      .attr('class', 'tick-line--midpoint')
      .attr('x1', drawParams.xScale(orderBookKeys.mid.toNumber()))
      .attr('y1', 0)
      .attr('x2', drawParams.xScale(orderBookKeys.mid.toNumber()))
      .attr('y2', drawParams.containerHeight - CHART_DIM.bottom);
  }
  const tickCount = 5;
  // Draw LeftSide yAxis
  if (hasOrders) {
    const yTicks = depthChart.append('g').attr('id', 'depth_y_ticks');
    yTicks
      .call(
        d3
          .axisRight(drawParams.yScale)
          .tickValues(drawParams.yScale.ticks(tickCount))
          .tickSize(9)
          .tickPadding(4)
      )
      .attr('transform', `translate(-${CHART_DIM.left}, 6)`)
      .selectAll('text')
      .text(d => d)
      .select('path')
      .remove();
  }
  // X Axis: take d3's suggested ticks, but splice marketMin/marketMax in
  // at the right positions and drop ticks outside the market bounds.
  let hasAddedMin = false;
  let hasAddedMax = false;
  const { length } = drawParams.xScale.ticks(tickCount);
  const xTicks = drawParams.xScale.ticks(tickCount).reduce((acc, tickValue) => {
    // min check
    if (marketMin.eq(tickValue) || (!hasAddedMin && marketMin.lt(tickValue))) {
      hasAddedMin = true;
      acc.push(marketMin.toNumber());
    }
    // max check
    if (marketMax.eq(tickValue) || (!hasAddedMax && marketMax.lt(tickValue))) {
      hasAddedMax = true;
      acc.push(marketMax.toNumber());
    }
    if (marketMin.lt(tickValue) && marketMax.gt(tickValue)) acc.push(tickValue);
    // final max check (make sure we add max if this is the last tickValue)
    if (acc.length === length && !hasAddedMax) {
      hasAddedMax = true;
      acc.push(marketMax.toNumber());
    }
    return acc;
  }, []);
  depthChart
    .append('g')
    .attr('id', 'depth-x-axis')
    .attr(
      'transform',
      `translate( 0, ${drawParams.containerHeight - CHART_DIM.bottom})`
    )
    .call(
      d3
        .axisBottom(drawParams.xScale)
        .tickValues(xTicks)
        .tickSize(9)
        .tickPadding(4)
    )
    .selectAll('text')
    .text(d => `$${d}`)
    .select('path')
    .remove();
  // Draw xAxis lines (skip the first tick to avoid a line on the edge)
  drawParams.xScale.ticks(tickCount).forEach((tick: number) => {
    if (tick === drawParams.xScale.ticks(tickCount)[0]) {
      return;
    }
    depthChart
      .append('line')
      .attr('class', 'vertical-lines')
      .attr('x1', drawParams.xScale(tick))
      .attr('y1', CHART_DIM.tickOffset)
      .attr('x2', drawParams.xScale(tick))
      .attr('y2', drawParams.containerHeight - CHART_DIM.bottom);
  });
  // Draw yAxis Lines (skip the first tick, same reason)
  drawParams.yScale.ticks(tickCount).forEach((tick: number) => {
    if (tick === drawParams.yScale.ticks(tickCount)[0]) {
      return;
    }
    depthChart
      .append('line')
      .attr('class', 'horizontal-lines')
      .attr('x1', CHART_DIM.tickOffset)
      .attr('y1', drawParams.yScale(tick))
      .attr('x2', drawParams.containerWidth - CHART_DIM.right)
      .attr('y2', drawParams.yScale(tick));
  });
  // Draw RightSide yAxis
  // NOTE(review): this group reuses id 'depth_y_ticks' (same as the left
  // axis); duplicate ids are invalid HTML -- confirm nothing selects by id.
  if (hasOrders) {
    const yTicks2 = depthChart.append('g').attr('id', 'depth_y_ticks');
    yTicks2
      .call(
        d3
          .axisLeft(drawParams.yScale)
          .tickValues(drawParams.yScale.ticks(tickCount))
          .tickSize(9)
          .tickPadding(4)
      )
      .attr(
        'transform',
        `translate(${drawParams.containerWidth + CHART_DIM.right}, 6)`
      )
      .selectAll('text')
      .text(d => d)
      .select('path')
      .remove();
  }
}
// Draw the bid/ask step lines and their gradient-filled areas. Gradient
// defs are always appended; the lines/areas only when the book has orders.
function drawLines(options) {
  const {
    drawParams,
    depthChart,
    marketDepth,
    hasOrders,
    marketMin,
    marketMax,
  } = options;
  // Defs
  const chartDefs = depthChart.append('defs');
  // Fills
  const subtleGradientBid = chartDefs
    .append('linearGradient')
    .attr('id', 'subtleGradientBid')
    .attr('x1', 0)
    .attr('y1', 1)
    .attr('x2', 0)
    .attr('y2', 0);
  subtleGradientBid
    .append('stop')
    .attr('class', 'stop-top-bid')
    .attr('offset', '0');
  subtleGradientBid
    .append('stop')
    .attr('class', 'stop-bottom-bid')
    .attr('offset', '1');
  const subtleGradientAsk = chartDefs
    .append('linearGradient')
    .attr('x1', 0)
    .attr('y1', 0)
    .attr('x2', 0)
    .attr('y2', 1)
    .attr('id', 'subtleGradientAsk');
  subtleGradientAsk
    .append('stop')
    .attr('class', 'stop-bottom-ask')
    .attr('offset', '0');
  subtleGradientAsk
    .append('stop')
    .attr('class', 'stop-top-ask')
    // NOTE(review): offset '10' exceeds the valid 0..1 range (browsers clamp
    // to 1); the bid gradient uses '1' -- presumably a typo, confirm intent.
    .attr('offset', '10');
  if (!hasOrders) return;
  // Depth Line: step function over [quantity, price] points.
  const depthLine = d3
    .line()
    .curve(d3.curveStepBefore)
    .x(d => drawParams.xScale(d[1]))
    .y(d => drawParams.yScale(d[0]));
  // NOTE: reverse() mutates the arrays in marketDepth in place.
  Object.keys(marketDepth).forEach(side => {
    depthChart
      .append('path')
      .data([marketDepth[side].reverse()])
      .attr('class', `depth-line-${side} outcome-line-${side}`)
      .attr('d', depthLine);
  });
  // Filled areas: bids fill toward marketMin, asks toward marketMax.
  const areaBid = d3
    .area()
    .curve(d3.curveStepBefore)
    .x0(d => drawParams.xScale(d[1]))
    .x1(d => drawParams.xScale(marketMin))
    .y(d => drawParams.yScale(d[0]));
  const areaAsk = d3
    .area()
    .curve(d3.curveStepBefore)
    .x0(d => drawParams.xScale(d[1]))
    .x1(d => drawParams.xScale(marketMax))
    .y(d => drawParams.yScale(d[0]));
  Object.keys(marketDepth).forEach(side => {
    depthChart
      .append('path')
      .data([marketDepth[side]])
      .classed(`filled-subtle-${side}`, true)
      .attr('d', side === BIDS ? areaBid : areaAsk);
  });
}
/**
 * Build the (initially hidden) hover widgets: a tooltip with label/value
 * columns for price, volume and cost, plus a crosshair group containing a
 * dot, its outline, and horizontal/vertical crosshair lines. All elements
 * are positioned and shown later by the hover handlers.
 */
function setupCrosshairs(options) {
  const { depthChart } = options;
  // Tooltip container: a foreignObject so plain HTML divs can live in SVG.
  const tooltip = depthChart
    .append('foreignObject')
    .attr('id', 'hovered_tooltip_container')
    .style('display', 'none');
  const tooltipDiv = tooltip
    .append('div')
    .attr('id', 'hovered_tooltip')
    .attr('class', 'hovered_tooltip_div');
  // Two parallel columns: static labels on the left, values on the right.
  const [labels, values] = ['labels', 'values'].map(kind =>
    tooltipDiv
      .append('div')
      .attr('id', `hovered_tooltip_${kind}`)
      .attr('class', `hovered_tooltip_${kind}`)
  );
  [
    ['price_label', 'Price'],
    ['volume_label', 'Volume'],
    ['cost_label', 'Cost'],
  ].forEach(([id, text]) =>
    labels
      .append('div')
      .attr('id', id)
      .html(text)
  );
  ['price_value', 'volume_value', 'cost_value'].forEach(id =>
    values.append('div').attr('id', id)
  );
  // Crosshair group, hidden until the pointer enters the chart overlay.
  const crosshair = depthChart
    .append('g')
    .attr('id', 'crosshairs')
    .attr('class', 'line')
    .style('display', 'none');
  // Snapped-point marker: inner dot plus larger outline circle.
  [
    ['crosshairDot', 3],
    ['crosshairDotOutline', 8],
  ].forEach(([id, radius]) =>
    crosshair
      .append('svg:circle')
      .attr('id', id)
      .attr('r', radius)
      .attr('stroke', '#E9E5F2')
      .attr('fill', '#E9E5F2')
      .attr('class', id)
  );
  // X Crosshair (vertical line; kept hidden until price is in range)
  crosshair
    .append('line')
    .attr('id', 'crosshairX')
    .attr('class', 'crosshair')
    .style('display', 'none');
  // Y Crosshair (horizontal line)
  crosshair
    .append('line')
    .attr('id', 'crosshairY')
    .attr('class', 'crosshair');
}
// Attach an invisible full-size overlay rect that drives all pointer
// interaction: showing/hiding crosshairs, snapping the hovered price to the
// nearest completely-filling order, positioning the tooltip (flipping it
// near the edges), and turning clicks into order-form selections.
function attachHoverClickHandlers(options) {
  const {
    drawParams,
    depthChart,
    marketDepth,
    pricePrecision,
    marketMin,
    marketMax,
    updateHoveredPrice,
    updateSelectedOrderProperties,
  } = options;
  depthChart
    .append('rect')
    .attr('class', 'overlay')
    .attr('width', drawParams.containerWidth)
    .attr('height', drawParams.containerHeight)
    .on('mouseover', () => d3.select('#crosshairs').style('display', null))
    .on('mouseout', () => {
      // Reset hover state: clear hovered price, line emphasis and colors.
      updateHoveredPrice(null);
      d3.select('.depth-line-bids').attr('stroke-width', 1);
      d3.select('.depth-line-asks').attr('stroke-width', 1);
      d3.select('#crosshairX').attr('class', 'crosshair');
      d3.select('#crosshairY').attr('class', 'crosshair');
    })
    .on('mousemove', () => {
      const mouse = d3.mouse(d3.select('#depth_chart').node());
      const asksDepthLine = '.depth-line-asks';
      const bidsDepthLine = '.depth-line-bids';
      // Determine closest order by inverting the mouse x-position through
      // the price scale.
      const hoveredPrice = drawParams.xScale
        .invert(mouse[0])
        .toFixed(pricePrecision);
      const nearestFillingOrder = nearestCompletelyFillingOrder(
        hoveredPrice,
        marketDepth,
        marketMin,
        marketMax,
        drawParams
      );
      if (nearestFillingOrder === null) return;
      // Emphasize the hovered side's depth line (index 4 is the side tag).
      d3.select(bidsDepthLine).attr(
        'stroke-width',
        nearestFillingOrder[4] === ASKS ? 1 : 2
      );
      d3.select(asksDepthLine).attr(
        'stroke-width',
        nearestFillingOrder[4] === ASKS ? 2 : 1
      );
      d3.select('#crosshairX').attr(
        'class',
        `crosshair-${nearestFillingOrder[4]}`
      );
      d3.select('#crosshairY').attr(
        'class',
        `crosshair-${nearestFillingOrder[4]}`
      );
      updateHoveredPrice(hoveredPrice);
      // Fill the tooltip: price, volume, cumulative cost (index 5).
      const { xScale, yScale } = drawParams;
      d3.select('#price_label').attr('class', `${nearestFillingOrder[4]}`);
      d3.select('#volume_label').attr('class', `${nearestFillingOrder[4]}`);
      d3.select('#cost_label').attr('class', `${nearestFillingOrder[4]}`);
      d3.select('#price_value').html(
        `$${createBigNumber(nearestFillingOrder[1]).toFixed(pricePrecision)}`
      );
      d3.select('#volume_value').html(
        `${createBigNumber(nearestFillingOrder[0]).toFixed(pricePrecision)}`
      );
      d3.select('#cost_value').html(
        // @ts-ignore
        `$${nearestFillingOrder[5].toFixed(pricePrecision)}`
      );
      // 27 comes from the padding/border/margins so 1rem total for horz
      // padding .5 rem for label/value separation, + borderpx of 3 (2 on line
      // side, 1 on the other)
      // const defaultHeight = 51;
      // const defaultWidth = 113;
      const borderPadding = 2;
      const verticalSpacing = 24;
      // Measure the rendered tooltip to decide whether it fits.
      const testWidth =
        d3.select('#hovered_tooltip_values').node().clientWidth +
        d3.select('#hovered_tooltip_labels').node().clientWidth +
        27 +
        borderPadding;
      const testHeight =
        d3.select('#hovered_tooltip').node().clientHeight + verticalSpacing;
      // Flip the tooltip to the other side of the point when it would
      // otherwise overflow the left/top region of the chart.
      let quarterX = xScale(drawParams.xDomain[1] * 0.1);
      let quarterY = yScale(drawParams.yDomain[1] * 0.9);
      quarterX = quarterX > testWidth ? quarterX : testWidth;
      quarterY = quarterY > testHeight ? quarterY : testHeight;
      const flipX = quarterX > xScale(nearestFillingOrder[1]);
      const flipY = quarterY > yScale(nearestFillingOrder[0]);
      d3.select('#hovered_tooltip').attr(
        'class',
        `hovered_tooltip_div ${nearestFillingOrder[4]} ${flipX ? 'flip' : ''}`
      );
      const offset = {
        hoverToolTipX: flipX ? 0 : testWidth * -1,
        hoverToolTipY: flipY ? verticalSpacing : testHeight * -1,
      };
      const tooltip = d3
        .select('#hovered_tooltip_container')
        .style('display', 'flex')
        .style('height', testHeight)
        .style('width', testWidth);
      tooltip
        .attr('x', xScale(nearestFillingOrder[1]) + offset.hoverToolTipX)
        .attr('y', yScale(nearestFillingOrder[0]) + offset.hoverToolTipY);
    })
    .on('click', () => {
      const mouse = d3.mouse(d3.select('#depth_chart').node());
      const orderPrice = drawParams.xScale
        .invert(mouse[0])
        .toFixed(pricePrecision);
      const nearestFillingOrder = nearestCompletelyFillingOrder(
        orderPrice,
        marketDepth,
        marketMin,
        marketMax,
        drawParams
      );
      // Only select when the click maps to a snappable, in-bounds price.
      // Clicking a bid means taking it (SELL); clicking an ask means BUY.
      if (
        nearestFillingOrder != null &&
        createBigNumber(orderPrice).gte(marketMin) &&
        createBigNumber(orderPrice).lte(marketMax)
      ) {
        updateSelectedOrderProperties({
          orderQuantity: nearestFillingOrder[0],
          orderPrice: nearestFillingOrder[1],
          selectedNav: nearestFillingOrder[4] === BIDS ? SELL : BUY,
        });
      }
    });
}
import { strict as assert } from "assert";
import fs from "fs";
import path from "path";
import { ZonedDateTime } from "@js-joda/core";
import got, {
Response as GotResponse,
OptionsOfTextResponseBody as GotOptions
} from "got";
import { test, suite, suiteSetup, suiteTeardown } from "mocha";
import yaml from "js-yaml";
import { httpcl, mtimeDeadlineInSeconds, mtime } from "@opstrace/utils";
import {
log,
globalTestSuiteSetupOnce,
enrichHeadersWithAuthToken,
httpTimeoutSettings,
logHTTPResponse,
timestampToNanoSinceEpoch,
CLUSTER_BASE_URL,
OPSTRACE_INSTANCE_DNS_NAME,
TEST_REMOTE_ARTIFACT_DIRECTORY,
CI_LOGIN_EMAIL,
CI_LOGIN_PASSWORD
} from "./testutils";
// Set debug mode for playwright, before importing it
// this does not work, too late probably. set via Makefile entrypoint
// process.env.DEBUG = "pw:api";
import type {
ChromiumBrowser,
Cookie,
BrowserContext,
Page,
ConsoleMessage
} from "playwright";
import { chromium } from "playwright";
import { sleep } from "@opstrace/utils";
// Shared Chromium instance: launched in suiteSetup(), closed in suiteTeardown().
let BROWSER: ChromiumBrowser;
/** Resolve `filename` inside the test-artifact output directory. */
function artipath(filename: string) {
  const artifactDir = TEST_REMOTE_ARTIFACT_DIRECTORY;
  return path.join(artifactDir, filename);
}
// A data structure like this:
// 2021-05-05T12:16:05.105Z info: cookies:
// [
// {
// "sameSite": "None",
// "name": "_legacy_auth0.is.authenticated",
// "value": "true",
// "domain": "jpload-1620131593.opstrace.io",
// "path": "/",
// "expires": 1620303358,
// "httpOnly": false,
// "secure": true
// },
// {
// ...
// ]
// Set by performLoginFlowWithRetry() after a successful login; reused by
// individual tests to send authenticated requests.
let COOKIES_AFTER_LOGIN: Cookie[] | undefined;
// One attempt at the full browser login flow: open the instance root page,
// click "Log in", fill the CI credentials into the Auth0 form, submit, and
// wait for the post-login landing page. Screenshots are written to the
// artifact directory at each stage. Throws (via playwright timeouts) when
// any step fails; the caller retries.
async function performLoginFlowCore(page: Page) {
  // Auth0-based login flow can be slow, increase timeout. Goal:
  // Make login flow faster, reduce timeout again here.
  page.setDefaultNavigationTimeout(60_000);
  page.setDefaultTimeout(60_000);
  log.info("page.goto(%s)", CLUSTER_BASE_URL);
  await page.goto(CLUSTER_BASE_URL);
  log.info("`load` event");
  // This is supposed to wait for the "log in" button. Specifically, this:
  //  <button class="MuiButtonBase-root Mui... MuiButton-sizeLarge" tabindex="0" type="button">
  //  <span class="MuiButton-label">Log in</span>
  log.info('page.waitForSelector("css=button")');
  await page.waitForSelector("css=button");
  log.info("page.screenshot(), with login button");
  await page.screenshot({
    path: artipath("uishot-rootpage.png")
  });
  log.info('page.click("text=Log in")');
  await page.click("text=Log in");
  // Wait for CI-specific username/pw login form to appear
  await page.waitForSelector("text=Don't remember your password?");
  await page.fill("css=input[type=email]", CI_LOGIN_EMAIL);
  await page.fill("css=input[type=password]", CI_LOGIN_PASSWORD);
  await page.screenshot({
    path: artipath("uishot-auth0-login-page.png")
  });
  await page.click("css=button[type=submit]");
  // The first view after successful login is expected to be the details page
  // for the `system` tenant, showing a link to Grafana.
  await page.waitForSelector("text=Getting Started");
  await page.screenshot({
    path: artipath("uishot-after-auth0-login.png")
  });
}
/**
 * Build `got` request options carrying the given Cookie header value.
 * Non-2xx responses are returned (not thrown), TLS verification is skipped
 * (test instances may use self-signed certificates), and conservative
 * connect/request timeouts are applied.
 */
function httpClientOptsWithCookie(cookie_header_value: string) {
  const opts = {
    // Let callers inspect non-2xx responses instead of catching errors.
    throwHttpErrors: false,
    timeout: {
      connect: 5000,
      request: 30000
    },
    headers: {
      Cookie: cookie_header_value
    },
    https: { rejectUnauthorized: false }
  };
  return opts as GotOptions;
}
// Poll `url` (10 s between attempts, up to ~35 minutes) until it returns
// the expected HTTP status code, logging every response along the way.
// Request errors are swallowed and retried; throws only when the deadline
// passes without a matching response.
async function waitForResp(
  url: string,
  httpopts: any,
  expectedStatusCode = 200
): Promise<GotResponse<string>> {
  const maxWaitSeconds = 2100;
  const deadline = mtimeDeadlineInSeconds(maxWaitSeconds);
  log.info(
    "Waiting for a response with status code %s to be returned by %s",
    expectedStatusCode,
    url
  );
  while (true) {
    if (mtime() > deadline) {
      throw new Error(`Expectation not fulfilled within ${maxWaitSeconds} s`);
    }
    let resp: GotResponse<string> | undefined;
    try {
      resp = await httpcl(url, httpopts);
    } catch (err: any) {
      // Transient network errors are expected while the service comes up.
      log.info(`request failed: ${err}`);
    }
    if (resp !== undefined) {
      logHTTPResponse(resp);
      if (resp.statusCode === expectedStatusCode) {
        log.info(
          "got response with expected status code %s",
          expectedStatusCode
        );
        return resp;
      }
    }
    log.info("outer retry: try again in 10 s");
    await sleep(10);
  }
}
/**
 * Run the browser login flow with retries (up to ~200 s). On each failed
 * attempt a screenshot and the browser console log are written to the
 * artifact directory; after success the console log is dumped once and the
 * session cookies are captured into COOKIES_AFTER_LOGIN for reuse by tests.
 *
 * Fixes vs. the previous version: the "after-login-success" console dump
 * was written *inside* the retry loop (i.e. after every failed attempt and
 * never after success), and the retry log message claimed 10 s while the
 * code slept 20 s.
 */
async function performLoginFlowWithRetry(browsercontext: BrowserContext) {
  log.info("context.newPage()");
  // Assume that newPage() call 'never' fails as of transient issues (don't put
  // into retry logic)
  const page = await browsercontext.newPage();

  // Collect browser console output so it can be dumped as a build artifact.
  const consoleLines: string[] = [];
  page.on("console", (message: ConsoleMessage) => {
    consoleLines.push(message.text());
  });

  // Dump the accumulated browser console lines to an artifact file.
  const writeConsoleLog = (filename: string) => {
    const consoleText = consoleLines.join("\n");
    const fp = artipath(filename);
    fs.writeFileSync(fp, consoleText, { encoding: "utf-8" });
    log.info(`wrote browser console content to ${fp}`);
  };

  const maxWaitSeconds = 200;
  const deadline = mtimeDeadlineInSeconds(maxWaitSeconds);
  log.info(`Trying to log in with retries, for at most ${maxWaitSeconds} s`);
  let attempt = 0;
  while (true) {
    attempt += 1;
    if (mtime() > deadline) {
      throw new Error(`Login did not succeed within ${maxWaitSeconds} s`);
    }
    log.info(`login attempt ${attempt}`);
    try {
      await performLoginFlowCore(page);
      break;
    } catch (err: any) {
      // It's difficult to find the 'right' way to catch "all" errors emitted
      // by Playwright. There is playwright.errors but it only knows
      // TimeoutError. What all errors seem to have in common that I have seen
      // so far is that they show the method name of the method that threw the
      // errors. In the function being called above this is always `page....`,
      // .e.g `page.waitForSelector: Timeout 60000ms exceeded.` or `page.goto:
      // net::ERR_NETWORK_CHANGED at...`. Filter for that.
      if (!err.message.includes("page.")) {
        // This does not seem to be thrown by playwright, might be a
        // programming error. Don't consider retryable, re-throw.
        log.info("re-throw what appears to be a non-playwright error");
        throw err;
      }
      log.info(
        `page.screenshot() because of what seems to be a playwright err: ${err}`
      );
      const p = artipath(`uishot-login-err-attempt-${attempt}.png`);
      try {
        await page.screenshot({ path: p });
      } catch (innerErr: any) {
        log.warning(`ignoring error in error handler: ${innerErr}`);
      }
      log.info(`wrote screenshot to ${p}`);
      writeConsoleLog(`browser-console-login-err-attempt-${attempt}.log`);
    }
    log.info("try again in 20 s");
    await sleep(20);
  }

  // Success path: dump the console once, after leaving the retry loop.
  writeConsoleLog(`browser-console-after-login-success.log`);

  log.info(
    "seems like the login flow succeeded -- look up cookies to retain authentication state"
  );
  const cookies = await browsercontext.cookies(CLUSTER_BASE_URL);
  log.info("cookies:\n%s", JSON.stringify(cookies, null, 2));
  // expose globally
  COOKIES_AFTER_LOGIN = cookies;
}
suite("test_ui_api", function () {
suiteSetup(async function () {
log.info("suite setup");
globalTestSuiteSetupOnce();
log.info("chromium.launch()");
BROWSER = await chromium.launch({
// headless: false, // set to false to see browser on your desktop
args: [
// https://github.com/microsoft/playwright/blob/761bd78879c83ed810ae38ef39513b2d874badb1/docs/ci.md#docker
"--disable-dev-shm-usage",
// https://github.com/microsoft/playwright/issues/4761
"--disable-gpu"
]
});
log.info("browser.newContext()");
await performLoginFlowWithRetry(
await BROWSER.newContext({ ignoreHTTPSErrors: true })
);
// Perform login flow in setup, so that the authentications state (namely,
// cookies), can be re-used in individual tests.
log.info("suite setup done");
});
suiteTeardown(async function () {
log.info("suite teardown");
log.info("browser.close()");
await BROWSER.close();
log.info("suite teardown done");
});
test("test_grafana_datasource_proxy_loki_get_labels", async function () {
assert(COOKIES_AFTER_LOGIN);
const cookie_header_value = COOKIES_AFTER_LOGIN.map(
c => `${c.name}=${c.value}`
).join("; ");
// This relies on proxy/2 pointing to Loki. Depends on the order of
// data sources defined in grafanaDatasources.ts
const url = `https://system.${OPSTRACE_INSTANCE_DNS_NAME}/grafana/api/datasources/proxy/2/loki/api/v1/label`;
const ts = ZonedDateTime.now();
// Allow for testing clusters that started a couple of days ago
const searchStart = ts.minusHours(100);
const searchEnd = ts.plusHours(1);
const queryParams = {
start: timestampToNanoSinceEpoch(searchStart),
end: timestampToNanoSinceEpoch(searchEnd)
};
const httpopts = httpClientOptsWithCookie(cookie_header_value);
httpopts.searchParams = new URLSearchParams(queryParams);
const resp = await waitForResp(url, httpopts);
const r = JSON.parse(resp.body);
if (r.data !== undefined) {
const labels = r.data as Array<string>;
log.info("r.data: %s", r.data);
if (labels.includes("k8s_container_name")) {
log.info("found `k8s_container_name` label, success");
return;
}
}
throw new Error("unexpected response");
});
test("test_grafana_datasource_proxy_cortex_get_rules_legacy", async function () {
assert(COOKIES_AFTER_LOGIN);
const cookie_header_value = COOKIES_AFTER_LOGIN.map(
c => `${c.name}=${c.value}`
).join("; ");
// This relies on proxy/1 pointing to Cortex. Depends on the order of
// data sources defined in grafanaDatasources.ts
// Grafana 8 accesses /rules of the Cortex ruler
// "GET /rules HTTP/1.1" 404 21 "-" "Grafana/8.0.0"
const url = `https://system.${OPSTRACE_INSTANCE_DNS_NAME}/grafana/api/datasources/proxy/1/rules`;
const resp = await waitForResp(
url,
httpClientOptsWithCookie(cookie_header_value)
);
log.info("got rules doc (first 500 chars): %s", resp.body.slice(0, 500));
});
test("test_grafana_datasource_proxy_loki_get_rules", async function () {
  assert(COOKIES_AFTER_LOGIN);
  // Build a Cookie header from the authenticated browser session.
  const cookieHeader = COOKIES_AFTER_LOGIN.map(
    c => `${c.name}=${c.value}`
  ).join("; ");
  // This relies on proxy/2 pointing to Loki. Depends on the order of
  // data sources defined in grafanaDatasources.ts
  // Documented with "List all rules configured for the authenticated tenant"
  const rulesUrl = `https://system.${OPSTRACE_INSTANCE_DNS_NAME}/grafana/api/datasources/proxy/2/loki/api/v1/rules`;
  // expect 404 response with 'no rule groups found' in body
  const resp = await waitForResp(
    rulesUrl,
    httpClientOptsWithCookie(cookieHeader),
    404
  );
  log.info("got resp with body: %s", resp.body);
  if (!resp.body.includes("no rule groups found")) {
    throw new Error("unexpected body in 404 response");
  }
  log.info("saw expected response body, success");
});
test("test_grafana_datasource_proxy_loki_get_alerts_legacy", async function () {
  assert(COOKIES_AFTER_LOGIN);
  // Assemble the session cookie header for the proxied request.
  const cookieHeader = COOKIES_AFTER_LOGIN.map(
    c => `${c.name}=${c.value}`
  ).join("; ");
  // This relies on proxy/2 pointing to Loki. Depends on the order of
  // data sources defined in grafanaDatasources.ts
  // GET /prometheus/api/v1/alerts
  // Prometheus-compatible rules endpoint to list all active alerts.
  const alertsUrl = `https://system.${OPSTRACE_INSTANCE_DNS_NAME}/grafana/api/datasources/proxy/2/prometheus/api/v1/alerts`;
  const resp = await waitForResp(alertsUrl, httpClientOptsWithCookie(cookieHeader));
  log.info("got alerts doc: %s", resp.body);
});
// Creates a tenant via the cluster's GraphQL endpoint (reusing the cookie
// state from the UI login in suiteSetup()), then polls the new tenant's
// Cortex API with a custom bearer token until it responds 200.
test("create_tenant_and_use_custom_authn_token", async function () {
  // Note(JP): pragmatic first step: synthetically emit HTTP request with
  // `got`, do not actually let the browser emit it. That was easier to
  // implement than navigating the browser (complications: the PLUS icon for
  // tenants does not have a distinct css class or property set, etc). Use
  // the suiteSetup() here to obtain valid authentication state, and then
  // reuse that state by sending all relevant cookies. Note that technically
  // the test_ui.ts was/is meant to do only _actual_ browser interaction.
  // This test here isn't doing that. It's in here for now for dependency
  // management: using the state created by setupSuite(). It's also in here
  // so that it can be replaced by the UI-based flow for creating the tenant!
  assert(COOKIES_AFTER_LOGIN);
  // Both env vars below are required inputs; fail fast with a clear message.
  if (process.env.TENANT_RND_NAME_FOR_TESTING_ADD_TENANT === undefined) {
    throw new Error(
      "env var TENANT_RND_NAME_FOR_TESTING_ADD_TENANT is not defined but required"
    );
  }
  if (process.env.TENANT_RND_AUTHTOKEN === undefined) {
    throw new Error(
      "env var TENANT_RND_AUTHTOKEN is not defined but required"
    );
  }
  const tenantName = process.env.TENANT_RND_NAME_FOR_TESTING_ADD_TENANT;
  const tenantAuthToken = process.env.TENANT_RND_AUTHTOKEN;
  log.info("create tenant with name %s", tenantName);
  // Flatten the session cookies into a single Cookie header value.
  const cookie_header_value = COOKIES_AFTER_LOGIN.map(
    c => `${c.name}=${c.value}`
  ).join("; ");
  // NOTE(review): this logs session cookies in clear text — presumably
  // acceptable for short-lived test clusters; confirm for CI log retention.
  log.info("cookie header value: %s", cookie_header_value);
  const url = `${CLUSTER_BASE_URL}/_/graphql`;
  const headers = {
    "Content-Type": "application/json",
    Cookie: cookie_header_value
  };
  // GraphQL mutation inserting the new tenant row.
  const bodyObj = {
    query:
      "mutation CreateTenants($tenants: [tenant_insert_input!]!) {\n insert_tenant(objects: $tenants) {\n returning {\n name\n }\n }\n}\n",
    variables: {
      tenants: [
        {
          name: tenantName
        }
      ]
    }
  };
  const response = await got.post(url, {
    body: JSON.stringify(bodyObj, null, 2),
    throwHttpErrors: false,
    headers: headers,
    timeout: httpTimeoutSettings,
    https: { rejectUnauthorized: false } // skip tls cert verification
  });
  logHTTPResponse(response);
  assert.strictEqual(response.statusCode, 200);
  // GraphQL: 200 OK response does not mean that no error happened.
  const rdata = JSON.parse(response.body);
  assert(rdata.errors === undefined);
  // NOTE(review): logs the tenant auth token — same retention caveat as the
  // cookie logging above.
  log.info("using tenant auth token: %s", tenantAuthToken);
  const httpopts: GotOptions = {
    method: "GET",
    // Some HTTP error responses are expected. Do this handling work manually.
    throwHttpErrors: false,
    headers: { Authorization: `Bearer ${tenantAuthToken}` },
    timeout: httpTimeoutSettings,
    https: { rejectUnauthorized: false } // skip tls cert verification
  };
  // Poll the new tenant's Cortex labels endpoint until it responds 200
  // (tenant provisioning is asynchronous), with a hard deadline.
  const maxWaitSeconds = 2100;
  const deadline = mtimeDeadlineInSeconds(maxWaitSeconds);
  log.info(
    "Waiting for API for new tenant to become available, deadline in %ss",
    maxWaitSeconds
  );
  while (true) {
    if (mtime() > deadline) {
      throw new Error(`Expectation not fulfilled within ${maxWaitSeconds} s`);
    }
    let resp: GotResponse<string> | undefined;
    try {
      resp = await httpcl(
        `https://cortex.${tenantName}.${OPSTRACE_INSTANCE_DNS_NAME}/api/v1/labels`,
        httpopts
      );
    } catch (err: any) {
      // Transient network/TLS/DNS errors are expected while the tenant's
      // endpoint is still coming up; log and retry.
      log.info(`request failed: ${err}`);
    }
    if (resp !== undefined) {
      logHTTPResponse(resp);
      if (resp.statusCode === 200) {
        // Tenant API is reachable and authenticated: test passed.
        return;
      }
    }
    log.info("outer retry: try again in 10 s");
    await sleep(10);
  }
});
// Submits a Cortex runtime-config document (YAML) through the cluster's
// config endpoint, then polls Cortex's /runtime_config until the change is
// visible, bounded by a deadline.
test("test_submit_cortex_runtime_config", async function () {
  // Same consideration as above: this test is here in this module for now
  // just because it's easy to use the authentication state after actual
  // UI-based login.
  assert(COOKIES_AFTER_LOGIN);
  // Flatten session cookies into one Cookie header value.
  const cookie_header_value = COOKIES_AFTER_LOGIN.map(
    c => `${c.name}=${c.value}`
  ).join("; ");
  log.info("cookie header value: %s", cookie_header_value);
  const url = `${CLUSTER_BASE_URL}/_/cortex/runtime_config`;
  const headers = {
    "Content-Type": "text/plain", // for bodyParser.text() express middleware
    Cookie: cookie_header_value
  };
  // Per-tenant ingestion-rate overrides; "tenantnamebar" is later used as
  // the marker string to detect that the config propagated.
  const bodyObj = {
    overrides: {
      tenantnamefoo: {
        ingestion_rate: 10000
      },
      tenantnamebar: {
        ingestion_rate: 10000
      }
    }
  };
  const response = await got.post(url, {
    body: yaml.dump(bodyObj),
    throwHttpErrors: false,
    headers: headers,
    timeout: httpTimeoutSettings,
    https: { rejectUnauthorized: false } // skip tls cert verification
  });
  // NOTE(review): the POST status code is only logged, never asserted — if
  // the submission fails, this test burns the full 240 s polling below
  // before failing with a generic timeout. Consider asserting success here.
  logHTTPResponse(response);
  const maxWaitSeconds = 240;
  const deadline = mtimeDeadlineInSeconds(maxWaitSeconds);
  log.info(
    "Waiting for Cortex runtime config to reflect change, deadline in %ss",
    maxWaitSeconds
  );
  const cortexRuntimeCfgUrl = `https://cortex.system.${OPSTRACE_INSTANCE_DNS_NAME}/runtime_config`; //?mode=diff`;
  // This is exposed through a proxy to
  //const cortexRuntimeCfgUrl = `${CLUSTER_BASE_URL}/_/cortex/runtime_config`;
  const httpopts: GotOptions = {
    method: "GET",
    // Some HTTP error responses are expected. Do this handling work manually.
    throwHttpErrors: false,
    headers: enrichHeadersWithAuthToken(cortexRuntimeCfgUrl, {}),
    //headers: { Cookie: cookie_header_value },
    timeout: httpTimeoutSettings,
    https: { rejectUnauthorized: false } // skip tls cert verification
  };
  // Poll until the marker tenant name shows up in the runtime config.
  while (true) {
    if (mtime() > deadline) {
      throw new Error(`Expectation not fulfilled within ${maxWaitSeconds} s`);
    }
    let resp: GotResponse<string> | undefined;
    try {
      resp = await httpcl(cortexRuntimeCfgUrl, httpopts);
    } catch (err: any) {
      // Transient request failures are expected; log and retry.
      log.info(`request failed: ${err}`);
    }
    if (resp !== undefined) {
      logHTTPResponse(resp);
      if (resp.body.includes("tenantnamebar")) {
        log.info('found "tenantnamebar" in response body:\n%s', resp.body);
        log.info("success criterion, leave waiting loop");
        return;
      } else {
        log.debug(`response: ${resp.body}`);
        log.info('response does not yet contain "tenantnamebar"');
      }
    }
    log.info("outer retry: try again in 10 s");
    await sleep(10);
  }
});
}); | the_stack |
import { BaseResource, CloudError } from "ms-rest-azure";
import * as moment from "moment";
export {
BaseResource,
CloudError
};
// NOTE(review): these declarations look machine-generated from the Azure
// Compute API spec (ms-rest-azure models) — prefer regenerating over
// hand-editing; confirm before modifying by hand.
/**
 * Describes the properties of a Compute Operation value.
 */
export interface ComputeOperationValue {
  /**
   * The origin of the compute operation.
   */
  readonly origin?: string;
  /**
   * The name of the compute operation.
   */
  readonly name?: string;
  /**
   * The display name of the compute operation.
   */
  readonly operation?: string;
  /**
   * The display name of the resource the operation applies to.
   */
  readonly resource?: string;
  /**
   * The description of the operation.
   */
  readonly description?: string;
  /**
   * The resource provider for the operation.
   */
  readonly provider?: string;
}
/**
 * Instance view status.
 */
export interface InstanceViewStatus {
  /**
   * The status code.
   */
  code?: string;
  /**
   * The level code. Possible values include: 'Info', 'Warning', 'Error'
   */
  level?: string;
  /**
   * The short localizable label for the status.
   */
  displayStatus?: string;
  /**
   * The detailed status message, including for alerts and error messages.
   */
  message?: string;
  /**
   * The time of the status.
   */
  time?: Date;
}
/**
 * A reference to another Azure resource by id.
 */
export interface SubResource extends BaseResource {
  /**
   * Resource Id
   */
  id?: string;
}
// NOTE(review): machine-generated Azure Compute model declarations —
// confirm before editing by hand.
/**
 * Describes a virtual machine scale set sku.
 */
export interface Sku {
  /**
   * The sku name.
   */
  name?: string;
  /**
   * Specifies the tier of virtual machines in a scale set.<br /><br /> Possible Values:<br /><br
   * /> **Standard**<br /><br /> **Basic**
   */
  tier?: string;
  /**
   * Specifies the number of virtual machines in the scale set.
   */
  capacity?: number;
}
/**
 * The Resource model definition.
 */
export interface Resource extends BaseResource {
  /**
   * Resource Id
   */
  readonly id?: string;
  /**
   * Resource name
   */
  readonly name?: string;
  /**
   * Resource type
   */
  readonly type?: string;
  /**
   * Resource location
   */
  location: string;
  /**
   * Resource tags
   */
  tags?: { [propertyName: string]: string };
}
/**
 * Specifies information about the availability set that the virtual machine should be assigned to.
 * Virtual machines specified in the same availability set are allocated to different nodes to
 * maximize availability. For more information about availability sets, see [Manage the
 * availability of virtual
 * machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-manage-availability?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json).
 * <br><br> For more information on Azure planned maintenance, see [Planned maintenance for virtual
 * machines in
 * Azure](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-planned-maintenance?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json)
 * <br><br> Currently, a VM can only be added to availability set at creation time. An existing VM
 * cannot be added to an availability set.
 */
export interface AvailabilitySet extends Resource {
  /**
   * Update Domain count.
   */
  platformUpdateDomainCount?: number;
  /**
   * Fault Domain count.
   */
  platformFaultDomainCount?: number;
  /**
   * A list of references to all virtual machines in the availability set.
   */
  virtualMachines?: SubResource[];
  /**
   * The resource status information.
   */
  readonly statuses?: InstanceViewStatus[];
  /**
   * Sku of the availability set, only name is required to be set. See AvailabilitySetSkuTypes for
   * possible set of values. Use 'Aligned' for virtual machines with managed disks and 'Classic'
   * for virtual machines with unmanaged disks. Default value is 'Classic'.
   */
  sku?: Sku;
}
/**
 * The Update Resource model definition.
 */
export interface UpdateResource extends BaseResource {
  /**
   * Resource tags
   */
  tags?: { [propertyName: string]: string };
}
/**
 * Specifies information about the availability set that the virtual machine should be assigned to.
 * Only tags may be updated.
 */
export interface AvailabilitySetUpdate extends UpdateResource {
  /**
   * Update Domain count.
   */
  platformUpdateDomainCount?: number;
  /**
   * Fault Domain count.
   */
  platformFaultDomainCount?: number;
  /**
   * A list of references to all virtual machines in the availability set.
   */
  virtualMachines?: SubResource[];
  /**
   * The resource status information.
   */
  readonly statuses?: InstanceViewStatus[];
  /**
   * Sku of the availability set
   */
  sku?: Sku;
}
// NOTE(review): machine-generated Azure Compute model declarations —
// confirm before editing by hand.
/**
 * Describes the properties of a VM size.
 */
export interface VirtualMachineSize {
  /**
   * The name of the virtual machine size.
   */
  name?: string;
  /**
   * The number of cores supported by the virtual machine size.
   */
  numberOfCores?: number;
  /**
   * The OS disk size, in MB, allowed by the virtual machine size.
   */
  osDiskSizeInMB?: number;
  /**
   * The resource disk size, in MB, allowed by the virtual machine size.
   */
  resourceDiskSizeInMB?: number;
  /**
   * The amount of memory, in MB, supported by the virtual machine size.
   */
  memoryInMB?: number;
  /**
   * The maximum number of data disks that can be attached to the virtual machine size.
   */
  maxDataDiskCount?: number;
}
/**
 * Describes a Virtual Machine Extension Image.
 */
export interface VirtualMachineExtensionImage extends Resource {
  /**
   * The operating system this extension supports.
   */
  operatingSystem: string;
  /**
   * The type of role (IaaS or PaaS) this extension supports.
   */
  computeRole: string;
  /**
   * The schema defined by publisher, where extension consumers should provide settings in a
   * matching schema.
   */
  handlerSchema: string;
  /**
   * Whether the extension can be used on xRP VMScaleSets. By default existing extensions are
   * usable on scalesets, but there might be cases where a publisher wants to explicitly indicate
   * the extension is only enabled for CRP VMs but not VMSS.
   */
  vmScaleSetEnabled?: boolean;
  /**
   * Whether the handler can support multiple extensions.
   */
  supportsMultipleExtensions?: boolean;
}
/**
 * Virtual machine image resource information.
 */
export interface VirtualMachineImageResource extends SubResource {
  /**
   * The name of the resource.
   */
  name: string;
  /**
   * The supported Azure location of the resource.
   */
  location: string;
  /**
   * Specifies the tags that are assigned to the virtual machine. For more information about using
   * tags, see [Using tags to organize your Azure
   * resources](https://docs.microsoft.com/azure/azure-resource-manager/resource-group-using-tags.md).
   */
  tags?: { [propertyName: string]: string };
}
/**
 * The instance view of a virtual machine extension.
 */
export interface VirtualMachineExtensionInstanceView {
  /**
   * The virtual machine extension name.
   */
  name?: string;
  /**
   * Specifies the type of the extension; an example is "CustomScriptExtension".
   */
  type?: string;
  /**
   * Specifies the version of the script handler.
   */
  typeHandlerVersion?: string;
  /**
   * The resource status information.
   */
  substatuses?: InstanceViewStatus[];
  /**
   * The resource status information.
   */
  statuses?: InstanceViewStatus[];
}
// NOTE(review): machine-generated Azure Compute model declarations —
// confirm before editing by hand.
/**
 * Describes a Virtual Machine Extension.
 */
export interface VirtualMachineExtension extends Resource {
  /**
   * How the extension handler should be forced to update even if the extension configuration has
   * not changed.
   */
  forceUpdateTag?: string;
  /**
   * The name of the extension handler publisher.
   */
  publisher?: string;
  /**
   * Specifies the type of the extension; an example is "CustomScriptExtension".
   */
  virtualMachineExtensionType?: string;
  /**
   * Specifies the version of the script handler.
   */
  typeHandlerVersion?: string;
  /**
   * Indicates whether the extension should use a newer minor version if one is available at
   * deployment time. Once deployed, however, the extension will not upgrade minor versions unless
   * redeployed, even with this property set to true.
   */
  autoUpgradeMinorVersion?: boolean;
  /**
   * Json formatted public settings for the extension.
   */
  settings?: any;
  /**
   * The extension can contain either protectedSettings or protectedSettingsFromKeyVault or no
   * protected settings at all.
   */
  protectedSettings?: any;
  /**
   * The provisioning state, which only appears in the response.
   */
  readonly provisioningState?: string;
  /**
   * The virtual machine extension instance view.
   */
  instanceView?: VirtualMachineExtensionInstanceView;
}
/**
 * Describes a Virtual Machine Extension.
 */
export interface VirtualMachineExtensionUpdate extends UpdateResource {
  /**
   * How the extension handler should be forced to update even if the extension configuration has
   * not changed.
   */
  forceUpdateTag?: string;
  /**
   * The name of the extension handler publisher.
   */
  publisher?: string;
  /**
   * Specifies the type of the extension; an example is "CustomScriptExtension".
   */
  type?: string;
  /**
   * Specifies the version of the script handler.
   */
  typeHandlerVersion?: string;
  /**
   * Indicates whether the extension should use a newer minor version if one is available at
   * deployment time. Once deployed, however, the extension will not upgrade minor versions unless
   * redeployed, even with this property set to true.
   */
  autoUpgradeMinorVersion?: boolean;
  /**
   * Json formatted public settings for the extension.
   */
  settings?: any;
  /**
   * The extension can contain either protectedSettings or protectedSettingsFromKeyVault or no
   * protected settings at all.
   */
  protectedSettings?: any;
}
/**
 * The List Extension operation response
 */
export interface VirtualMachineExtensionsListResult {
  /**
   * The list of extensions
   */
  value?: VirtualMachineExtension[];
}
// NOTE(review): machine-generated Azure Compute model declarations —
// confirm before editing by hand.
/**
 * Used for establishing the purchase context of any 3rd Party artifact through MarketPlace.
 */
export interface PurchasePlan {
  /**
   * The publisher ID.
   */
  publisher: string;
  /**
   * The plan ID.
   */
  name: string;
  /**
   * Specifies the product of the image from the marketplace. This is the same value as Offer under
   * the imageReference element.
   */
  product: string;
}
/**
 * Contains the os disk image information.
 */
export interface OSDiskImage {
  /**
   * The operating system of the osDiskImage. Possible values include: 'Windows', 'Linux'
   */
  operatingSystem: string;
}
/**
 * Contains the data disk images information.
 */
export interface DataDiskImage {
  /**
   * Specifies the logical unit number of the data disk. This value is used to identify data disks
   * within the VM and therefore must be unique for each data disk attached to a VM.
   */
  readonly lun?: number;
}
/**
 * Describes automatic OS upgrade properties on the image.
 */
export interface AutomaticOSUpgradeProperties {
  /**
   * Specifies whether automatic OS upgrade is supported on the image.
   */
  automaticOSUpgradeSupported: boolean;
}
/**
 * Describes a Virtual Machine Image.
 */
export interface VirtualMachineImage extends VirtualMachineImageResource {
  plan?: PurchasePlan;
  osDiskImage?: OSDiskImage;
  dataDiskImages?: DataDiskImage[];
  automaticOSUpgradeProperties?: AutomaticOSUpgradeProperties;
}
/**
 * The Usage Names.
 */
export interface UsageName {
  /**
   * The name of the resource.
   */
  value?: string;
  /**
   * The localized name of the resource.
   */
  localizedValue?: string;
}
/**
 * Describes Compute Resource Usage.
 */
export interface Usage {
  /**
   * The current usage of the resource.
   */
  currentValue: number;
  /**
   * The maximum permitted usage of the resource.
   */
  limit: number;
  /**
   * The name of the type of usage.
   */
  name: UsageName;
}
/**
 * Parameters for Reimaging Virtual Machine. NOTE: Virtual Machine OS disk will always be reimaged
 */
export interface VirtualMachineReimageParameters {
  /**
   * Specifies whether to reimage temp disk. Default value: false.
   */
  tempDisk?: boolean;
}
// NOTE(review): machine-generated Azure Compute model declarations —
// confirm before editing by hand.
/**
 * Capture Virtual Machine parameters.
 */
export interface VirtualMachineCaptureParameters {
  /**
   * The captured virtual hard disk's name prefix.
   */
  vhdPrefix: string;
  /**
   * The destination container name.
   */
  destinationContainerName: string;
  /**
   * Specifies whether to overwrite the destination virtual hard disk, in case of conflict.
   */
  overwriteVhds: boolean;
}
/**
 * Output of virtual machine capture operation.
 */
export interface VirtualMachineCaptureResult extends SubResource {
  /**
   * the schema of the captured virtual machine
   */
  readonly schema?: string;
  /**
   * the version of the content
   */
  readonly contentVersion?: string;
  /**
   * parameters of the captured virtual machine
   */
  readonly parameters?: any;
  /**
   * a list of resource items of the captured virtual machine
   */
  readonly resources?: any[];
}
/**
 * Specifies information about the marketplace image used to create the virtual machine. This
 * element is only used for marketplace images. Before you can use a marketplace image from an API,
 * you must enable the image for programmatic use. In the Azure portal, find the marketplace image
 * that you want to use and then click **Want to deploy programmatically, Get Started ->**. Enter
 * any required information and then click **Save**.
 */
export interface Plan {
  /**
   * The plan ID.
   */
  name?: string;
  /**
   * The publisher ID.
   */
  publisher?: string;
  /**
   * Specifies the product of the image from the marketplace. This is the same value as Offer under
   * the imageReference element.
   */
  product?: string;
  /**
   * The promotion code.
   */
  promotionCode?: string;
}
// NOTE(review): machine-generated Azure Compute model declaration —
// the vmSize value list mirrors the API spec; confirm before editing by hand.
/**
 * Specifies the hardware settings for the virtual machine.
 */
export interface HardwareProfile {
  /**
   * Specifies the size of the virtual machine. For more information about virtual machine sizes,
   * see [Sizes for virtual
   * machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-sizes?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json).
   * <br><br> The available VM sizes depend on region and availability set. For a list of available
   * sizes use these APIs: <br><br> [List all available virtual machine sizes in an availability
   * set](https://docs.microsoft.com/rest/api/compute/availabilitysets/listavailablesizes) <br><br>
   * [List all available virtual machine sizes in a
   * region](https://docs.microsoft.com/rest/api/compute/virtualmachinesizes/list) <br><br> [List
   * all available virtual machine sizes for
   * resizing](https://docs.microsoft.com/rest/api/compute/virtualmachines/listavailablesizes).
   * Possible values include: 'Basic_A0', 'Basic_A1', 'Basic_A2', 'Basic_A3', 'Basic_A4',
   * 'Standard_A0', 'Standard_A1', 'Standard_A2', 'Standard_A3', 'Standard_A4', 'Standard_A5',
   * 'Standard_A6', 'Standard_A7', 'Standard_A8', 'Standard_A9', 'Standard_A10', 'Standard_A11',
   * 'Standard_A1_v2', 'Standard_A2_v2', 'Standard_A4_v2', 'Standard_A8_v2', 'Standard_A2m_v2',
   * 'Standard_A4m_v2', 'Standard_A8m_v2', 'Standard_B1s', 'Standard_B1ms', 'Standard_B2s',
   * 'Standard_B2ms', 'Standard_B4ms', 'Standard_B8ms', 'Standard_D1', 'Standard_D2',
   * 'Standard_D3', 'Standard_D4', 'Standard_D11', 'Standard_D12', 'Standard_D13', 'Standard_D14',
   * 'Standard_D1_v2', 'Standard_D2_v2', 'Standard_D3_v2', 'Standard_D4_v2', 'Standard_D5_v2',
   * 'Standard_D2_v3', 'Standard_D4_v3', 'Standard_D8_v3', 'Standard_D16_v3', 'Standard_D32_v3',
   * 'Standard_D64_v3', 'Standard_D2s_v3', 'Standard_D4s_v3', 'Standard_D8s_v3',
   * 'Standard_D16s_v3', 'Standard_D32s_v3', 'Standard_D64s_v3', 'Standard_D11_v2',
   * 'Standard_D12_v2', 'Standard_D13_v2', 'Standard_D14_v2', 'Standard_D15_v2', 'Standard_DS1',
   * 'Standard_DS2', 'Standard_DS3', 'Standard_DS4', 'Standard_DS11', 'Standard_DS12',
   * 'Standard_DS13', 'Standard_DS14', 'Standard_DS1_v2', 'Standard_DS2_v2', 'Standard_DS3_v2',
   * 'Standard_DS4_v2', 'Standard_DS5_v2', 'Standard_DS11_v2', 'Standard_DS12_v2',
   * 'Standard_DS13_v2', 'Standard_DS14_v2', 'Standard_DS15_v2', 'Standard_DS13-4_v2',
   * 'Standard_DS13-2_v2', 'Standard_DS14-8_v2', 'Standard_DS14-4_v2', 'Standard_E2_v3',
   * 'Standard_E4_v3', 'Standard_E8_v3', 'Standard_E16_v3', 'Standard_E32_v3', 'Standard_E64_v3',
   * 'Standard_E2s_v3', 'Standard_E4s_v3', 'Standard_E8s_v3', 'Standard_E16s_v3',
   * 'Standard_E32s_v3', 'Standard_E64s_v3', 'Standard_E32-16_v3', 'Standard_E32-8s_v3',
   * 'Standard_E64-32s_v3', 'Standard_E64-16s_v3', 'Standard_F1', 'Standard_F2', 'Standard_F4',
   * 'Standard_F8', 'Standard_F16', 'Standard_F1s', 'Standard_F2s', 'Standard_F4s', 'Standard_F8s',
   * 'Standard_F16s', 'Standard_F2s_v2', 'Standard_F4s_v2', 'Standard_F8s_v2', 'Standard_F16s_v2',
   * 'Standard_F32s_v2', 'Standard_F64s_v2', 'Standard_F72s_v2', 'Standard_G1', 'Standard_G2',
   * 'Standard_G3', 'Standard_G4', 'Standard_G5', 'Standard_GS1', 'Standard_GS2', 'Standard_GS3',
   * 'Standard_GS4', 'Standard_GS5', 'Standard_GS4-8', 'Standard_GS4-4', 'Standard_GS5-16',
   * 'Standard_GS5-8', 'Standard_H8', 'Standard_H16', 'Standard_H8m', 'Standard_H16m',
   * 'Standard_H16r', 'Standard_H16mr', 'Standard_L4s', 'Standard_L8s', 'Standard_L16s',
   * 'Standard_L32s', 'Standard_M64s', 'Standard_M64ms', 'Standard_M128s', 'Standard_M128ms',
   * 'Standard_M64-32ms', 'Standard_M64-16ms', 'Standard_M128-64ms', 'Standard_M128-32ms',
   * 'Standard_NC6', 'Standard_NC12', 'Standard_NC24', 'Standard_NC24r', 'Standard_NC6s_v2',
   * 'Standard_NC12s_v2', 'Standard_NC24s_v2', 'Standard_NC24rs_v2', 'Standard_NC6s_v3',
   * 'Standard_NC12s_v3', 'Standard_NC24s_v3', 'Standard_NC24rs_v3', 'Standard_ND6s',
   * 'Standard_ND12s', 'Standard_ND24s', 'Standard_ND24rs', 'Standard_NV6', 'Standard_NV12',
   * 'Standard_NV24'
   */
  vmSize?: string;
}
// NOTE(review): machine-generated Azure Compute model declarations —
// confirm before editing by hand.
/**
 * Specifies information about the image to use. You can specify information about platform images,
 * marketplace images, or virtual machine images. This element is required when you want to use a
 * platform image, marketplace image, or virtual machine image, but is not used in other creation
 * operations.
 */
export interface ImageReference extends SubResource {
  /**
   * The image publisher.
   */
  publisher?: string;
  /**
   * Specifies the offer of the platform image or marketplace image used to create the virtual
   * machine.
   */
  offer?: string;
  /**
   * The image SKU.
   */
  sku?: string;
  /**
   * Specifies the version of the platform image or marketplace image used to create the virtual
   * machine. The allowed formats are Major.Minor.Build or 'latest'. Major, Minor, and Build are
   * decimal numbers. Specify 'latest' to use the latest version of an image available at deploy
   * time. Even if you use 'latest', the VM image will not automatically update after deploy time
   * even if a new version becomes available.
   */
  version?: string;
}
/**
 * Describes a reference to Key Vault Secret
 */
export interface KeyVaultSecretReference {
  /**
   * The URL referencing a secret in a Key Vault.
   */
  secretUrl: string;
  /**
   * The relative URL of the Key Vault containing the secret.
   */
  sourceVault: SubResource;
}
/**
 * Describes a reference to Key Vault Key
 */
export interface KeyVaultKeyReference {
  /**
   * The URL referencing a key encryption key in Key Vault.
   */
  keyUrl: string;
  /**
   * The relative URL of the Key Vault containing the key.
   */
  sourceVault: SubResource;
}
/**
 * Describes a Encryption Settings for a Disk
 */
export interface DiskEncryptionSettings {
  /**
   * Specifies the location of the disk encryption key, which is a Key Vault Secret.
   */
  diskEncryptionKey?: KeyVaultSecretReference;
  /**
   * Specifies the location of the key encryption key in Key Vault.
   */
  keyEncryptionKey?: KeyVaultKeyReference;
  /**
   * Specifies whether disk encryption should be enabled on the virtual machine.
   */
  enabled?: boolean;
}
/**
 * Describes the uri of a disk.
 */
export interface VirtualHardDisk {
  /**
   * Specifies the virtual hard disk's uri.
   */
  uri?: string;
}
/**
 * Describes the parameters of ephemeral disk settings that can be specified for operating system
 * disk. <br><br> NOTE: The ephemeral disk settings can only be specified for managed disk.
 */
export interface DiffDiskSettings {
  /**
   * Specifies the ephemeral disk settings for operating system disk. Possible values include:
   * 'Local'
   */
  option?: string;
}
// NOTE(review): machine-generated Azure Compute model declarations —
// confirm before editing by hand.
/**
 * The parameters of a managed disk.
 */
export interface ManagedDiskParameters extends SubResource {
  /**
   * Specifies the storage account type for the managed disk. NOTE: UltraSSD_LRS can only be used
   * with data disks, it cannot be used with OS Disk. Possible values include: 'Standard_LRS',
   * 'Premium_LRS', 'StandardSSD_LRS', 'UltraSSD_LRS'
   */
  storageAccountType?: string;
}
/**
 * Specifies information about the operating system disk used by the virtual machine. <br><br> For
 * more information about disks, see [About disks and VHDs for Azure virtual
 * machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-about-disks-vhds?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json).
 */
export interface OSDisk {
  /**
   * This property allows you to specify the type of the OS that is included in the disk if
   * creating a VM from user-image or a specialized VHD. <br><br> Possible values are: <br><br>
   * **Windows** <br><br> **Linux**. Possible values include: 'Windows', 'Linux'
   */
  osType?: string;
  /**
   * Specifies the encryption settings for the OS Disk. <br><br> Minimum api-version: 2015-06-15
   */
  encryptionSettings?: DiskEncryptionSettings;
  /**
   * The disk name.
   */
  name?: string;
  /**
   * The virtual hard disk.
   */
  vhd?: VirtualHardDisk;
  /**
   * The source user image virtual hard disk. The virtual hard disk will be copied before being
   * attached to the virtual machine. If SourceImage is provided, the destination virtual hard
   * drive must not exist.
   */
  image?: VirtualHardDisk;
  /**
   * Specifies the caching requirements. <br><br> Possible values are: <br><br> **None** <br><br>
   * **ReadOnly** <br><br> **ReadWrite** <br><br> Default: **None for Standard storage. ReadOnly
   * for Premium storage**. Possible values include: 'None', 'ReadOnly', 'ReadWrite'
   */
  caching?: string;
  /**
   * Specifies whether writeAccelerator should be enabled or disabled on the disk.
   */
  writeAcceleratorEnabled?: boolean;
  /**
   * Specifies the ephemeral Disk Settings for the operating system disk used by the virtual
   * machine.
   */
  diffDiskSettings?: DiffDiskSettings;
  /**
   * Specifies how the virtual machine should be created.<br><br> Possible values are:<br><br>
   * **Attach** \u2013 This value is used when you are using a specialized disk to create the
   * virtual machine.<br><br> **FromImage** \u2013 This value is used when you are using an image
   * to create the virtual machine. If you are using a platform image, you also use the
   * imageReference element described above. If you are using a marketplace image, you also use
   * the plan element previously described. Possible values include: 'FromImage', 'Empty', 'Attach'
   */
  createOption: string;
  /**
   * Specifies the size of an empty data disk in gigabytes. This element can be used to overwrite
   * the size of the disk in a virtual machine image. <br><br> This value cannot be larger than
   * 1023 GB
   */
  diskSizeGB?: number;
  /**
   * The managed disk parameters.
   */
  managedDisk?: ManagedDiskParameters;
}
/**
* Describes a data disk.
*/
export interface DataDisk {
/**
* Specifies the logical unit number of the data disk. This value is used to identify data disks
* within the VM and therefore must be unique for each data disk attached to a VM.
*/
lun: number;
/**
* The disk name.
*/
name?: string;
/**
* The virtual hard disk.
*/
vhd?: VirtualHardDisk;
/**
* The source user image virtual hard disk. The virtual hard disk will be copied before being
* attached to the virtual machine. If SourceImage is provided, the destination virtual hard
* drive must not exist.
*/
image?: VirtualHardDisk;
/**
* Specifies the caching requirements. <br><br> Possible values are: <br><br> **None** <br><br>
* **ReadOnly** <br><br> **ReadWrite** <br><br> Default: **None for Standard storage. ReadOnly
* for Premium storage**. Possible values include: 'None', 'ReadOnly', 'ReadWrite'
*/
caching?: string;
/**
* Specifies whether writeAccelerator should be enabled or disabled on the disk.
*/
writeAcceleratorEnabled?: boolean;
/**
* Specifies how the virtual machine should be created.<br><br> Possible values are:<br><br>
* **Attach** \u2013 This value is used when you are using a specialized disk to create the
* virtual machine.<br><br> **FromImage** \u2013 This value is used when you are using an image
* to create the virtual machine. If you are using a platform image, you also use the
* imageReference element described above. If you are using a marketplace image, you also use
* the plan element previously described. Possible values include: 'FromImage', 'Empty', 'Attach'
*/
createOption: string;
/**
* Specifies the size of an empty data disk in gigabytes. This element can be used to overwrite
* the size of the disk in a virtual machine image. <br><br> This value cannot be larger than
* 1023 GB
*/
diskSizeGB?: number;
/**
* The managed disk parameters.
*/
managedDisk?: ManagedDiskParameters;
}
/**
 * Specifies the storage settings for the virtual machine disks.
 */
export interface StorageProfile {
  /**
   * Specifies information about the image to use. You can specify information about platform
   * images, marketplace images, or virtual machine images. This element is required when you want
   * to use a platform image, marketplace image, or virtual machine image, but is not used in other
   * creation operations.
   */
  imageReference?: ImageReference;
  /**
   * Specifies information about the operating system disk used by the virtual machine. <br><br>
   * For more information about disks, see [About disks and VHDs for Azure virtual
   * machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-about-disks-vhds?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json).
   */
  osDisk?: OSDisk;
  /**
   * Specifies the parameters that are used to add a data disk to a virtual machine. <br><br> For
   * more information about disks, see [About disks and VHDs for Azure virtual
   * machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-about-disks-vhds?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json).
   */
  dataDisks?: DataDisk[];
}
/**
 * Enables or disables a capability on the virtual machine or virtual machine scale set.
 */
export interface AdditionalCapabilities {
  /**
   * The flag that enables or disables a capability to have one or more managed data disks with
   * UltraSSD_LRS storage account type on the VM or VMSS. Managed disks with storage account type
   * UltraSSD_LRS can be added to a virtual machine or virtual machine scale set only if this
   * property is enabled.
   */
  ultraSSDEnabled?: boolean;
}
/**
 * Specifies additional XML formatted information that can be included in the Unattend.xml file,
 * which is used by Windows Setup. Contents are defined by setting name, component name, and the
 * pass in which the content is applied.
 */
export interface AdditionalUnattendContent {
  /**
   * The pass name. Currently, the only allowable value is OobeSystem. Possible values include:
   * 'OobeSystem'
   */
  passName?: string;
  /**
   * The component name. Currently, the only allowable value is Microsoft-Windows-Shell-Setup.
   * Possible values include: 'Microsoft-Windows-Shell-Setup'
   */
  componentName?: string;
  /**
   * Specifies the name of the setting to which the content applies. Possible values are:
   * FirstLogonCommands and AutoLogon. Possible values include: 'AutoLogon', 'FirstLogonCommands'
   */
  settingName?: string;
  /**
   * Specifies the XML formatted content that is added to the unattend.xml file for the specified
   * path and component. The XML must be less than 4KB and must include the root element for the
   * setting or feature that is being inserted.
   */
  content?: string;
}
/**
 * Describes Protocol and thumbprint of Windows Remote Management listener
 */
export interface WinRMListener {
  /**
   * Specifies the protocol of listener. <br><br> Possible values are: <br>**http** <br><br>
   * **https**. Possible values include: 'Http', 'Https'
   */
  protocol?: string;
  /**
   * This is the URL of a certificate that has been uploaded to Key Vault as a secret. For adding a
   * secret to the Key Vault, see [Add a key or secret to the key
   * vault](https://docs.microsoft.com/azure/key-vault/key-vault-get-started/#add). In this case,
   * your certificate needs to be the Base64 encoding of the following JSON Object which is
   * encoded in UTF-8: <br><br> {<br> "data":"<Base64-encoded-certificate>",<br>
   * "dataType":"pfx",<br> "password":"<pfx-file-password>"<br>}
   */
  certificateUrl?: string;
}
/**
 * Describes Windows Remote Management configuration of the VM
 */
export interface WinRMConfiguration {
  /**
   * The list of Windows Remote Management listeners
   */
  listeners?: WinRMListener[];
}
/**
 * Specifies Windows operating system settings on the virtual machine.
 */
export interface WindowsConfiguration {
  /**
   * Indicates whether virtual machine agent should be provisioned on the virtual machine. <br><br>
   * When this property is not specified in the request body, default behavior is to set it to
   * true. This will ensure that VM Agent is installed on the VM so that extensions can be added
   * to the VM later.
   */
  provisionVMAgent?: boolean;
  /**
   * Indicates whether virtual machine is enabled for automatic updates.
   */
  enableAutomaticUpdates?: boolean;
  /**
   * Specifies the time zone of the virtual machine. e.g. "Pacific Standard Time"
   */
  timeZone?: string;
  /**
   * Specifies additional base-64 encoded XML formatted information that can be included in the
   * Unattend.xml file, which is used by Windows Setup.
   */
  additionalUnattendContent?: AdditionalUnattendContent[];
  /**
   * Specifies the Windows Remote Management listeners. This enables remote Windows PowerShell.
   */
  winRM?: WinRMConfiguration;
}
/**
 * Contains information about SSH certificate public key and the path on the Linux VM where the
 * public key is placed.
 */
export interface SshPublicKey {
  /**
   * Specifies the full path on the created VM where ssh public key is stored. If the file already
   * exists, the specified key is appended to the file. Example: /home/user/.ssh/authorized_keys
   */
  path?: string;
  /**
   * SSH public key certificate used to authenticate with the VM through ssh. The key needs to be
   * at least 2048-bit and in ssh-rsa format. <br><br> For creating ssh keys, see [Create SSH keys
   * on Linux and Mac for Linux VMs in
   * Azure](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-mac-create-ssh-keys?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json).
   */
  keyData?: string;
}
/**
 * SSH configuration for Linux based VMs running on Azure
 */
export interface SshConfiguration {
  /**
   * The list of SSH public keys used to authenticate with linux based VMs.
   */
  publicKeys?: SshPublicKey[];
}
/**
 * Specifies the Linux operating system settings on the virtual machine. <br><br>For a list of
 * supported Linux distributions, see [Linux on Azure-Endorsed
 * Distributions](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-endorsed-distros?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json)
 * <br><br> For running non-endorsed distributions, see [Information for Non-Endorsed
 * Distributions](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-create-upload-generic?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json).
 */
export interface LinuxConfiguration {
  /**
   * Specifies whether password authentication should be disabled.
   */
  disablePasswordAuthentication?: boolean;
  /**
   * Specifies the ssh key configuration for a Linux OS.
   */
  ssh?: SshConfiguration;
  /**
   * Indicates whether virtual machine agent should be provisioned on the virtual machine. <br><br>
   * When this property is not specified in the request body, default behavior is to set it to
   * true. This will ensure that VM Agent is installed on the VM so that extensions can be added
   * to the VM later.
   */
  provisionVMAgent?: boolean;
}
/**
 * Describes a single certificate reference in a Key Vault, and where the certificate should reside
 * on the VM.
 */
export interface VaultCertificate {
  /**
   * This is the URL of a certificate that has been uploaded to Key Vault as a secret. For adding a
   * secret to the Key Vault, see [Add a key or secret to the key
   * vault](https://docs.microsoft.com/azure/key-vault/key-vault-get-started/#add). In this case,
   * your certificate needs to be the Base64 encoding of the following JSON Object which is
   * encoded in UTF-8: <br><br> {<br> "data":"<Base64-encoded-certificate>",<br>
   * "dataType":"pfx",<br> "password":"<pfx-file-password>"<br>}
   */
  certificateUrl?: string;
  /**
   * For Windows VMs, specifies the certificate store on the Virtual Machine to which the
   * certificate should be added. The specified certificate store is implicitly in the LocalMachine
   * account. <br><br>For Linux VMs, the certificate file is placed under the /var/lib/waagent
   * directory, with the file name <UppercaseThumbprint>.crt for the X509 certificate file
   * and <UppercaseThumbprint>.prv for private key. Both of these files are .pem formatted.
   */
  certificateStore?: string;
}
/**
 * Describes a set of certificates which are all in the same Key Vault.
 */
export interface VaultSecretGroup {
  /**
   * The relative URL of the Key Vault containing all of the certificates in VaultCertificates.
   */
  sourceVault?: SubResource;
  /**
   * The list of key vault references in SourceVault which contain certificates.
   */
  vaultCertificates?: VaultCertificate[];
}
/**
 * Specifies the operating system settings for the virtual machine.
 */
export interface OSProfile {
  /**
   * Specifies the host OS name of the virtual machine. <br><br> This name cannot be updated after
   * the VM is created. <br><br> **Max-length (Windows):** 15 characters <br><br> **Max-length
   * (Linux):** 64 characters. <br><br> For naming conventions and restrictions see [Azure
   * infrastructure services implementation
   * guidelines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-infrastructure-subscription-accounts-guidelines?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json#1-naming-conventions).
   */
  computerName?: string;
  /**
   * Specifies the name of the administrator account. <br><br> **Windows-only restriction:** Cannot
   * end in "." <br><br> **Disallowed values:** "administrator", "admin", "user", "user1", "test",
   * "user2", "test1", "user3", "admin1", "1", "123", "a", "actuser", "adm", "admin2", "aspnet",
   * "backup", "console", "david", "guest", "john", "owner", "root", "server", "sql", "support",
   * "support_388945a0", "sys", "test2", "test3", "user4", "user5". <br><br> **Minimum-length
   * (Linux):** 1 character <br><br> **Max-length (Linux):** 64 characters <br><br> **Max-length
   * (Windows):** 20 characters <br><br><li> For root access to the Linux VM, see [Using root
   * privileges on Linux virtual machines in
   * Azure](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-use-root-privileges?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json)<br><li>
   * For a list of built-in system users on Linux that should not be used in this field, see
   * [Selecting User Names for Linux on
   * Azure](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-usernames?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json)
   */
  adminUsername?: string;
  /**
   * Specifies the password of the administrator account. <br><br> **Minimum-length (Windows):** 8
   * characters <br><br> **Minimum-length (Linux):** 6 characters <br><br> **Max-length
   * (Windows):** 123 characters <br><br> **Max-length (Linux):** 72 characters <br><br>
   * **Complexity requirements:** 3 out of 4 conditions below need to be fulfilled <br> Has lower
   * characters <br>Has upper characters <br> Has a digit <br> Has a special character (Regex match
   * [\W_]) <br><br> **Disallowed values:** "abc@123", "P@$$w0rd", "P@ssw0rd", "P@ssword123",
   * "Pa$$word", "pass@word1", "Password!", "Password1", "Password22", "iloveyou!" <br><br> For
   * resetting the password, see [How to reset the Remote Desktop service or its login password in
   * a Windows
   * VM](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-reset-rdp?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json)
   * <br><br> For resetting root password, see [Manage users, SSH, and check or repair disks on
   * Azure Linux VMs using the VMAccess
   * Extension](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-using-vmaccess-extension?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json#reset-root-password)
   */
  adminPassword?: string;
  /**
   * Specifies a base-64 encoded string of custom data. The base-64 encoded string is decoded to a
   * binary array that is saved as a file on the Virtual Machine. The maximum length of the binary
   * array is 65535 bytes. <br><br> For using cloud-init for your VM, see [Using cloud-init to
   * customize a Linux VM during
   * creation](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-using-cloud-init?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json)
   */
  customData?: string;
  /**
   * Specifies Windows operating system settings on the virtual machine.
   */
  windowsConfiguration?: WindowsConfiguration;
  /**
   * Specifies the Linux operating system settings on the virtual machine. <br><br>For a list of
   * supported Linux distributions, see [Linux on Azure-Endorsed
   * Distributions](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-endorsed-distros?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json)
   * <br><br> For running non-endorsed distributions, see [Information for Non-Endorsed
   * Distributions](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-create-upload-generic?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json).
   */
  linuxConfiguration?: LinuxConfiguration;
  /**
   * Specifies set of certificates that should be installed onto the virtual machine.
   */
  secrets?: VaultSecretGroup[];
  /**
   * Specifies whether extension operations should be allowed on the virtual machine. <br><br>This
   * may only be set to False when no extensions are present on the virtual machine.
   */
  allowExtensionOperations?: boolean;
}
/**
 * Describes a network interface reference.
 */
export interface NetworkInterfaceReference extends SubResource {
  /**
   * Specifies the primary network interface in case the virtual machine has more than 1 network
   * interface.
   */
  primary?: boolean;
}
/**
 * Specifies the network interfaces of the virtual machine.
 */
export interface NetworkProfile {
  /**
   * Specifies the list of resource Ids for the network interfaces associated with the virtual
   * machine.
   */
  networkInterfaces?: NetworkInterfaceReference[];
}
/**
 * Boot Diagnostics is a debugging feature which allows you to view Console Output and Screenshot
 * to diagnose VM status. <br><br> You can easily view the output of your console log. <br><br>
 * Azure also enables you to see a screenshot of the VM from the hypervisor.
 */
export interface BootDiagnostics {
  /**
   * Whether boot diagnostics should be enabled on the Virtual Machine.
   */
  enabled?: boolean;
  /**
   * Uri of the storage account to use for placing the console output and screenshot.
   */
  storageUri?: string;
}
/**
 * Specifies the boot diagnostic settings state. <br><br>Minimum api-version: 2015-06-15.
 */
export interface DiagnosticsProfile {
  /**
   * Boot Diagnostics is a debugging feature which allows you to view Console Output and Screenshot
   * to diagnose VM status. <br><br> You can easily view the output of your console log. <br><br>
   * Azure also enables you to see a screenshot of the VM from the hypervisor.
   */
  bootDiagnostics?: BootDiagnostics;
}
/**
 * The instance view of a virtual machine extension handler.
 */
export interface VirtualMachineExtensionHandlerInstanceView {
  /**
   * Specifies the type of the extension; an example is "CustomScriptExtension".
   */
  type?: string;
  /**
   * Specifies the version of the script handler.
   */
  typeHandlerVersion?: string;
  /**
   * The extension handler status.
   */
  status?: InstanceViewStatus;
}
/**
 * The instance view of the VM Agent running on the virtual machine.
 */
export interface VirtualMachineAgentInstanceView {
  /**
   * The VM Agent full version.
   */
  vmAgentVersion?: string;
  /**
   * The virtual machine extension handler instance view.
   */
  extensionHandlers?: VirtualMachineExtensionHandlerInstanceView[];
  /**
   * The resource status information.
   */
  statuses?: InstanceViewStatus[];
}
/**
 * The instance view of the disk.
 */
export interface DiskInstanceView {
  /**
   * The disk name.
   */
  name?: string;
  /**
   * Specifies the encryption settings for the OS Disk. <br><br> Minimum api-version: 2015-06-15
   */
  encryptionSettings?: DiskEncryptionSettings[];
  /**
   * The resource status information.
   */
  statuses?: InstanceViewStatus[];
}
/**
 * The instance view of a virtual machine boot diagnostics.
 */
export interface BootDiagnosticsInstanceView {
  /**
   * The console screenshot blob URI.
   */
  readonly consoleScreenshotBlobUri?: string;
  /**
   * The Linux serial console log blob Uri.
   */
  readonly serialConsoleLogBlobUri?: string;
  /**
   * The boot diagnostics status information for the VM. <br><br> NOTE: It will be set only if
   * there are errors encountered in enabling boot diagnostics.
   */
  readonly status?: InstanceViewStatus;
}
/**
 * The principal and client IDs of a user assigned identity associated with a virtual machine.
 * Both values are read-only and populated by the service.
 */
export interface VirtualMachineIdentityUserAssignedIdentitiesValue {
  /**
   * The principal id of user assigned identity.
   */
  readonly principalId?: string;
  /**
   * The client id of user assigned identity.
   */
  readonly clientId?: string;
}
/**
 * Identity for the virtual machine.
 */
export interface VirtualMachineIdentity {
  /**
   * The principal id of virtual machine identity. This property will only be provided for a system
   * assigned identity.
   */
  readonly principalId?: string;
  /**
   * The tenant id associated with the virtual machine. This property will only be provided for a
   * system assigned identity.
   */
  readonly tenantId?: string;
  /**
   * The type of identity used for the virtual machine. The type 'SystemAssigned, UserAssigned'
   * includes both an implicitly created identity and a set of user assigned identities. The type
   * 'None' will remove any identities from the virtual machine. Possible values include:
   * 'SystemAssigned', 'UserAssigned', 'SystemAssigned, UserAssigned', 'None'
   */
  type?: string;
  /**
   * The list of user identities associated with the Virtual Machine. The user identity dictionary
   * key references will be ARM resource ids in the form:
   * '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
   */
  userAssignedIdentities?: { [propertyName: string]:
  VirtualMachineIdentityUserAssignedIdentitiesValue };
}
/**
 * Maintenance Operation Status.
 */
export interface MaintenanceRedeployStatus {
  /**
   * True, if customer is allowed to perform Maintenance.
   */
  isCustomerInitiatedMaintenanceAllowed?: boolean;
  /**
   * Start Time for the Pre Maintenance Window.
   */
  preMaintenanceWindowStartTime?: Date;
  /**
   * End Time for the Pre Maintenance Window.
   */
  preMaintenanceWindowEndTime?: Date;
  /**
   * Start Time for the Maintenance Window.
   */
  maintenanceWindowStartTime?: Date;
  /**
   * End Time for the Maintenance Window.
   */
  maintenanceWindowEndTime?: Date;
  /**
   * The Last Maintenance Operation Result Code. Possible values include: 'None', 'RetryLater',
   * 'MaintenanceAborted', 'MaintenanceCompleted'
   */
  lastOperationResultCode?: string;
  /**
   * Message returned for the last Maintenance Operation.
   */
  lastOperationMessage?: string;
}
/**
 * The instance view of a virtual machine.
 */
export interface VirtualMachineInstanceView {
  /**
   * Specifies the update domain of the virtual machine.
   */
  platformUpdateDomain?: number;
  /**
   * Specifies the fault domain of the virtual machine.
   */
  platformFaultDomain?: number;
  /**
   * The computer name assigned to the virtual machine.
   */
  computerName?: string;
  /**
   * The Operating System running on the virtual machine.
   */
  osName?: string;
  /**
   * The version of Operating System running on the virtual machine.
   */
  osVersion?: string;
  /**
   * The Remote desktop certificate thumbprint.
   */
  rdpThumbPrint?: string;
  /**
   * The VM Agent running on the virtual machine.
   */
  vmAgent?: VirtualMachineAgentInstanceView;
  /**
   * The Maintenance Operation status on the virtual machine.
   */
  maintenanceRedeployStatus?: MaintenanceRedeployStatus;
  /**
   * The virtual machine disk information.
   */
  disks?: DiskInstanceView[];
  /**
   * The extensions information.
   */
  extensions?: VirtualMachineExtensionInstanceView[];
  /**
   * Boot Diagnostics is a debugging feature which allows you to view Console Output and Screenshot
   * to diagnose VM status. <br><br> You can easily view the output of your console log. <br><br>
   * Azure also enables you to see a screenshot of the VM from the hypervisor.
   */
  bootDiagnostics?: BootDiagnosticsInstanceView;
  /**
   * The resource status information.
   */
  statuses?: InstanceViewStatus[];
}
/**
 * Describes a Virtual Machine.
 */
export interface VirtualMachine extends Resource {
  /**
   * Specifies information about the marketplace image used to create the virtual machine. This
   * element is only used for marketplace images. Before you can use a marketplace image from an
   * API, you must enable the image for programmatic use. In the Azure portal, find the
   * marketplace image that you want to use and then click **Want to deploy programmatically, Get
   * Started ->**. Enter any required information and then click **Save**.
   */
  plan?: Plan;
  /**
   * Specifies the hardware settings for the virtual machine.
   */
  hardwareProfile?: HardwareProfile;
  /**
   * Specifies the storage settings for the virtual machine disks.
   */
  storageProfile?: StorageProfile;
  /**
   * Specifies additional capabilities enabled or disabled on the virtual machine.
   */
  additionalCapabilities?: AdditionalCapabilities;
  /**
   * Specifies the operating system settings for the virtual machine.
   */
  osProfile?: OSProfile;
  /**
   * Specifies the network interfaces of the virtual machine.
   */
  networkProfile?: NetworkProfile;
  /**
   * Specifies the boot diagnostic settings state. <br><br>Minimum api-version: 2015-06-15.
   */
  diagnosticsProfile?: DiagnosticsProfile;
  /**
   * Specifies information about the availability set that the virtual machine should be assigned
   * to. Virtual machines specified in the same availability set are allocated to different nodes
   * to maximize availability. For more information about availability sets, see [Manage the
   * availability of virtual
   * machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-manage-availability?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json).
   * <br><br> For more information on Azure planned maintenance, see [Planned maintenance for
   * virtual machines in
   * Azure](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-planned-maintenance?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json)
   * <br><br> Currently, a VM can only be added to availability set at creation time. An existing
   * VM cannot be added to an availability set.
   */
  availabilitySet?: SubResource;
  /**
   * The provisioning state, which only appears in the response.
   */
  readonly provisioningState?: string;
  /**
   * The virtual machine instance view.
   */
  readonly instanceView?: VirtualMachineInstanceView;
  /**
   * Specifies that the image or disk that is being used was licensed on-premises. This element is
   * only used for images that contain the Windows Server operating system. <br><br> Possible
   * values are: <br><br> Windows_Client <br><br> Windows_Server <br><br> If this element is
   * included in a request for an update, the value must match the initial value. This value cannot
   * be updated. <br><br> For more information, see [Azure Hybrid Use Benefit for Windows
   * Server](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-hybrid-use-benefit-licensing?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json)
   * <br><br> Minimum api-version: 2015-06-15
   */
  licenseType?: string;
  /**
   * Specifies the VM unique ID which is a 128-bit identifier that is encoded and stored in all
   * Azure IaaS VMs SMBIOS and can be read using platform BIOS commands.
   */
  readonly vmId?: string;
  /**
   * The virtual machine child extension resources.
   */
  readonly resources?: VirtualMachineExtension[];
  /**
   * The identity of the virtual machine, if configured.
   */
  identity?: VirtualMachineIdentity;
  /**
   * The virtual machine zones.
   */
  zones?: string[];
}
/**
 * Describes a Virtual Machine Update.
 */
export interface VirtualMachineUpdate extends UpdateResource {
  /**
   * Specifies information about the marketplace image used to create the virtual machine. This
   * element is only used for marketplace images. Before you can use a marketplace image from an
   * API, you must enable the image for programmatic use. In the Azure portal, find the
   * marketplace image that you want to use and then click **Want to deploy programmatically, Get
   * Started ->**. Enter any required information and then click **Save**.
   */
  plan?: Plan;
  /**
   * Specifies the hardware settings for the virtual machine.
   */
  hardwareProfile?: HardwareProfile;
  /**
   * Specifies the storage settings for the virtual machine disks.
   */
  storageProfile?: StorageProfile;
  /**
   * Specifies additional capabilities enabled or disabled on the virtual machine.
   */
  additionalCapabilities?: AdditionalCapabilities;
  /**
   * Specifies the operating system settings for the virtual machine.
   */
  osProfile?: OSProfile;
  /**
   * Specifies the network interfaces of the virtual machine.
   */
  networkProfile?: NetworkProfile;
  /**
   * Specifies the boot diagnostic settings state. <br><br>Minimum api-version: 2015-06-15.
   */
  diagnosticsProfile?: DiagnosticsProfile;
  /**
   * Specifies information about the availability set that the virtual machine should be assigned
   * to. Virtual machines specified in the same availability set are allocated to different nodes
   * to maximize availability. For more information about availability sets, see [Manage the
   * availability of virtual
   * machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-manage-availability?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json).
   * <br><br> For more information on Azure planned maintenance, see [Planned maintenance for
   * virtual machines in
   * Azure](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-planned-maintenance?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json)
   * <br><br> Currently, a VM can only be added to availability set at creation time. An existing
   * VM cannot be added to an availability set.
   */
  availabilitySet?: SubResource;
  /**
   * The provisioning state, which only appears in the response.
   */
  readonly provisioningState?: string;
  /**
   * The virtual machine instance view.
   */
  readonly instanceView?: VirtualMachineInstanceView;
  /**
   * Specifies that the image or disk that is being used was licensed on-premises. This element is
   * only used for images that contain the Windows Server operating system. <br><br> Possible
   * values are: <br><br> Windows_Client <br><br> Windows_Server <br><br> If this element is
   * included in a request for an update, the value must match the initial value. This value cannot
   * be updated. <br><br> For more information, see [Azure Hybrid Use Benefit for Windows
   * Server](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-hybrid-use-benefit-licensing?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json)
   * <br><br> Minimum api-version: 2015-06-15
   */
  licenseType?: string;
  /**
   * Specifies the VM unique ID which is a 128-bit identifier that is encoded and stored in all
   * Azure IaaS VMs SMBIOS and can be read using platform BIOS commands.
   */
  readonly vmId?: string;
  /**
   * The identity of the virtual machine, if configured.
   */
  identity?: VirtualMachineIdentity;
  /**
   * The virtual machine zones.
   */
  zones?: string[];
}
/**
 * The configuration parameters used for performing automatic OS upgrade.
 */
export interface AutomaticOSUpgradePolicy {
  /**
   * Whether OS upgrades should automatically be applied to scale set instances in a rolling
   * fashion when a newer version of the image becomes available. Default value is false.
   */
  enableAutomaticOSUpgrade?: boolean;
  /**
   * Whether OS image rollback feature should be disabled. Default value is false.
   */
  disableAutomaticRollback?: boolean;
}
/**
 * The configuration parameters used while performing a rolling upgrade.
 */
export interface RollingUpgradePolicy {
  /**
   * The maximum percent of total virtual machine instances that will be upgraded simultaneously by
   * the rolling upgrade in one batch. As this is a maximum, unhealthy instances in previous or
   * future batches can cause the percentage of instances in a batch to decrease to ensure higher
   * reliability. The default value for this parameter is 20%.
   */
  maxBatchInstancePercent?: number;
  /**
   * The maximum percentage of the total virtual machine instances in the scale set that can be
   * simultaneously unhealthy, either as a result of being upgraded, or by being found in an
   * unhealthy state by the virtual machine health checks before the rolling upgrade aborts. This
   * constraint will be checked prior to starting any batch. The default value for this parameter
   * is 20%.
   */
  maxUnhealthyInstancePercent?: number;
  /**
   * The maximum percentage of upgraded virtual machine instances that can be found to be in an
   * unhealthy state. This check will happen after each batch is upgraded. If this percentage is
   * ever exceeded, the rolling update aborts. The default value for this parameter is 20%.
   */
  maxUnhealthyUpgradedInstancePercent?: number;
  /**
   * The wait time between completing the update for all virtual machines in one batch and starting
   * the next batch. The time duration should be specified in ISO 8601 format. The default value is
   * 0 seconds (PT0S).
   */
  pauseTimeBetweenBatches?: string;
}
/**
 * Describes an upgrade policy - automatic, manual, or rolling.
 */
export interface UpgradePolicy {
  /**
   * Specifies the mode of an upgrade to virtual machines in the scale set.<br /><br /> Possible
   * values are:<br /><br /> **Manual** - You control the application of updates to virtual
   * machines in the scale set. You do this by using the manualUpgrade action.<br /><br />
   * **Automatic** - All virtual machines in the scale set are automatically updated at the same
   * time. Possible values include: 'Automatic', 'Manual', 'Rolling'
   */
  mode?: string;
  /**
   * The configuration parameters used while performing a rolling upgrade.
   */
  rollingUpgradePolicy?: RollingUpgradePolicy;
  /**
   * Configuration parameters used for performing automatic OS Upgrade.
   */
  automaticOSUpgradePolicy?: AutomaticOSUpgradePolicy;
}
/**
 * Describes an Operating System disk.
 */
export interface ImageOSDisk {
  /**
   * This property allows you to specify the type of the OS that is included in the disk if
   * creating a VM from a custom image. <br><br> Possible values are: <br><br> **Windows** <br><br>
   * **Linux**. Possible values include: 'Windows', 'Linux'
   */
  osType: string;
  /**
   * The OS State. Possible values include: 'Generalized', 'Specialized'
   */
  osState: string;
  /**
   * The snapshot.
   */
  snapshot?: SubResource;
  /**
   * The managedDisk.
   */
  managedDisk?: SubResource;
  /**
   * The Virtual Hard Disk.
   */
  blobUri?: string;
  /**
   * Specifies the caching requirements. <br><br> Possible values are: <br><br> **None** <br><br>
   * **ReadOnly** <br><br> **ReadWrite** <br><br> Default: **None for Standard storage. ReadOnly
   * for Premium storage**. Possible values include: 'None', 'ReadOnly', 'ReadWrite'
   */
  caching?: string;
  /**
   * Specifies the size of empty data disks in gigabytes. This element can be used to overwrite the
   * size of the disk in a virtual machine image. <br><br> This value cannot be larger than 1023 GB
   */
  diskSizeGB?: number;
  /**
   * Specifies the storage account type for the managed disk. UltraSSD_LRS cannot be used with OS
   * Disk. Possible values include: 'Standard_LRS', 'Premium_LRS', 'StandardSSD_LRS',
   * 'UltraSSD_LRS'
   */
  storageAccountType?: string;
}
/**
 * Describes a data disk.
 */
export interface ImageDataDisk {
  /**
   * Specifies the logical unit number of the data disk. This value is used to identify data disks
   * within the VM and therefore must be unique for each data disk attached to a VM.
   */
  lun: number;
  /**
   * The snapshot.
   */
  snapshot?: SubResource;
  /**
   * The managedDisk.
   */
  managedDisk?: SubResource;
  /**
   * The Virtual Hard Disk.
   */
  blobUri?: string;
  /**
   * Specifies the caching requirements. <br><br> Possible values are: <br><br> **None** <br><br>
   * **ReadOnly** <br><br> **ReadWrite** <br><br> Default: **None for Standard storage. ReadOnly
   * for Premium storage**. Possible values include: 'None', 'ReadOnly', 'ReadWrite'
   */
  caching?: string;
  /**
   * Specifies the size of empty data disks in gigabytes. This element can be used to overwrite the
   * size of the disk in a virtual machine image. <br><br> This value cannot be larger than 1023 GB
   */
  diskSizeGB?: number;
  /**
   * Specifies the storage account type for the managed disk. NOTE: UltraSSD_LRS can only be used
   * with data disks, it cannot be used with OS Disk. Possible values include: 'Standard_LRS',
   * 'Premium_LRS', 'StandardSSD_LRS', 'UltraSSD_LRS'
   */
  storageAccountType?: string;
}
/**
 * Describes a storage profile.
 */
export interface ImageStorageProfile {
  /**
   * Specifies information about the operating system disk used by the virtual machine. <br><br>
   * For more information about disks, see [About disks and VHDs for Azure virtual
   * machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-about-disks-vhds?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json).
   */
  osDisk?: ImageOSDisk;
  /**
   * Specifies the parameters that are used to add a data disk to a virtual machine. <br><br> For
   * more information about disks, see [About disks and VHDs for Azure virtual
   * machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-about-disks-vhds?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json).
   */
  dataDisks?: ImageDataDisk[];
  /**
   * Specifies whether an image is zone resilient or not. Default is false. Zone resilient images
   * can be created only in regions that provide Zone Redundant Storage (ZRS).
   */
  zoneResilient?: boolean;
}
/**
* The source user image virtual hard disk. The virtual hard disk will be copied before being
* attached to the virtual machine. If SourceImage is provided, the destination virtual hard drive
* must not exist.
*/
export interface Image extends Resource {
/**
* The source virtual machine from which Image is created.
*/
sourceVirtualMachine?: SubResource;
/**
* Specifies the storage settings for the virtual machine disks.
*/
storageProfile?: ImageStorageProfile;
/**
* The provisioning state.
*/
readonly provisioningState?: string;
}
/**
* The source user image virtual hard disk. Only tags may be updated.
*/
export interface ImageUpdate extends UpdateResource {
/**
* The source virtual machine from which Image is created.
*/
sourceVirtualMachine?: SubResource;
/**
* Specifies the storage settings for the virtual machine disks.
*/
storageProfile?: ImageStorageProfile;
/**
* The provisioning state.
*/
readonly provisioningState?: string;
}
/**
 * A user assigned identity (principal id and client id) referenced from the scale set identity's
 * userAssignedIdentities dictionary.
 */
export interface VirtualMachineScaleSetIdentityUserAssignedIdentitiesValue {
  /**
   * The principal id of user assigned identity.
   */
  readonly principalId?: string;
  /**
   * The client id of user assigned identity.
   */
  readonly clientId?: string;
}
/**
 * Identity for the virtual machine scale set.
 */
export interface VirtualMachineScaleSetIdentity {
  /**
   * The principal id of virtual machine scale set identity. This property will only be provided
   * for a system assigned identity.
   */
  readonly principalId?: string;
  /**
   * The tenant id associated with the virtual machine scale set. This property will only be
   * provided for a system assigned identity.
   */
  readonly tenantId?: string;
  /**
   * The type of identity used for the virtual machine scale set. The type 'SystemAssigned,
   * UserAssigned' includes both an implicitly created identity and a set of user assigned
   * identities. The type 'None' will remove any identities from the virtual machine scale set.
   * Possible values include: 'SystemAssigned', 'UserAssigned', 'SystemAssigned, UserAssigned',
   * 'None'
   */
  type?: string;
  /**
   * The list of user identities associated with the virtual machine scale set. The user identity
   * dictionary key references will be ARM resource ids in the form:
   * '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
   */
  userAssignedIdentities?: { [propertyName: string]:
  VirtualMachineScaleSetIdentityUserAssignedIdentitiesValue };
}
/**
 * Describes a virtual machine scale set OS profile.
 */
export interface VirtualMachineScaleSetOSProfile {
  /**
   * Specifies the computer name prefix for all of the virtual machines in the scale set. Computer
   * name prefixes must be 1 to 15 characters long.
   */
  computerNamePrefix?: string;
  /**
   * Specifies the name of the administrator account. <br><br> **Windows-only restriction:** Cannot
   * end in "." <br><br> **Disallowed values:** "administrator", "admin", "user", "user1", "test",
   * "user2", "test1", "user3", "admin1", "1", "123", "a", "actuser", "adm", "admin2", "aspnet",
   * "backup", "console", "david", "guest", "john", "owner", "root", "server", "sql", "support",
   * "support_388945a0", "sys", "test2", "test3", "user4", "user5". <br><br> **Minimum-length
   * (Linux):** 1 character <br><br> **Max-length (Linux):** 64 characters <br><br> **Max-length
   * (Windows):** 20 characters <br><br><li> For root access to the Linux VM, see [Using root
   * privileges on Linux virtual machines in
   * Azure](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-use-root-privileges?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json)<br><li>
   * For a list of built-in system users on Linux that should not be used in this field, see
   * [Selecting User Names for Linux on
   * Azure](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-usernames?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json)
   */
  adminUsername?: string;
  /**
   * Specifies the password of the administrator account. <br><br> **Minimum-length (Windows):** 8
   * characters <br><br> **Minimum-length (Linux):** 6 characters <br><br> **Max-length
   * (Windows):** 123 characters <br><br> **Max-length (Linux):** 72 characters <br><br>
   * **Complexity requirements:** 3 out of 4 conditions below need to be fulfilled <br> Has lower
   * characters <br>Has upper characters <br> Has a digit <br> Has a special character (Regex match
   * [\W_]) <br><br> **Disallowed values:** "abc@123", "P@$$w0rd", "P@ssw0rd", "P@ssword123",
   * "Pa$$word", "pass@word1", "Password!", "Password1", "Password22", "iloveyou!" <br><br> For
   * resetting the password, see [How to reset the Remote Desktop service or its login password in
   * a Windows
   * VM](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-reset-rdp?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json)
   * <br><br> For resetting root password, see [Manage users, SSH, and check or repair disks on
   * Azure Linux VMs using the VMAccess
   * Extension](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-using-vmaccess-extension?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json#reset-root-password)
   */
  adminPassword?: string;
  /**
   * Specifies a base-64 encoded string of custom data. The base-64 encoded string is decoded to a
   * binary array that is saved as a file on the Virtual Machine. The maximum length of the binary
   * array is 65535 bytes. <br><br> For using cloud-init for your VM, see [Using cloud-init to
   * customize a Linux VM during
   * creation](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-using-cloud-init?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json)
   */
  customData?: string;
  /**
   * Specifies Windows operating system settings on the virtual machine.
   */
  windowsConfiguration?: WindowsConfiguration;
  /**
   * Specifies the Linux operating system settings on the virtual machine. <br><br>For a list of
   * supported Linux distributions, see [Linux on Azure-Endorsed
   * Distributions](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-endorsed-distros?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json)
   * <br><br> For running non-endorsed distributions, see [Information for Non-Endorsed
   * Distributions](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-create-upload-generic?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json).
   */
  linuxConfiguration?: LinuxConfiguration;
  /**
   * Specifies set of certificates that should be installed onto the virtual machines in the scale
   * set.
   */
  secrets?: VaultSecretGroup[];
}
/**
 * Describes a virtual machine scale set OS profile.
 */
export interface VirtualMachineScaleSetUpdateOSProfile {
  /**
   * A base-64 encoded string of custom data.
   */
  customData?: string;
  /**
   * The Windows Configuration of the OS profile.
   */
  windowsConfiguration?: WindowsConfiguration;
  /**
   * The Linux Configuration of the OS profile.
   */
  linuxConfiguration?: LinuxConfiguration;
  /**
   * The List of certificates for addition to the VM.
   */
  secrets?: VaultSecretGroup[];
}
/**
 * Describes the parameters of a ScaleSet managed disk.
 */
export interface VirtualMachineScaleSetManagedDiskParameters {
  /**
   * Specifies the storage account type for the managed disk. NOTE: UltraSSD_LRS can only be used
   * with data disks, it cannot be used with OS Disk. Possible values include: 'Standard_LRS',
   * 'Premium_LRS', 'StandardSSD_LRS', 'UltraSSD_LRS'
   */
  storageAccountType?: string;
}
/**
 * Describes a virtual machine scale set operating system disk.
 */
export interface VirtualMachineScaleSetOSDisk {
  /**
   * The disk name.
   */
  name?: string;
  /**
   * Specifies the caching requirements. <br><br> Possible values are: <br><br> **None** <br><br>
   * **ReadOnly** <br><br> **ReadWrite** <br><br> Default: **None for Standard storage. ReadOnly
   * for Premium storage**. Possible values include: 'None', 'ReadOnly', 'ReadWrite'
   */
  caching?: string;
  /**
   * Specifies whether writeAccelerator should be enabled or disabled on the disk.
   */
  writeAcceleratorEnabled?: boolean;
  /**
   * Specifies how the virtual machines in the scale set should be created.<br><br> The only
   * allowed value is: **FromImage** - This value is used when you are using an image to
   * create the virtual machine. If you are using a platform image, you also use the imageReference
   * element described above. If you are using a marketplace image, you also use the plan element
   * previously described. Possible values include: 'FromImage', 'Empty', 'Attach'
   */
  createOption: string;
  /**
   * Specifies the ephemeral disk Settings for the operating system disk used by the virtual
   * machine scale set.
   */
  diffDiskSettings?: DiffDiskSettings;
  /**
   * Specifies the size of the operating system disk in gigabytes. This element can be used to
   * overwrite the size of the disk in a virtual machine image. <br><br> This value cannot be
   * larger than 1023 GB
   */
  diskSizeGB?: number;
  /**
   * This property allows you to specify the type of the OS that is included in the disk if
   * creating a VM from user-image or a specialized VHD. <br><br> Possible values are: <br><br>
   * **Windows** <br><br> **Linux**. Possible values include: 'Windows', 'Linux'
   */
  osType?: string;
  /**
   * Specifies information about the unmanaged user image to base the scale set on.
   */
  image?: VirtualHardDisk;
  /**
   * Specifies the container urls that are used to store operating system disks for the scale set.
   */
  vhdContainers?: string[];
  /**
   * The managed disk parameters.
   */
  managedDisk?: VirtualMachineScaleSetManagedDiskParameters;
}
/**
 * Describes virtual machine scale set operating system disk Update Object. This should be used for
 * Updating VMSS OS Disk.
 */
export interface VirtualMachineScaleSetUpdateOSDisk {
  /**
   * The caching type. Possible values include: 'None', 'ReadOnly', 'ReadWrite'
   */
  caching?: string;
  /**
   * Specifies whether writeAccelerator should be enabled or disabled on the disk.
   */
  writeAcceleratorEnabled?: boolean;
  /**
   * Specifies the size of the operating system disk in gigabytes. This element can be used to
   * overwrite the size of the disk in a virtual machine image. <br><br> This value cannot be
   * larger than 1023 GB
   */
  diskSizeGB?: number;
  /**
   * The Source User Image VirtualHardDisk. This VirtualHardDisk will be copied before using it to
   * attach to the Virtual Machine. If SourceImage is provided, the destination VirtualHardDisk
   * should not exist.
   */
  image?: VirtualHardDisk;
  /**
   * The list of virtual hard disk container uris.
   */
  vhdContainers?: string[];
  /**
   * The managed disk parameters.
   */
  managedDisk?: VirtualMachineScaleSetManagedDiskParameters;
}
/**
 * Describes a virtual machine scale set data disk.
 */
export interface VirtualMachineScaleSetDataDisk {
  /**
   * The disk name.
   */
  name?: string;
  /**
   * Specifies the logical unit number of the data disk. This value is used to identify data disks
   * within the VM and therefore must be unique for each data disk attached to a VM.
   */
  lun: number;
  /**
   * Specifies the caching requirements. <br><br> Possible values are: <br><br> **None** <br><br>
   * **ReadOnly** <br><br> **ReadWrite** <br><br> Default: **None for Standard storage. ReadOnly
   * for Premium storage**. Possible values include: 'None', 'ReadOnly', 'ReadWrite'
   */
  caching?: string;
  /**
   * Specifies whether writeAccelerator should be enabled or disabled on the disk.
   */
  writeAcceleratorEnabled?: boolean;
  /**
   * The create option. Possible values include: 'FromImage', 'Empty', 'Attach'
   */
  createOption: string;
  /**
   * Specifies the size of an empty data disk in gigabytes. This element can be used to overwrite
   * the size of the disk in a virtual machine image. <br><br> This value cannot be larger than
   * 1023 GB
   */
  diskSizeGB?: number;
  /**
   * The managed disk parameters.
   */
  managedDisk?: VirtualMachineScaleSetManagedDiskParameters;
}
/**
 * Describes a virtual machine scale set storage profile.
 */
export interface VirtualMachineScaleSetStorageProfile {
  /**
   * Specifies information about the image to use. You can specify information about platform
   * images, marketplace images, or virtual machine images. This element is required when you want
   * to use a platform image, marketplace image, or virtual machine image, but is not used in other
   * creation operations.
   */
  imageReference?: ImageReference;
  /**
   * Specifies information about the operating system disk used by the virtual machines in the
   * scale set. <br><br> For more information about disks, see [About disks and VHDs for Azure
   * virtual
   * machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-about-disks-vhds?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json).
   */
  osDisk?: VirtualMachineScaleSetOSDisk;
  /**
   * Specifies the parameters that are used to add data disks to the virtual machines in the scale
   * set. <br><br> For more information about disks, see [About disks and VHDs for Azure virtual
   * machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-about-disks-vhds?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json).
   */
  dataDisks?: VirtualMachineScaleSetDataDisk[];
}
/**
 * Describes a virtual machine scale set storage profile.
 */
export interface VirtualMachineScaleSetUpdateStorageProfile {
  /**
   * The image reference.
   */
  imageReference?: ImageReference;
  /**
   * The OS disk.
   */
  osDisk?: VirtualMachineScaleSetUpdateOSDisk;
  /**
   * The data disks.
   */
  dataDisks?: VirtualMachineScaleSetDataDisk[];
}
/**
 * The API entity reference.
 */
export interface ApiEntityReference {
  /**
   * The ARM resource id in the form of
   * /subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/...
   */
  id?: string;
}
/**
 * Describes a virtual machine scale set network configuration's DNS settings.
 */
export interface VirtualMachineScaleSetPublicIPAddressConfigurationDnsSettings {
  /**
   * The Domain name label. The concatenation of the domain name label and vm index will be the
   * domain name labels of the PublicIPAddress resources that will be created
   */
  domainNameLabel: string;
}
/**
 * Contains the IP tag associated with the public IP address.
 */
export interface VirtualMachineScaleSetIpTag {
  /**
   * IP tag type. Example: FirstPartyUsage.
   */
  ipTagType?: string;
  /**
   * IP tag associated with the public IP. Example: SQL, Storage etc.
   */
  tag?: string;
}
/**
 * Describes a virtual machine scale set IP Configuration's PublicIPAddress configuration
 */
export interface VirtualMachineScaleSetPublicIPAddressConfiguration {
  /**
   * The publicIP address configuration name.
   */
  name: string;
  /**
   * The idle timeout of the public IP address.
   */
  idleTimeoutInMinutes?: number;
  /**
   * The dns settings to be applied on the publicIP addresses.
   */
  dnsSettings?: VirtualMachineScaleSetPublicIPAddressConfigurationDnsSettings;
  /**
   * The list of IP tags associated with the public IP address.
   */
  ipTags?: VirtualMachineScaleSetIpTag[];
  /**
   * The PublicIPPrefix from which to allocate publicIP addresses.
   */
  publicIPPrefix?: SubResource;
}
/**
 * Describes a virtual machine scale set IP Configuration's PublicIPAddress configuration
 */
export interface VirtualMachineScaleSetUpdatePublicIPAddressConfiguration {
  /**
   * The publicIP address configuration name.
   */
  name?: string;
  /**
   * The idle timeout of the public IP address.
   */
  idleTimeoutInMinutes?: number;
  /**
   * The dns settings to be applied on the publicIP addresses.
   */
  dnsSettings?: VirtualMachineScaleSetPublicIPAddressConfigurationDnsSettings;
}
/**
 * Describes a virtual machine scale set network profile's IP configuration.
 */
export interface VirtualMachineScaleSetIPConfiguration extends SubResource {
  /**
   * The IP configuration name.
   */
  name: string;
  /**
   * Specifies the identifier of the subnet.
   */
  subnet?: ApiEntityReference;
  /**
   * Specifies the primary network interface in case the virtual machine has more than 1 network
   * interface.
   */
  primary?: boolean;
  /**
   * The publicIPAddressConfiguration.
   */
  publicIPAddressConfiguration?: VirtualMachineScaleSetPublicIPAddressConfiguration;
  /**
   * Available from Api-Version 2017-03-30 onwards, it represents whether the specific
   * ipconfiguration is IPv4 or IPv6. Default is taken as IPv4. Possible values are: 'IPv4' and
   * 'IPv6'. Possible values include: 'IPv4', 'IPv6'
   */
  privateIPAddressVersion?: string;
  /**
   * Specifies an array of references to backend address pools of application gateways. A scale set
   * can reference backend address pools of multiple application gateways. Multiple scale sets
   * cannot use the same application gateway.
   */
  applicationGatewayBackendAddressPools?: SubResource[];
  /**
   * Specifies an array of references to application security group.
   */
  applicationSecurityGroups?: SubResource[];
  /**
   * Specifies an array of references to backend address pools of load balancers. A scale set can
   * reference backend address pools of one public and one internal load balancer. Multiple scale
   * sets cannot use the same load balancer.
   */
  loadBalancerBackendAddressPools?: SubResource[];
  /**
   * Specifies an array of references to inbound Nat pools of the load balancers. A scale set can
   * reference inbound nat pools of one public and one internal load balancer. Multiple scale sets
   * cannot use the same load balancer.
   */
  loadBalancerInboundNatPools?: SubResource[];
}
/**
 * Describes a virtual machine scale set network profile's IP configuration.
 */
export interface VirtualMachineScaleSetUpdateIPConfiguration extends SubResource {
  /**
   * The IP configuration name.
   */
  name?: string;
  /**
   * The subnet.
   */
  subnet?: ApiEntityReference;
  /**
   * Specifies the primary IP Configuration in case the network interface has more than one IP
   * Configuration.
   */
  primary?: boolean;
  /**
   * The publicIPAddressConfiguration.
   */
  publicIPAddressConfiguration?: VirtualMachineScaleSetUpdatePublicIPAddressConfiguration;
  /**
   * Available from Api-Version 2017-03-30 onwards, it represents whether the specific
   * ipconfiguration is IPv4 or IPv6. Default is taken as IPv4. Possible values are: 'IPv4' and
   * 'IPv6'. Possible values include: 'IPv4', 'IPv6'
   */
  privateIPAddressVersion?: string;
  /**
   * The application gateway backend address pools.
   */
  applicationGatewayBackendAddressPools?: SubResource[];
  /**
   * Specifies an array of references to application security group.
   */
  applicationSecurityGroups?: SubResource[];
  /**
   * The load balancer backend address pools.
   */
  loadBalancerBackendAddressPools?: SubResource[];
  /**
   * The load balancer inbound nat pools.
   */
  loadBalancerInboundNatPools?: SubResource[];
}
/**
 * Describes a virtual machine scale set network configuration's DNS settings.
 */
export interface VirtualMachineScaleSetNetworkConfigurationDnsSettings {
  /**
   * List of DNS server IP addresses
   */
  dnsServers?: string[];
}
/**
 * Describes a virtual machine scale set network profile's network configurations.
 */
export interface VirtualMachineScaleSetNetworkConfiguration extends SubResource {
  /**
   * The network configuration name.
   */
  name: string;
  /**
   * Specifies the primary network interface in case the virtual machine has more than 1 network
   * interface.
   */
  primary?: boolean;
  /**
   * Specifies whether the network interface is accelerated networking-enabled.
   */
  enableAcceleratedNetworking?: boolean;
  /**
   * The network security group.
   */
  networkSecurityGroup?: SubResource;
  /**
   * The dns settings to be applied on the network interfaces.
   */
  dnsSettings?: VirtualMachineScaleSetNetworkConfigurationDnsSettings;
  /**
   * Specifies the IP configurations of the network interface.
   */
  ipConfigurations: VirtualMachineScaleSetIPConfiguration[];
  /**
   * Whether IP forwarding is enabled on this NIC.
   */
  enableIPForwarding?: boolean;
}
/**
 * Describes a virtual machine scale set network profile's network configurations.
 */
export interface VirtualMachineScaleSetUpdateNetworkConfiguration extends SubResource {
  /**
   * The network configuration name.
   */
  name?: string;
  /**
   * Whether this is a primary NIC on a virtual machine.
   */
  primary?: boolean;
  /**
   * Specifies whether the network interface is accelerated networking-enabled.
   */
  enableAcceleratedNetworking?: boolean;
  /**
   * The network security group.
   */
  networkSecurityGroup?: SubResource;
  /**
   * The dns settings to be applied on the network interfaces.
   */
  dnsSettings?: VirtualMachineScaleSetNetworkConfigurationDnsSettings;
  /**
   * The virtual machine scale set IP Configuration.
   */
  ipConfigurations?: VirtualMachineScaleSetUpdateIPConfiguration[];
  /**
   * Whether IP forwarding is enabled on this NIC.
   */
  enableIPForwarding?: boolean;
}
/**
 * Describes a virtual machine scale set network profile.
 */
export interface VirtualMachineScaleSetNetworkProfile {
  /**
   * A reference to a load balancer probe used to determine the health of an instance in the
   * virtual machine scale set. The reference will be in the form:
   * '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/probes/{probeName}'.
   */
  healthProbe?: ApiEntityReference;
  /**
   * The list of network configurations.
   */
  networkInterfaceConfigurations?: VirtualMachineScaleSetNetworkConfiguration[];
}
/**
 * Describes a virtual machine scale set network profile.
 */
export interface VirtualMachineScaleSetUpdateNetworkProfile {
  /**
   * The list of network configurations.
   */
  networkInterfaceConfigurations?: VirtualMachineScaleSetUpdateNetworkConfiguration[];
}
/**
 * A sub-resource exposing only a read-only resource Id.
 */
export interface SubResourceReadOnly extends BaseResource {
  /**
   * Resource Id
   */
  readonly id?: string;
}
/**
 * Describes a Virtual Machine Scale Set Extension.
 */
export interface VirtualMachineScaleSetExtension extends SubResourceReadOnly {
  /**
   * The name of the extension.
   */
  name?: string;
  /**
   * If a value is provided and is different from the previous value, the extension handler will be
   * forced to update even if the extension configuration has not changed.
   */
  forceUpdateTag?: string;
  /**
   * The name of the extension handler publisher.
   */
  publisher?: string;
  /**
   * Specifies the type of the extension; an example is "CustomScriptExtension".
   */
  type?: string;
  /**
   * Specifies the version of the script handler.
   */
  typeHandlerVersion?: string;
  /**
   * Indicates whether the extension should use a newer minor version if one is available at
   * deployment time. Once deployed, however, the extension will not upgrade minor versions unless
   * redeployed, even with this property set to true.
   */
  autoUpgradeMinorVersion?: boolean;
  /**
   * Json formatted public settings for the extension.
   */
  settings?: any;
  /**
   * The extension can contain either protectedSettings or protectedSettingsFromKeyVault or no
   * protected settings at all.
   */
  protectedSettings?: any;
  /**
   * The provisioning state, which only appears in the response.
   */
  readonly provisioningState?: string;
  /**
   * Collection of extension names after which this extension needs to be provisioned.
   */
  provisionAfterExtensions?: string[];
}
/**
 * Describes a virtual machine scale set extension profile.
 */
export interface VirtualMachineScaleSetExtensionProfile {
  /**
   * The virtual machine scale set child extension resources.
   */
  extensions?: VirtualMachineScaleSetExtension[];
}
/**
 * Describes a virtual machine scale set virtual machine profile.
 */
export interface VirtualMachineScaleSetVMProfile {
  /**
   * Specifies the operating system settings for the virtual machines in the scale set.
   */
  osProfile?: VirtualMachineScaleSetOSProfile;
  /**
   * Specifies the storage settings for the virtual machine disks.
   */
  storageProfile?: VirtualMachineScaleSetStorageProfile;
  /**
   * Specifies additional capabilities enabled or disabled on the virtual machine in the scale set.
   * For instance: whether the virtual machine has the capability to support attaching managed data
   * disks with UltraSSD_LRS storage account type.
   */
  additionalCapabilities?: AdditionalCapabilities;
  /**
   * Specifies properties of the network interfaces of the virtual machines in the scale set.
   */
  networkProfile?: VirtualMachineScaleSetNetworkProfile;
  /**
   * Specifies the boot diagnostic settings state. <br><br>Minimum api-version: 2015-06-15.
   */
  diagnosticsProfile?: DiagnosticsProfile;
  /**
   * Specifies a collection of settings for extensions installed on virtual machines in the scale
   * set.
   */
  extensionProfile?: VirtualMachineScaleSetExtensionProfile;
  /**
   * Specifies that the image or disk that is being used was licensed on-premises. This element is
   * only used for images that contain the Windows Server operating system. <br><br> Possible
   * values are: <br><br> Windows_Client <br><br> Windows_Server <br><br> If this element is
   * included in a request for an update, the value must match the initial value. This value cannot
   * be updated. <br><br> For more information, see [Azure Hybrid Use Benefit for Windows
   * Server](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-hybrid-use-benefit-licensing?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json)
   * <br><br> Minimum api-version: 2015-06-15
   */
  licenseType?: string;
  /**
   * Specifies the priority for the virtual machines in the scale set. <br><br>Minimum api-version:
   * 2017-10-30-preview. Possible values include: 'Regular', 'Low'
   */
  priority?: string;
  /**
   * Specifies the eviction policy for virtual machines in a low priority scale set.
   * <br><br>Minimum api-version: 2017-10-30-preview. Possible values include: 'Deallocate',
   * 'Delete'
   */
  evictionPolicy?: string;
}
/**
 * Describes a virtual machine scale set virtual machine profile.
 */
export interface VirtualMachineScaleSetUpdateVMProfile {
  /**
   * The virtual machine scale set OS profile.
   */
  osProfile?: VirtualMachineScaleSetUpdateOSProfile;
  /**
   * The virtual machine scale set storage profile.
   */
  storageProfile?: VirtualMachineScaleSetUpdateStorageProfile;
  /**
   * The virtual machine scale set network profile.
   */
  networkProfile?: VirtualMachineScaleSetUpdateNetworkProfile;
  /**
   * The virtual machine scale set diagnostics profile.
   */
  diagnosticsProfile?: DiagnosticsProfile;
  /**
   * The virtual machine scale set extension profile.
   */
  extensionProfile?: VirtualMachineScaleSetExtensionProfile;
  /**
   * The license type, which is for bring your own license scenario.
   */
  licenseType?: string;
}
/**
 * Describes a Virtual Machine Scale Set.
 */
export interface VirtualMachineScaleSet extends Resource {
  /**
   * The virtual machine scale set sku.
   */
  sku?: Sku;
  /**
   * Specifies information about the marketplace image used to create the virtual machine. This
   * element is only used for marketplace images. Before you can use a marketplace image from an
   * API, you must enable the image for programmatic use. In the Azure portal, find the
   * marketplace image that you want to use and then click **Want to deploy programmatically, Get
   * Started ->**. Enter any required information and then click **Save**.
   */
  plan?: Plan;
  /**
   * The upgrade policy.
   */
  upgradePolicy?: UpgradePolicy;
  /**
   * The virtual machine profile.
   */
  virtualMachineProfile?: VirtualMachineScaleSetVMProfile;
  /**
   * The provisioning state, which only appears in the response.
   */
  readonly provisioningState?: string;
  /**
   * Specifies whether the Virtual Machine Scale Set should be overprovisioned.
   */
  overprovision?: boolean;
  /**
   * When Overprovision is enabled, extensions are launched only on the requested number of VMs
   * which are finally kept. This property will hence ensure that the extensions do not run on the
   * extra overprovisioned VMs.
   */
  doNotRunExtensionsOnOverprovisionedVMs?: boolean;
  /**
   * Specifies the ID which uniquely identifies a Virtual Machine Scale Set.
   */
  readonly uniqueId?: string;
  /**
   * When true this limits the scale set to a single placement group, of max size 100 virtual
   * machines.
   */
  singlePlacementGroup?: boolean;
  /**
   * Whether to force strictly even Virtual Machine distribution cross x-zones in case there is
   * zone outage.
   */
  zoneBalance?: boolean;
  /**
   * Fault Domain count for each placement group.
   */
  platformFaultDomainCount?: number;
  /**
   * The identity of the virtual machine scale set, if configured.
   */
  identity?: VirtualMachineScaleSetIdentity;
  /**
   * The virtual machine scale set zones.
   */
  zones?: string[];
}
/**
 * Describes a Virtual Machine Scale Set VM Reimage Parameters.
 */
export interface VirtualMachineScaleSetVMReimageParameters extends VirtualMachineReimageParameters
{
  // Marker type: adds no properties beyond VirtualMachineReimageParameters.
}
/**
 * Describes a Virtual Machine Scale Set VM Reimage Parameters.
 */
export interface VirtualMachineScaleSetReimageParameters extends
VirtualMachineScaleSetVMReimageParameters {
  /**
   * The virtual machine scale set instance ids. Omitting the virtual machine scale set instance
   * ids will result in the operation being performed on all virtual machines in the virtual
   * machine scale set.
   */
  instanceIds?: string[];
}
/**
 * Describes a Virtual Machine Scale Set.
 */
export interface VirtualMachineScaleSetUpdate extends UpdateResource {
  /**
   * The virtual machine scale set sku.
   */
  sku?: Sku;
  /**
   * The purchase plan when deploying a virtual machine scale set from VM Marketplace images.
   */
  plan?: Plan;
  /**
   * The upgrade policy.
   */
  upgradePolicy?: UpgradePolicy;
  /**
   * The virtual machine profile.
   */
  virtualMachineProfile?: VirtualMachineScaleSetUpdateVMProfile;
  /**
   * Specifies whether the Virtual Machine Scale Set should be overprovisioned.
   */
  overprovision?: boolean;
  /**
   * When true this limits the scale set to a single placement group, of max size 100 virtual
   * machines.
   */
  singlePlacementGroup?: boolean;
  /**
   * The identity of the virtual machine scale set, if configured.
   */
  identity?: VirtualMachineScaleSetIdentity;
}
/**
 * Specifies a list of virtual machine instance IDs from the VM scale set.
 */
export interface VirtualMachineScaleSetVMInstanceIDs {
  /**
   * The virtual machine scale set instance ids. Omitting the virtual machine scale set instance
   * ids will result in the operation being performed on all virtual machines in the virtual
   * machine scale set.
   */
  instanceIds?: string[];
}
/**
 * Specifies a list of virtual machine instance IDs from the VM scale set.
 */
export interface VirtualMachineScaleSetVMInstanceRequiredIDs {
  /**
   * The virtual machine scale set instance ids.
   */
  instanceIds: string[];
}
/**
 * The status code and count of the virtual machine scale set instance view status summary.
 */
export interface VirtualMachineStatusCodeCount {
  /**
   * The instance view status code.
   */
  readonly code?: string;
  /**
   * The number of instances having a particular status code.
   */
  readonly count?: number;
}
/**
 * Instance view statuses summary for virtual machines of a virtual machine scale set.
 */
export interface VirtualMachineScaleSetInstanceViewStatusesSummary {
  /**
   * The extensions information.
   */
  readonly statusesSummary?: VirtualMachineStatusCodeCount[];
}
/**
* Extensions summary for virtual machines of a virtual machine scale set.
*/
export interface VirtualMachineScaleSetVMExtensionsSummary {
/**
* The extension name.
*/
readonly name?: string;
/**
* The extensions information.
*/
readonly statusesSummary?: VirtualMachineStatusCodeCount[];
}
/**
* The instance view of a virtual machine scale set.
*/
export interface VirtualMachineScaleSetInstanceView {
/**
* The instance view status summary for the virtual machine scale set.
*/
readonly virtualMachine?: VirtualMachineScaleSetInstanceViewStatusesSummary;
/**
* The extensions information.
*/
readonly extensions?: VirtualMachineScaleSetVMExtensionsSummary[];
/**
* The resource status information.
*/
statuses?: InstanceViewStatus[];
}
/**
 * Describes scaling information of a sku.
 */
export interface VirtualMachineScaleSetSkuCapacity {
  /**
   * The minimum capacity.
   */
  readonly minimum?: number;
  /**
   * The maximum capacity that can be set.
   */
  readonly maximum?: number;
  /**
   * The default capacity.
   */
  readonly defaultCapacity?: number;
  /**
   * The scale type applicable to the sku. Possible values include: 'Automatic', 'None'
   */
  readonly scaleType?: string;
}
/**
 * Describes an available virtual machine scale set sku.
 */
export interface VirtualMachineScaleSetSku {
  /**
   * The type of resource the sku applies to.
   */
  readonly resourceType?: string;
  /**
   * The Sku.
   */
  readonly sku?: Sku;
  /**
   * Specifies the number of virtual machines in the scale set.
   */
  readonly capacity?: VirtualMachineScaleSetSkuCapacity;
}
/**
 * Api error base.
 */
export interface ApiErrorBase {
  /**
   * The error code.
   */
  code?: string;
  /**
   * The target of the particular error.
   */
  target?: string;
  /**
   * The error message.
   */
  message?: string;
}
/**
 * Inner error details.
 */
export interface InnerError {
  /**
   * The exception type.
   */
  exceptiontype?: string;
  /**
   * The internal error message or exception dump.
   */
  errordetail?: string;
}
/**
 * Api error.
 */
export interface ApiError {
  /**
   * The Api error details
   */
  details?: ApiErrorBase[];
  /**
   * The Api inner error
   */
  innererror?: InnerError;
  /**
   * The error code.
   */
  code?: string;
  /**
   * The target of the particular error.
   */
  target?: string;
  /**
   * The error message.
   */
  message?: string;
}
/**
 * Information about rollback on failed VM instances after a OS Upgrade operation.
 */
export interface RollbackStatusInfo {
  /**
   * The number of instances which have been successfully rolled back.
   */
  readonly successfullyRolledbackInstanceCount?: number;
  /**
   * The number of instances which failed to rollback.
   */
  readonly failedRolledbackInstanceCount?: number;
  /**
   * Error details if OS rollback failed.
   */
  readonly rollbackError?: ApiError;
}
/**
 * Information about the current running state of the overall upgrade.
 */
export interface UpgradeOperationHistoryStatus {
  /**
   * Code indicating the current status of the upgrade. Possible values include: 'RollingForward',
   * 'Cancelled', 'Completed', 'Faulted'
   */
  readonly code?: string;
  /**
   * Start time of the upgrade.
   */
  readonly startTime?: Date;
  /**
   * End time of the upgrade.
   */
  readonly endTime?: Date;
}
/**
 * Information about the number of virtual machine instances in each upgrade state.
 */
export interface RollingUpgradeProgressInfo {
  /**
   * The number of instances that have been successfully upgraded.
   */
  readonly successfulInstanceCount?: number;
  /**
   * The number of instances that have failed to be upgraded successfully.
   */
  readonly failedInstanceCount?: number;
  /**
   * The number of instances that are currently being upgraded.
   */
  readonly inProgressInstanceCount?: number;
  /**
   * The number of instances that have not yet begun to be upgraded.
   */
  readonly pendingInstanceCount?: number;
}
/**
 * Describes each OS upgrade on the Virtual Machine Scale Set.
 */
export interface UpgradeOperationHistoricalStatusInfoProperties {
  /**
   * Information about the overall status of the upgrade operation.
   */
  readonly runningStatus?: UpgradeOperationHistoryStatus;
  /**
   * Counts of the VMs in each state.
   */
  readonly progress?: RollingUpgradeProgressInfo;
  /**
   * Error Details for this upgrade if there are any.
   */
  readonly error?: ApiError;
  /**
   * Invoker of the Upgrade Operation. Possible values include: 'Unknown', 'User', 'Platform'
   */
  readonly startedBy?: string;
  /**
   * Image Reference details
   */
  readonly targetImageReference?: ImageReference;
  /**
   * Information about OS rollback if performed
   */
  readonly rollbackInfo?: RollbackStatusInfo;
}
/**
 * Virtual Machine Scale Set OS Upgrade History operation response.
 */
export interface UpgradeOperationHistoricalStatusInfo {
  /**
   * Information about the properties of the upgrade operation.
   */
  readonly properties?: UpgradeOperationHistoricalStatusInfoProperties;
  /**
   * Resource type
   */
  readonly type?: string;
  /**
   * Resource location
   */
  readonly location?: string;
}
/**
 * The health status of the VM.
 */
export interface VirtualMachineHealthStatus {
  /**
   * The health status information for the VM.
   */
  readonly status?: InstanceViewStatus;
}
/**
 * The instance view of a virtual machine scale set VM.
 */
export interface VirtualMachineScaleSetVMInstanceView {
  /**
   * The Update Domain count.
   */
  platformUpdateDomain?: number;
  /**
   * The Fault Domain count.
   */
  platformFaultDomain?: number;
  /**
   * The Remote desktop certificate thumbprint.
   */
  rdpThumbPrint?: string;
  /**
   * The VM Agent running on the virtual machine.
   */
  vmAgent?: VirtualMachineAgentInstanceView;
  /**
   * The Maintenance Operation status on the virtual machine.
   */
  maintenanceRedeployStatus?: MaintenanceRedeployStatus;
  /**
   * The disks information.
   */
  disks?: DiskInstanceView[];
  /**
   * The extensions information.
   */
  extensions?: VirtualMachineExtensionInstanceView[];
  /**
   * The health status for the VM.
   */
  readonly vmHealth?: VirtualMachineHealthStatus;
  /**
   * Boot Diagnostics is a debugging feature which allows you to view Console Output and Screenshot
   * to diagnose VM status. <br><br> You can easily view the output of your console log. <br><br>
   * Azure also enables you to see a screenshot of the VM from the hypervisor.
   */
  bootDiagnostics?: BootDiagnosticsInstanceView;
  /**
   * The resource status information.
   */
  statuses?: InstanceViewStatus[];
  /**
   * The placement group in which the VM is running. If the VM is deallocated it will not have a
   * placementGroupId.
   */
  placementGroupId?: string;
}
/**
 * Describes a virtual machine scale set VM network profile.
 */
export interface VirtualMachineScaleSetVMNetworkProfileConfiguration {
  /**
   * The list of network configurations.
   */
  networkInterfaceConfigurations?: VirtualMachineScaleSetNetworkConfiguration[];
}
/**
 * The protection policy of a virtual machine scale set VM.
 */
export interface VirtualMachineScaleSetVMProtectionPolicy {
  /**
   * Indicates that the virtual machine scale set VM shouldn't be considered for deletion during a
   * scale-in operation.
   */
  protectFromScaleIn?: boolean;
  /**
   * Indicates that model updates or actions (including scale-in) initiated on the virtual machine
   * scale set should not be applied to the virtual machine scale set VM.
   */
  protectFromScaleSetActions?: boolean;
}
/**
 * Describes a virtual machine scale set virtual machine.
 */
export interface VirtualMachineScaleSetVM extends Resource {
  /**
   * The virtual machine instance ID.
   */
  readonly instanceId?: string;
  /**
   * The virtual machine SKU.
   */
  readonly sku?: Sku;
  /**
   * Specifies whether the latest model has been applied to the virtual machine.
   */
  readonly latestModelApplied?: boolean;
  /**
   * Azure VM unique ID.
   */
  readonly vmId?: string;
  /**
   * The virtual machine instance view.
   */
  readonly instanceView?: VirtualMachineScaleSetVMInstanceView;
  /**
   * Specifies the hardware settings for the virtual machine.
   */
  hardwareProfile?: HardwareProfile;
  /**
   * Specifies the storage settings for the virtual machine disks.
   */
  storageProfile?: StorageProfile;
  /**
   * Specifies additional capabilities enabled or disabled on the virtual machine in the scale set.
   * For instance: whether the virtual machine has the capability to support attaching managed data
   * disks with UltraSSD_LRS storage account type.
   */
  additionalCapabilities?: AdditionalCapabilities;
  /**
   * Specifies the operating system settings for the virtual machine.
   */
  osProfile?: OSProfile;
  /**
   * Specifies the network interfaces of the virtual machine.
   */
  networkProfile?: NetworkProfile;
  /**
   * Specifies the network profile configuration of the virtual machine.
   */
  networkProfileConfiguration?: VirtualMachineScaleSetVMNetworkProfileConfiguration;
  /**
   * Specifies the boot diagnostic settings state. <br><br>Minimum api-version: 2015-06-15.
   */
  diagnosticsProfile?: DiagnosticsProfile;
  /**
   * Specifies information about the availability set that the virtual machine should be assigned
   * to. Virtual machines specified in the same availability set are allocated to different nodes
   * to maximize availability. For more information about availability sets, see [Manage the
   * availability of virtual
   * machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-manage-availability?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json).
   * <br><br> For more information on Azure planned maintenance, see [Planned maintenance for
   * virtual machines in
   * Azure](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-planned-maintenance?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json)
   * <br><br> Currently, a VM can only be added to availability set at creation time. An existing
   * VM cannot be added to an availability set.
   */
  availabilitySet?: SubResource;
  /**
   * The provisioning state, which only appears in the response.
   */
  readonly provisioningState?: string;
  /**
   * Specifies that the image or disk that is being used was licensed on-premises. This element is
   * only used for images that contain the Windows Server operating system. <br><br> Possible
   * values are: <br><br> Windows_Client <br><br> Windows_Server <br><br> If this element is
   * included in a request for an update, the value must match the initial value. This value cannot
   * be updated. <br><br> For more information, see [Azure Hybrid Use Benefit for Windows
   * Server](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-hybrid-use-benefit-licensing?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json)
   * <br><br> Minimum api-version: 2015-06-15
   */
  licenseType?: string;
  /**
   * Specifies whether the model applied to the virtual machine is the model of the virtual machine
   * scale set or the customized model for the virtual machine.
   */
  readonly modelDefinitionApplied?: string;
  /**
   * Specifies the protection policy of the virtual machine.
   */
  protectionPolicy?: VirtualMachineScaleSetVMProtectionPolicy;
  /**
   * Specifies information about the marketplace image used to create the virtual machine. This
   * element is only used for marketplace images. Before you can use a marketplace image from an
   * API, you must enable the image for programmatic use. In the Azure portal, find the
   * marketplace image that you want to use and then click **Want to deploy programmatically, Get
   * Started ->**. Enter any required information and then click **Save**.
   */
  plan?: Plan;
  /**
   * The virtual machine child extension resources.
   */
  readonly resources?: VirtualMachineExtension[];
  /**
   * The virtual machine zones.
   */
  readonly zones?: string[];
}
/**
 * Information about the current running state of the overall upgrade.
 */
export interface RollingUpgradeRunningStatus {
  /**
   * Code indicating the current status of the upgrade. Possible values include: 'RollingForward',
   * 'Cancelled', 'Completed', 'Faulted'
   */
  readonly code?: string;
  /**
   * Start time of the upgrade.
   */
  readonly startTime?: Date;
  /**
   * The last action performed on the rolling upgrade. Possible values include: 'Start', 'Cancel'
   */
  readonly lastAction?: string;
  /**
   * Last action time of the upgrade.
   */
  readonly lastActionTime?: Date;
}
/**
 * The status of the latest virtual machine scale set rolling upgrade.
 */
export interface RollingUpgradeStatusInfo extends Resource {
  /**
   * The rolling upgrade policies applied for this upgrade.
   */
  readonly policy?: RollingUpgradePolicy;
  /**
   * Information about the current running state of the overall upgrade.
   */
  readonly runningStatus?: RollingUpgradeRunningStatus;
  /**
   * Information about the number of virtual machine instances in each upgrade state.
   */
  readonly progress?: RollingUpgradeProgressInfo;
  /**
   * Error details for this upgrade, if there are any.
   */
  readonly error?: ApiError;
}
/**
 * Response after calling a manual recovery walk
 */
export interface RecoveryWalkResponse {
  /**
   * Whether the recovery walk was performed
   */
  readonly walkPerformed?: boolean;
  /**
   * The next update domain that needs to be walked. Null means walk spanning all update domains
   * has been completed
   */
  readonly nextPlatformUpdateDomain?: number;
}
/**
 * Api input base class for LogAnalytics Api.
 */
export interface LogAnalyticsInputBase {
  /**
   * SAS Uri of the logging blob container to which LogAnalytics Api writes output logs to.
   */
  blobContainerSasUri: string;
  /**
   * From time of the query
   */
  fromTime: Date;
  /**
   * To time of the query
   */
  toTime: Date;
  /**
   * Group query result by Throttle Policy applied.
   */
  groupByThrottlePolicy?: boolean;
  /**
   * Group query result by Operation Name.
   */
  groupByOperationName?: boolean;
  /**
   * Group query result by Resource Name.
   */
  groupByResourceName?: boolean;
}
/**
 * Api request input for LogAnalytics getRequestRateByInterval Api.
 */
export interface RequestRateByIntervalInput extends LogAnalyticsInputBase {
  /**
   * Interval value in minutes used to create LogAnalytics call rate logs. Possible values include:
   * 'ThreeMins', 'FiveMins', 'ThirtyMins', 'SixtyMins'
   */
  intervalLength: string;
}
/**
 * Api request input for LogAnalytics getThrottledRequests Api.
 */
export interface ThrottledRequestsInput extends LogAnalyticsInputBase {
}
/**
 * LogAnalytics output properties
 */
export interface LogAnalyticsOutput {
  /**
   * Output file Uri path to blob container.
   */
  readonly output?: string;
}
/**
 * LogAnalytics operation status response
 */
export interface LogAnalyticsOperationResult {
  /**
   * The LogAnalytics output properties, including the output file Uri.
   */
  readonly properties?: LogAnalyticsOutput;
}
export interface VMScaleSetConvertToSinglePlacementGroupInput {
  /**
   * Id of the placement group in which you want future virtual machine instances to be placed. To
   * query placement group Id, please use Virtual Machine Scale Set VMs - Get API. If not provided,
   * the platform will choose one with maximum number of virtual machine instances.
   */
  activePlacementGroupId?: string;
}
/**
 * Describes the properties of a run command parameter.
 */
export interface RunCommandInputParameter {
  /**
   * The run command parameter name.
   */
  name: string;
  /**
   * The run command parameter value.
   */
  value: string;
}
/**
 * Run command operation input parameters.
 */
export interface RunCommandInput {
  /**
   * The run command id.
   */
  commandId: string;
  /**
   * Optional. The script to be executed. When this value is given, the given script will override
   * the default script of the command.
   */
  script?: string[];
  /**
   * The run command parameters.
   */
  parameters?: RunCommandInputParameter[];
}
/**
 * Describes the properties of a run command parameter.
 */
export interface RunCommandParameterDefinition {
  /**
   * The run command parameter name.
   */
  name: string;
  /**
   * The run command parameter type.
   */
  type: string;
  /**
   * The run command parameter default value.
   */
  defaultValue?: string;
  /**
   * The run command parameter required.
   */
  required?: boolean;
}
/**
 * Describes the properties of a Run Command metadata.
 */
export interface RunCommandDocumentBase {
  /**
   * The VM run command schema.
   */
  schema: string;
  /**
   * The VM run command id.
   */
  id: string;
  /**
   * The Operating System type. Possible values include: 'Windows', 'Linux'
   */
  osType: string;
  /**
   * The VM run command label.
   */
  label: string;
  /**
   * The VM run command description.
   */
  description: string;
}
/**
 * Describes the properties of a Run Command.
 */
export interface RunCommandDocument extends RunCommandDocumentBase {
  /**
   * The script to be executed.
   */
  script: string[];
  /**
   * The parameters used by the script.
   */
  parameters?: RunCommandParameterDefinition[];
}
export interface RunCommandResult {
  /**
   * Run command operation response.
   */
  value?: InstanceViewStatus[];
}
/**
 * Describes scaling information of a SKU.
 */
export interface ResourceSkuCapacity {
  /**
   * The minimum capacity.
   */
  readonly minimum?: number;
  /**
   * The maximum capacity that can be set.
   */
  readonly maximum?: number;
  /**
   * The default capacity.
   */
  readonly default?: number;
  /**
   * The scale type applicable to the sku. Possible values include: 'Automatic', 'Manual', 'None'
   */
  readonly scaleType?: string;
}
/**
 * Describes metadata for retrieving price info.
 */
export interface ResourceSkuCosts {
  /**
   * Used for querying price from commerce.
   */
  readonly meterID?: string;
  /**
   * The multiplier is needed to extend the base metered cost.
   */
  readonly quantity?: number;
  /**
   * An invariant to show the extended unit.
   */
  readonly extendedUnit?: string;
}
/**
 * Describes the SKU capabilities object.
 */
export interface ResourceSkuCapabilities {
  /**
   * An invariant to describe the feature.
   */
  readonly name?: string;
  /**
   * An invariant if the feature is measured by quantity.
   */
  readonly value?: string;
}
export interface ResourceSkuRestrictionInfo {
  /**
   * Locations where the SKU is restricted
   */
  readonly locations?: string[];
  /**
   * List of availability zones where the SKU is restricted.
   */
  readonly zones?: string[];
}
/**
 * Describes the restrictions that apply to a SKU.
 */
export interface ResourceSkuRestrictions {
  /**
   * The type of restrictions. Possible values include: 'Location', 'Zone'
   */
  readonly type?: string;
  /**
   * The value of restrictions. If the restriction type is set to location. This would be different
   * locations where the SKU is restricted.
   */
  readonly values?: string[];
  /**
   * The information about the restriction where the SKU cannot be used.
   */
  readonly restrictionInfo?: ResourceSkuRestrictionInfo;
  /**
   * The reason for restriction. Possible values include: 'QuotaId', 'NotAvailableForSubscription'
   */
  readonly reasonCode?: string;
}
export interface ResourceSkuLocationInfo {
  /**
   * Location of the SKU
   */
  readonly location?: string;
  /**
   * List of availability zones where the SKU is supported.
   */
  readonly zones?: string[];
}
/**
 * Describes an available Compute SKU.
 */
export interface ResourceSku {
  /**
   * The type of resource the SKU applies to.
   */
  readonly resourceType?: string;
  /**
   * The name of SKU.
   */
  readonly name?: string;
  /**
   * Specifies the tier of virtual machines in a scale set.<br /><br /> Possible Values:<br /><br
   * /> **Standard**<br /><br /> **Basic**
   */
  readonly tier?: string;
  /**
   * The Size of the SKU.
   */
  readonly size?: string;
  /**
   * The Family of this particular SKU.
   */
  readonly family?: string;
  /**
   * The Kind of resources that are supported in this SKU.
   */
  readonly kind?: string;
  /**
   * Specifies the number of virtual machines in the scale set.
   */
  readonly capacity?: ResourceSkuCapacity;
  /**
   * The set of locations that the SKU is available.
   */
  readonly locations?: string[];
  /**
   * A list of locations and availability zones in those locations where the SKU is available.
   */
  readonly locationInfo?: ResourceSkuLocationInfo[];
  /**
   * The api versions that support this SKU.
   */
  readonly apiVersions?: string[];
  /**
   * Metadata for retrieving price info.
   */
  readonly costs?: ResourceSkuCosts[];
  /**
   * A name value pair to describe the capability.
   */
  readonly capabilities?: ResourceSkuCapabilities[];
  /**
   * The restrictions because of which SKU cannot be used. This is empty if there are no
   * restrictions.
   */
  readonly restrictions?: ResourceSkuRestrictions[];
}
/**
 * The disks sku name. Can be Standard_LRS, Premium_LRS, StandardSSD_LRS, or UltraSSD_LRS.
 */
export interface DiskSku {
  /**
   * The sku name. Possible values include: 'Standard_LRS', 'Premium_LRS', 'StandardSSD_LRS',
   * 'UltraSSD_LRS'
   */
  name?: string;
  /**
   * The sku tier.
   */
  readonly tier?: string;
}
/**
 * The source image used for creating the disk.
 */
export interface ImageDiskReference {
  /**
   * A relative uri containing either a Platform Image Repository or user image reference.
   */
  id: string;
  /**
   * If the disk is created from an image's data disk, this is an index that indicates which of the
   * data disks in the image to use. For OS disks, this field is null.
   */
  lun?: number;
}
/**
 * Data used when creating a disk.
 */
export interface CreationData {
  /**
   * This enumerates the possible sources of a disk's creation. Possible values include: 'Empty',
   * 'Attach', 'FromImage', 'Import', 'Copy', 'Restore', 'Upload'
   */
  createOption: string;
  /**
   * If createOption is Import, the Azure Resource Manager identifier of the storage account
   * containing the blob to import as a disk. Required only if the blob is in a different
   * subscription
   */
  storageAccountId?: string;
  /**
   * Disk source information.
   */
  imageReference?: ImageDiskReference;
  /**
   * If createOption is Import, this is the URI of a blob to be imported into a managed disk.
   */
  sourceUri?: string;
  /**
   * If createOption is Copy, this is the ARM id of the source snapshot or disk.
   */
  sourceResourceId?: string;
}
/**
 * The vault id is an Azure Resource Manager Resource id in the form
 * /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/vaults/{vaultName}
 */
export interface SourceVault {
  /**
   * Resource Id
   */
  id?: string;
}
/**
 * Key Vault Secret Url and vault id of the encryption key
 */
export interface KeyVaultAndSecretReference {
  /**
   * Resource id of the KeyVault containing the key or secret
   */
  sourceVault: SourceVault;
  /**
   * Url pointing to a key or secret in KeyVault
   */
  secretUrl: string;
}
/**
 * Key Vault Key Url and vault id of KeK, KeK is optional and when provided is used to unwrap the
 * encryptionKey
 */
export interface KeyVaultAndKeyReference {
  /**
   * Resource id of the KeyVault containing the key or secret
   */
  sourceVault: SourceVault;
  /**
   * Url pointing to a key or secret in KeyVault
   */
  keyUrl: string;
}
/**
 * Encryption settings for one disk volume.
 */
export interface EncryptionSettingsElement {
  /**
   * Key Vault Secret Url and vault id of the disk encryption key
   */
  diskEncryptionKey?: KeyVaultAndSecretReference;
  /**
   * Key Vault Key Url and vault id of the key encryption key. KeyEncryptionKey is optional and
   * when provided is used to unwrap the disk encryption key.
   */
  keyEncryptionKey?: KeyVaultAndKeyReference;
}
/**
 * Encryption settings for disk or snapshot
 */
export interface EncryptionSettingsCollection {
  /**
   * Set this flag to true and provide DiskEncryptionKey and optional KeyEncryptionKey to enable
   * encryption. Set this flag to false and remove DiskEncryptionKey and KeyEncryptionKey to
   * disable encryption. If EncryptionSettings is null in the request object, the existing settings
   * remain unchanged.
   */
  enabled: boolean;
  /**
   * A collection of encryption settings, one for each disk volume.
   */
  encryptionSettings?: EncryptionSettingsElement[];
}
/**
 * Disk resource.
 */
export interface Disk extends Resource {
  /**
   * A relative URI containing the ID of the VM that has the disk attached.
   */
  readonly managedBy?: string;
  sku?: DiskSku;
  /**
   * The Logical zone list for Disk.
   */
  zones?: string[];
  /**
   * The time when the disk was created.
   */
  readonly timeCreated?: Date;
  /**
   * The Operating System type. Possible values include: 'Windows', 'Linux'
   */
  osType?: string;
  /**
   * The hypervisor generation of the Virtual Machine. Applicable to OS disks only. Possible values
   * include: 'V1', 'V2'
   */
  hyperVGeneration?: string;
  /**
   * Disk source information. CreationData information cannot be changed after the disk has been
   * created.
   */
  creationData: CreationData;
  /**
   * If creationData.createOption is Empty, this field is mandatory and it indicates the size of
   * the VHD to create. If this field is present for updates or creation with other options, it
   * indicates a resize. Resizes are only allowed if the disk is not attached to a running VM, and
   * can only increase the disk's size.
   */
  diskSizeGB?: number;
  /**
   * Encryption settings collection used for Azure Disk Encryption, can contain multiple encryption
   * settings per disk or snapshot.
   */
  encryptionSettingsCollection?: EncryptionSettingsCollection;
  /**
   * The disk provisioning state.
   */
  readonly provisioningState?: string;
  /**
   * The number of IOPS allowed for this disk; only settable for UltraSSD disks. One operation can
   * transfer between 4k and 256k bytes.
   */
  diskIOPSReadWrite?: number;
  /**
   * The bandwidth allowed for this disk; only settable for UltraSSD disks. MBps means millions of
   * bytes per second - MB here uses the ISO notation, of powers of 10.
   */
  diskMBpsReadWrite?: number;
  /**
   * The state of the disk. Possible values include: 'Unattached', 'Attached', 'Reserved',
   * 'ActiveSAS', 'ReadyToUpload', 'ActiveUpload'
   */
  readonly diskState?: string;
}
/**
 * Disk update resource.
 */
export interface DiskUpdate {
  /**
   * The Operating System type. Possible values include: 'Windows', 'Linux'
   */
  osType?: string;
  /**
   * If creationData.createOption is Empty, this field is mandatory and it indicates the size of
   * the VHD to create. If this field is present for updates or creation with other options, it
   * indicates a resize. Resizes are only allowed if the disk is not attached to a running VM, and
   * can only increase the disk's size.
   */
  diskSizeGB?: number;
  /**
   * Encryption settings collection used by Azure Disk Encryption, can contain multiple encryption
   * settings per disk or snapshot.
   */
  encryptionSettingsCollection?: EncryptionSettingsCollection;
  /**
   * The number of IOPS allowed for this disk; only settable for UltraSSD disks. One operation can
   * transfer between 4k and 256k bytes.
   */
  diskIOPSReadWrite?: number;
  /**
   * The bandwidth allowed for this disk; only settable for UltraSSD disks. MBps means millions of
   * bytes per second - MB here uses the ISO notation, of powers of 10.
   */
  diskMBpsReadWrite?: number;
  /**
   * Resource tags
   */
  tags?: { [propertyName: string]: string };
  sku?: DiskSku;
}
/**
 * The snapshots sku name. Can be Standard_LRS, Premium_LRS, or Standard_ZRS.
 */
export interface SnapshotSku {
  /**
   * The sku name. Possible values include: 'Standard_LRS', 'Premium_LRS', 'Standard_ZRS'
   */
  name?: string;
  /**
   * The sku tier.
   */
  readonly tier?: string;
}
/**
 * Data used for requesting a SAS.
 */
export interface GrantAccessData {
  /**
   * Possible values include: 'None', 'Read', 'Write'
   */
  access: string;
  /**
   * Time duration in seconds until the SAS access expires.
   */
  durationInSeconds: number;
}
/**
 * A disk access SAS uri.
 */
export interface AccessUri {
  /**
   * A SAS uri for accessing a disk.
   */
  readonly accessSAS?: string;
}
/**
 * Snapshot resource.
 */
export interface Snapshot extends Resource {
  /**
   * Unused. Always Null.
   */
  readonly managedBy?: string;
  sku?: SnapshotSku;
  /**
   * The time when the disk was created.
   */
  readonly timeCreated?: Date;
  /**
   * The Operating System type. Possible values include: 'Windows', 'Linux'
   */
  osType?: string;
  /**
   * The hypervisor generation of the Virtual Machine. Applicable to OS disks only. Possible values
   * include: 'V1', 'V2'
   */
  hyperVGeneration?: string;
  /**
   * Disk source information. CreationData information cannot be changed after the disk has been
   * created.
   */
  creationData: CreationData;
  /**
   * If creationData.createOption is Empty, this field is mandatory and it indicates the size of
   * the VHD to create. If this field is present for updates or creation with other options, it
   * indicates a resize. Resizes are only allowed if the disk is not attached to a running VM, and
   * can only increase the disk's size.
   */
  diskSizeGB?: number;
  /**
   * Encryption settings collection used by Azure Disk Encryption, can contain multiple encryption
   * settings per disk or snapshot.
   */
  encryptionSettingsCollection?: EncryptionSettingsCollection;
  /**
   * The disk provisioning state.
   */
  readonly provisioningState?: string;
}
/**
 * Snapshot update resource.
 */
export interface SnapshotUpdate {
  /**
   * The Operating System type. Possible values include: 'Windows', 'Linux'
   */
  osType?: string;
  /**
   * If creationData.createOption is Empty, this field is mandatory and it indicates the size of
   * the VHD to create. If this field is present for updates or creation with other options, it
   * indicates a resize. Resizes are only allowed if the disk is not attached to a running VM, and
   * can only increase the disk's size.
   */
  diskSizeGB?: number;
  /**
   * Encryption settings collection used by Azure Disk Encryption, can contain multiple encryption
   * settings per disk or snapshot.
   */
  encryptionSettingsCollection?: EncryptionSettingsCollection;
  /**
   * Resource tags
   */
  tags?: { [propertyName: string]: string };
  sku?: SnapshotSku;
}
/**
 * Describes the gallery unique name.
 */
export interface GalleryIdentifier {
  /**
   * The unique name of the Shared Image Gallery. This name is generated automatically by Azure.
   */
  readonly uniqueName?: string;
}
/**
 * Specifies information about the Shared Image Gallery that you want to create or update.
 */
export interface Gallery extends Resource {
  /**
   * The description of this Shared Image Gallery resource. This property is updatable.
   */
  description?: string;
  identifier?: GalleryIdentifier;
  /**
   * @summary The current state of the gallery.
   * @description The provisioning state, which only appears in the response. Possible values
   * include: 'Creating', 'Updating', 'Failed', 'Succeeded', 'Deleting', 'Migrating'
   */
  readonly provisioningState?: string;
}
/**
 * This is the gallery Image Definition identifier.
 */
export interface GalleryImageIdentifier {
  /**
   * The name of the gallery Image Definition publisher.
   */
  publisher: string;
  /**
   * The name of the gallery Image Definition offer.
   */
  offer: string;
  /**
   * The name of the gallery Image Definition SKU.
   */
  sku: string;
}
/**
 * Describes the resource range.
 */
export interface ResourceRange {
  /**
   * The minimum number of the resource.
   */
  min?: number;
  /**
   * The maximum number of the resource.
   */
  max?: number;
}
/**
 * The properties describe the recommended machine configuration for this Image Definition. These
 * properties are updatable.
 */
export interface RecommendedMachineConfiguration {
  vCPUs?: ResourceRange;
  memory?: ResourceRange;
}
/**
 * Describes the disallowed disk types.
 */
export interface Disallowed {
  /**
   * A list of disk types.
   */
  diskTypes?: string[];
}
/**
 * Describes the gallery Image Definition purchase plan. This is used by marketplace images.
 */
export interface ImagePurchasePlan {
  /**
   * The plan ID.
   */
  name?: string;
  /**
   * The publisher ID.
   */
  publisher?: string;
  /**
   * The product ID.
   */
  product?: string;
}
/**
 * Specifies information about the gallery Image Definition that you want to create or update.
 */
export interface GalleryImage extends Resource {
  /**
   * The description of this gallery Image Definition resource. This property is updatable.
   */
  description?: string;
  /**
   * The Eula agreement for the gallery Image Definition.
   */
  eula?: string;
  /**
   * The privacy statement uri.
   */
  privacyStatementUri?: string;
  /**
   * The release note uri.
   */
  releaseNoteUri?: string;
  /**
   * This property allows you to specify the type of the OS that is included in the disk when
   * creating a VM from a managed image. <br><br> Possible values are: <br><br> **Windows**
   * <br><br> **Linux**. Possible values include: 'Windows', 'Linux'
   */
  osType: string;
  /**
   * The allowed values for OS State are 'Generalized'. Possible values include: 'Generalized',
   * 'Specialized'
   */
  osState: string;
  /**
   * The end of life date of the gallery Image Definition. This property can be used for
   * decommissioning purposes. This property is updatable.
   */
  endOfLifeDate?: Date;
  identifier: GalleryImageIdentifier;
  recommended?: RecommendedMachineConfiguration;
  disallowed?: Disallowed;
  purchasePlan?: ImagePurchasePlan;
  /**
   * @summary The current state of the gallery Image Definition.
   * @description The provisioning state, which only appears in the response. Possible values
   * include: 'Creating', 'Updating', 'Failed', 'Succeeded', 'Deleting', 'Migrating'
   */
  readonly provisioningState?: string;
}
/**
 * Describes the basic gallery artifact publishing profile.
 */
export interface GalleryArtifactPublishingProfileBase {
  /**
   * The target regions where the Image Version is going to be replicated to. This property is
   * updatable.
   */
  targetRegions?: TargetRegion[];
  source: GalleryArtifactSource;
}
/**
 * The publishing profile of a gallery Image Version.
 */
export interface GalleryImageVersionPublishingProfile extends GalleryArtifactPublishingProfileBase
{
  /**
   * The number of replicas of the Image Version to be created per region. This property would take
   * effect for a region when regionalReplicaCount is not specified. This property is updatable.
   */
  replicaCount?: number;
  /**
   * If set to true, Virtual Machines deployed from the latest version of the Image Definition
   * won't use this Image Version.
   */
  excludeFromLatest?: boolean;
  /**
   * The timestamp for when the gallery Image Version is published.
   */
  readonly publishedDate?: Date;
  /**
   * The end of life date of the gallery Image Version. This property can be used for
   * decommissioning purposes. This property is updatable.
   */
  endOfLifeDate?: Date;
  /**
   * Specifies the storage account type to be used to store the image. This property is not
   * updatable. Possible values include: 'Standard_LRS', 'Standard_ZRS'
   */
  storageAccountType?: string;
}
/**
 * This is the disk image base class.
 */
export interface GalleryDiskImage {
  /**
   * This property indicates the size of the VHD to be created.
   */
  readonly sizeInGB?: number;
  /**
   * The host caching of the disk. Valid values are 'None', 'ReadOnly', and 'ReadWrite'. Possible
   * values include: 'None', 'ReadOnly', 'ReadWrite'
   */
  readonly hostCaching?: string;
}
/**
 * This is the OS disk image.
 */
export interface GalleryOSDiskImage extends GalleryDiskImage {
}
/**
 * This is the data disk image.
 */
export interface GalleryDataDiskImage extends GalleryDiskImage {
  /**
   * This property specifies the logical unit number of the data disk. This value is used to
   * identify data disks within the Virtual Machine and therefore must be unique for each data disk
   * attached to the Virtual Machine.
   */
  readonly lun?: number;
}
/**
 * This is the storage profile of a gallery Image Version.
 */
export interface GalleryImageVersionStorageProfile {
  /**
   * The OS disk image.
   */
  readonly osDiskImage?: GalleryOSDiskImage;
  /**
   * A list of data disk images.
   */
  readonly dataDiskImages?: GalleryDataDiskImage[];
}
/**
 * This is the regional replication status.
 */
export interface RegionalReplicationStatus {
  /**
   * The region to which the gallery Image Version is being replicated to.
   */
  readonly region?: string;
  /**
   * This is the regional replication state. Possible values include: 'Unknown', 'Replicating',
   * 'Completed', 'Failed'
   */
  readonly state?: string;
  /**
   * The details of the replication status.
   */
  readonly details?: string;
  /**
   * It indicates progress of the replication job.
   */
  readonly progress?: number;
}
/**
 * This is the replication status of the gallery Image Version.
 */
export interface ReplicationStatus {
  /**
   * This is the aggregated replication status based on all the regional replication status flags.
   * Possible values include: 'Unknown', 'InProgress', 'Completed', 'Failed'
   */
  readonly aggregatedState?: string;
  /**
   * This is a summary of replication status for each region.
   */
  readonly summary?: RegionalReplicationStatus[];
}
/**
 * Specifies information about the gallery Image Version that you want to create or update.
 */
export interface GalleryImageVersion extends Resource {
  /**
   * The publishing profile of this gallery Image Version.
   */
  publishingProfile: GalleryImageVersionPublishingProfile;
  /**
   * @summary The current state of the gallery Image Version.
   * @description The provisioning state, which only appears in the response. Possible values
   * include: 'Creating', 'Updating', 'Failed', 'Succeeded', 'Deleting', 'Migrating'
   */
  readonly provisioningState?: string;
  /**
   * The storage profile of this gallery Image Version.
   */
  readonly storageProfile?: GalleryImageVersionStorageProfile;
  /**
   * The replication status of this gallery Image Version.
   */
  readonly replicationStatus?: ReplicationStatus;
}
/**
 * Describes the target region information.
 */
export interface TargetRegion {
  /**
   * The name of the region.
   */
  name: string;
  /**
   * The number of replicas of the Image Version to be created per region. This property is
   * updatable.
   */
  regionalReplicaCount?: number;
  /**
   * Specifies the storage account type to be used to store the image. This property is not
   * updatable. Possible values include: 'Standard_LRS', 'Standard_ZRS'
   */
  storageAccountType?: string;
}
/**
 * The managed artifact.
 */
export interface ManagedArtifact {
  /**
   * The managed artifact id.
   */
  id: string;
}
/**
 * The source image from which the Image Version is going to be created.
 */
export interface GalleryArtifactSource {
  /**
   * The managed artifact (e.g. a managed image) used as the source.
   */
  managedImage: ManagedArtifact;
}
/**
 * Properties to configure a custom container service cluster.
 */
export interface ContainerServiceCustomProfile {
  /**
   * The name of the custom orchestrator to use.
   */
  orchestrator: string;
}
/**
 * Information about a service principal identity for the cluster to use for manipulating Azure
 * APIs.
 */
export interface ContainerServiceServicePrincipalProfile {
  /**
   * The ID for the service principal.
   */
  clientId: string;
  /**
   * The secret password associated with the service principal.
   */
  secret: string;
}
/**
 * Profile for the container service orchestrator.
 */
export interface ContainerServiceOrchestratorProfile {
  /**
   * The orchestrator to use to manage container service cluster resources. Valid values are Swarm,
   * DCOS, Custom, and Kubernetes. Possible values include: 'Swarm', 'DCOS', 'Custom', 'Kubernetes'
   */
  orchestratorType: string;
}
/**
 * Profile for the container service master.
 */
export interface ContainerServiceMasterProfile {
  /**
   * Number of masters (VMs) in the container service cluster. Allowed values are 1, 3, and 5. The
   * default value is 1.
   */
  count?: number;
  /**
   * DNS prefix to be used to create the FQDN for master.
   */
  dnsPrefix: string;
  /**
   * FQDN for the master.
   */
  readonly fqdn?: string;
}
/**
 * Profile for the container service agent pool.
 */
export interface ContainerServiceAgentPoolProfile {
  /**
   * Unique name of the agent pool profile in the context of the subscription and resource group.
   */
  name: string;
  /**
   * Number of agents (VMs) to host docker containers. Allowed values must be in the range of 1 to
   * 100 (inclusive). The default value is 1.
   */
  count: number;
  /**
   * Size of agent VMs. Possible values include: 'Standard_A0', 'Standard_A1', 'Standard_A2',
   * 'Standard_A3', 'Standard_A4', 'Standard_A5', 'Standard_A6', 'Standard_A7', 'Standard_A8',
   * 'Standard_A9', 'Standard_A10', 'Standard_A11', 'Standard_D1', 'Standard_D2', 'Standard_D3',
   * 'Standard_D4', 'Standard_D11', 'Standard_D12', 'Standard_D13', 'Standard_D14',
   * 'Standard_D1_v2', 'Standard_D2_v2', 'Standard_D3_v2', 'Standard_D4_v2', 'Standard_D5_v2',
   * 'Standard_D11_v2', 'Standard_D12_v2', 'Standard_D13_v2', 'Standard_D14_v2', 'Standard_G1',
   * 'Standard_G2', 'Standard_G3', 'Standard_G4', 'Standard_G5', 'Standard_DS1', 'Standard_DS2',
   * 'Standard_DS3', 'Standard_DS4', 'Standard_DS11', 'Standard_DS12', 'Standard_DS13',
   * 'Standard_DS14', 'Standard_GS1', 'Standard_GS2', 'Standard_GS3', 'Standard_GS4',
   * 'Standard_GS5'
   */
  vmSize: string;
  /**
   * DNS prefix to be used to create the FQDN for the agent pool.
   */
  dnsPrefix: string;
  /**
   * FQDN for the agent pool.
   */
  readonly fqdn?: string;
}
/**
 * Profile for Windows VMs in the container service cluster.
 */
export interface ContainerServiceWindowsProfile {
  /**
   * The administrator username to use for Windows VMs.
   */
  adminUsername: string;
  /**
   * The administrator password to use for Windows VMs.
   */
  adminPassword: string;
}
/**
 * Contains information about SSH certificate public key data.
 */
export interface ContainerServiceSshPublicKey {
  /**
   * Certificate public key used to authenticate with VMs through SSH. The certificate must be in
   * PEM format with or without headers.
   */
  keyData: string;
}
/**
 * SSH configuration for Linux-based VMs running on Azure.
 */
export interface ContainerServiceSshConfiguration {
  /**
   * the list of SSH public keys used to authenticate with Linux-based VMs.
   */
  publicKeys: ContainerServiceSshPublicKey[];
}
/**
 * Profile for Linux VMs in the container service cluster.
 */
export interface ContainerServiceLinuxProfile {
  /**
   * The administrator username to use for Linux VMs.
   */
  adminUsername: string;
  /**
   * The ssh key configuration for Linux VMs.
   */
  ssh: ContainerServiceSshConfiguration;
}
/**
 * Profile for diagnostics on the container service VMs.
 */
export interface ContainerServiceVMDiagnostics {
  /**
   * Whether the VM diagnostic agent is provisioned on the VM.
   */
  enabled: boolean;
  /**
   * The URI of the storage account where diagnostics are stored.
   */
  readonly storageUri?: string;
}
/**
 * Profile for diagnostics in the container service cluster.
 */
export interface ContainerServiceDiagnosticsProfile {
  /**
   * Profile for the container service VM diagnostic agent.
   */
  vmDiagnostics: ContainerServiceVMDiagnostics;
}
/**
 * Container service.
 */
export interface ContainerService extends Resource {
  /**
   * the current deployment or provisioning state, which only appears in the response.
   */
  readonly provisioningState?: string;
  /**
   * Properties of the orchestrator.
   */
  orchestratorProfile?: ContainerServiceOrchestratorProfile;
  /**
   * Properties for custom clusters.
   */
  customProfile?: ContainerServiceCustomProfile;
  /**
   * Properties for cluster service principals.
   */
  servicePrincipalProfile?: ContainerServiceServicePrincipalProfile;
  /**
   * Properties of master agents.
   */
  masterProfile: ContainerServiceMasterProfile;
  /**
   * Properties of the agent pool.
   */
  agentPoolProfiles: ContainerServiceAgentPoolProfile[];
  /**
   * Properties of Windows VMs.
   */
  windowsProfile?: ContainerServiceWindowsProfile;
  /**
   * Properties of Linux VMs.
   */
  linuxProfile: ContainerServiceLinuxProfile;
  /**
   * Properties of the diagnostic agent.
   */
  diagnosticsProfile?: ContainerServiceDiagnosticsProfile;
}
/**
 * The List Compute Operation operation response.
 */
export interface ComputeOperationListResult extends Array<ComputeOperationValue> {
}
/**
 * The List Availability Set operation response.
 */
export interface AvailabilitySetListResult extends Array<AvailabilitySet> {
  /**
   * The URI to fetch the next page of AvailabilitySets. Call ListNext() with this URI to fetch the
   * next page of AvailabilitySets.
   */
  nextLink?: string;
}
/**
 * The List Virtual Machine Size operation response.
 */
export interface VirtualMachineSizeListResult extends Array<VirtualMachineSize> {
}
/**
 * The List Usages operation response.
 */
export interface ListUsagesResult extends Array<Usage> {
  /**
   * The URI to fetch the next page of compute resource usage information. Call ListNext() with
   * this to fetch the next page of compute resource usage information.
   */
  nextLink?: string;
}
/**
 * The List Virtual Machine operation response.
 */
export interface VirtualMachineListResult extends Array<VirtualMachine> {
  /**
   * The URI to fetch the next page of VMs. Call ListNext() with this URI to fetch the next page of
   * Virtual Machines.
   */
  nextLink?: string;
}
/**
 * The List Image operation response.
 */
export interface ImageListResult extends Array<Image> {
  /**
   * The uri to fetch the next page of Images. Call ListNext() with this to fetch the next page of
   * Images.
   */
  nextLink?: string;
}
/**
 * The List Virtual Machine Scale Set operation response.
 */
export interface VirtualMachineScaleSetListResult extends Array<VirtualMachineScaleSet> {
  /**
   * The uri to fetch the next page of Virtual Machine Scale Sets. Call ListNext() with this to
   * fetch the next page of VMSS.
   */
  nextLink?: string;
}
/**
 * The List Virtual Machine Scale Set operation response.
 */
export interface VirtualMachineScaleSetListWithLinkResult extends Array<VirtualMachineScaleSet> {
  /**
   * The uri to fetch the next page of Virtual Machine Scale Sets. Call ListNext() with this to
   * fetch the next page of Virtual Machine Scale Sets.
   */
  nextLink?: string;
}
/**
 * The Virtual Machine Scale Set List Skus operation response.
 */
export interface VirtualMachineScaleSetListSkusResult extends Array<VirtualMachineScaleSetSku> {
  /**
   * The uri to fetch the next page of Virtual Machine Scale Set Skus. Call ListNext() with this to
   * fetch the next page of VMSS Skus.
   */
  nextLink?: string;
}
/**
 * List of Virtual Machine Scale Set OS Upgrade History operation response.
 */
export interface VirtualMachineScaleSetListOSUpgradeHistory extends
Array<UpgradeOperationHistoricalStatusInfo> {
  /**
   * The uri to fetch the next page of OS Upgrade History. Call ListNext() with this to fetch the
   * next page of history of upgrades.
   */
  nextLink?: string;
}
/**
 * The List VM scale set extension operation response.
 */
export interface VirtualMachineScaleSetExtensionListResult extends
Array<VirtualMachineScaleSetExtension> {
  /**
   * The uri to fetch the next page of VM scale set extensions. Call ListNext() with this to fetch
   * the next page of VM scale set extensions.
   */
  nextLink?: string;
}
/**
 * The List Virtual Machine Scale Set VMs operation response.
 */
export interface VirtualMachineScaleSetVMListResult extends Array<VirtualMachineScaleSetVM> {
  /**
   * The uri to fetch the next page of Virtual Machine Scale Set VMs. Call ListNext() with this to
   * fetch the next page of VMSS VMs
   */
  nextLink?: string;
}
/**
 * The List Run Command operation response.
 */
export interface RunCommandListResult extends Array<RunCommandDocumentBase> {
  /**
   * The uri to fetch the next page of run commands. Call ListNext() with this to fetch the next
   * page of run commands.
   */
  nextLink?: string;
}
/**
 * The Compute List Skus operation response.
 */
export interface ResourceSkusResult extends Array<ResourceSku> {
  /**
   * The uri to fetch the next page of Compute Skus. Call ListNext() with this to fetch the next
   * page of VMSS Skus.
   */
  nextLink?: string;
}
/**
 * The List Disks operation response.
 */
export interface DiskList extends Array<Disk> {
  /**
   * The uri to fetch the next page of disks. Call ListNext() with this to fetch the next page of
   * disks.
   */
  nextLink?: string;
}
/**
 * The List Snapshots operation response.
 */
export interface SnapshotList extends Array<Snapshot> {
  /**
   * The uri to fetch the next page of snapshots. Call ListNext() with this to fetch the next page
   * of snapshots.
   */
  nextLink?: string;
}
/**
 * The List Galleries operation response.
 */
export interface GalleryList extends Array<Gallery> {
  /**
   * The uri to fetch the next page of galleries. Call ListNext() with this to fetch the next page
   * of galleries.
   */
  nextLink?: string;
}
/**
 * The List Gallery Images operation response.
 */
export interface GalleryImageList extends Array<GalleryImage> {
  /**
   * The uri to fetch the next page of Image Definitions in the Shared Image Gallery. Call
   * ListNext() with this to fetch the next page of gallery Image Definitions.
   */
  nextLink?: string;
}
/**
 * The List Gallery Image version operation response.
 */
export interface GalleryImageVersionList extends Array<GalleryImageVersion> {
  /**
   * The uri to fetch the next page of gallery Image Versions. Call ListNext() with this to fetch
   * the next page of gallery Image Versions.
   */
  nextLink?: string;
}
/**
 * The response from the List Container Services operation.
 */
export interface ContainerServiceListResult extends Array<ContainerService> {
  /**
   * The URL to get the next set of container service results.
   */
  nextLink?: string;
}
import type {ScopeValueSets, NameValue, ValueScope, ValueScopeName} from "./scope"
import {_, nil, _Code, Code, Name, UsedNames, CodeItem, addCodeArg, _CodeOrName} from "./code"
import {Scope, varKinds} from "./scope"
export {_, str, strConcat, nil, getProperty, stringify, regexpCode, Name, Code} from "./code"
export {Scope, ScopeStore, ValueScope, ValueScopeName, ScopeValueSets, varKinds} from "./scope"
// type for expressions that can be safely inserted in code without quotes
export type SafeExpr = Code | number | boolean | null
// type that is either Code or a function that adds code to the CodeGen instance using its methods
export type Block = Code | (() => void)
// frequently used operator fragments, pre-wrapped as _Code for safe interpolation
export const operators = {
  GT: new _Code(">"),
  GTE: new _Code(">="),
  LT: new _Code("<"),
  LTE: new _Code("<="),
  EQ: new _Code("==="),
  NEQ: new _Code("!=="),
  NOT: new _Code("!"),
  OR: new _Code("||"),
  AND: new _Code("&&"),
  ADD: new _Code("+"),
}
// Base class for all nodes of the generated-code tree.
abstract class Node {
  // names used by this node (name string -> usage count; see addNames)
  abstract readonly names: UsedNames
  // Returns the node(s) that replace this node after tree optimization,
  // or undefined when the node can be dropped. Default: keep unchanged.
  optimizeNodes(): this | ChildNode | ChildNode[] | undefined {
    return this
  }
  // Simplifies/removes the node based on which names are actually used;
  // returns undefined when the node can be dropped. Default: keep unchanged.
  optimizeNames(_names: UsedNames, _constants: Constants): this | undefined {
    return this
  }
  // get count(): number {
  //   return 1
  // }
}
// Variable declaration statement: `<varKind> <name>[ = <rhs>];`
class Def extends Node {
  constructor(private readonly varKind: Name, private readonly name: Name, private rhs?: SafeExpr) {
    super()
  }
  render({es5, _n}: CGOptions): string {
    // in es5 mode every declaration is emitted as `var`
    const varKind = es5 ? varKinds.var : this.varKind
    const rhs = this.rhs === undefined ? "" : ` = ${this.rhs}`
    return `${varKind} ${this.name}${rhs};` + _n
  }
  optimizeNames(names: UsedNames, constants: Constants): this | undefined {
    // drop the whole declaration if the declared name is never used
    if (!names[this.name.str]) return
    if (this.rhs) this.rhs = optimizeExpr(this.rhs, names, constants)
    return this
  }
  get names(): UsedNames {
    return this.rhs instanceof _CodeOrName ? this.rhs.names : {}
  }
}
// Assignment statement: `<lhs> = <rhs>;`
class Assign extends Node {
  constructor(readonly lhs: Code, public rhs: SafeExpr, private readonly sideEffects?: boolean) {
    super()
  }
  render({_n}: CGOptions): string {
    return `${this.lhs} = ${this.rhs};` + _n
  }
  optimizeNames(names: UsedNames, constants: Constants): this | undefined {
    // drop the assignment when it targets an unused simple name and has no side effects
    if (this.lhs instanceof Name && !names[this.lhs.str] && !this.sideEffects) return
    this.rhs = optimizeExpr(this.rhs, names, constants)
    return this
  }
  get names(): UsedNames {
    // lhs names count only when lhs is a compound expression (a plain Name is a write, not a use)
    const names = this.lhs instanceof Name ? {} : {...this.lhs.names}
    return addExprNames(names, this.rhs)
  }
}
// Compound assignment statement, e.g. `x += 1;` - rendered as `<lhs> <op>= <rhs>;`
class AssignOp extends Assign {
  constructor(lhs: Code, private readonly op: Code, rhs: SafeExpr, sideEffects?: boolean) {
    super(lhs, rhs, sideEffects)
  }
  render(opts: CGOptions): string {
    const stmt = `${this.lhs} ${this.op}= ${this.rhs};`
    return stmt + opts._n
  }
}
// Statement label: `<label>:`
class Label extends Node {
  readonly names: UsedNames = {}
  constructor(readonly label: Name) {
    super()
  }
  render(opts: CGOptions): string {
    return `${this.label}:${opts._n}`
  }
}
// `break[ <label>];` statement
class Break extends Node {
  readonly names: UsedNames = {}
  constructor(readonly label?: Code) {
    super()
  }
  render(opts: CGOptions): string {
    let stmt = "break"
    if (this.label) stmt += ` ${this.label}`
    return `${stmt};` + opts._n
  }
}
// `throw <error>;` statement
class Throw extends Node {
  constructor(readonly error: Code) {
    super()
  }
  render(opts: CGOptions): string {
    return `throw ${this.error};${opts._n}`
  }
  get names(): UsedNames {
    const {error} = this
    return error.names
  }
}
// Arbitrary expression used as a statement: `<code>;`
class AnyCode extends Node {
  constructor(private code: SafeExpr) {
    super()
  }
  render({_n}: CGOptions): string {
    return `${this.code};` + _n
  }
  optimizeNodes(): this | undefined {
    // drop the node when its code renders to an empty string
    return `${this.code}` ? this : undefined
  }
  optimizeNames(names: UsedNames, constants: Constants): this {
    this.code = optimizeExpr(this.code, names, constants)
    return this
  }
  get names(): UsedNames {
    return this.code instanceof _CodeOrName ? this.code.names : {}
  }
}
// A node with child nodes, rendered in sequence.
abstract class ParentNode extends Node {
  constructor(readonly nodes: ChildNode[] = []) {
    super()
  }
  render(opts: CGOptions): string {
    return this.nodes.reduce((code, n) => code + n.render(opts), "")
  }
  optimizeNodes(): this | ChildNode | ChildNode[] | undefined {
    const {nodes} = this
    let i = nodes.length
    // iterate backwards so splicing does not shift unvisited indices
    while (i--) {
      const n = nodes[i].optimizeNodes()
      if (Array.isArray(n)) nodes.splice(i, 1, ...n)
      else if (n) nodes[i] = n
      else nodes.splice(i, 1)
    }
    // drop this parent entirely when no children remain
    return nodes.length > 0 ? this : undefined
  }
  optimizeNames(names: UsedNames, constants: Constants): this | undefined {
    const {nodes} = this
    let i = nodes.length
    while (i--) {
      // iterating backwards improves 1-pass optimization
      const n = nodes[i]
      if (n.optimizeNames(names, constants)) continue
      // node was dropped - remove the names it used from the usage counts
      subtractNames(names, n.names)
      nodes.splice(i, 1)
    }
    return nodes.length > 0 ? this : undefined
  }
  get names(): UsedNames {
    return this.nodes.reduce((names: UsedNames, n) => addNames(names, n.names), {})
  }
  // get count(): number {
  //   return this.nodes.reduce((c, n) => c + n.count, 1)
  // }
}
// Parent node whose children render inside braces: `{ ... }`
abstract class BlockNode extends ParentNode {
  render(opts: CGOptions): string {
    const {_n} = opts
    return `{${_n}${super.render(opts)}}${_n}`
  }
}
// Top-level node holding the whole generated program.
class Root extends ParentNode {}
// `else` branch of an If node (rendered by If.render).
class Else extends BlockNode {
  static readonly kind = "else"
}
// `if(<condition>) {...} [else {...}]` statement
class If extends BlockNode {
  static readonly kind = "if"
  else?: If | Else
  constructor(private condition: Code | boolean, nodes?: ChildNode[]) {
    super(nodes)
  }
  render(opts: CGOptions): string {
    let code = `if(${this.condition})` + super.render(opts)
    if (this.else) code += "else " + this.else.render(opts)
    return code
  }
  optimizeNodes(): If | ChildNode[] | undefined {
    super.optimizeNodes()
    const cond = this.condition
    if (cond === true) return this.nodes // else is ignored here
    let e = this.else
    if (e) {
      const ns = e.optimizeNodes()
      e = this.else = Array.isArray(ns) ? new Else(ns) : (ns as Else | undefined)
    }
    if (e) {
      // condition is literally false - only the else branch can ever run
      if (cond === false) return e instanceof If ? e : e.nodes
      if (this.nodes.length) return this
      // then-branch became empty - invert the condition and promote the else branch
      return new If(not(cond), e instanceof If ? [e] : e.nodes)
    }
    // no else branch: drop the whole statement when it can never do anything
    if (cond === false || !this.nodes.length) return undefined
    return this
  }
  optimizeNames(names: UsedNames, constants: Constants): this | undefined {
    this.else = this.else?.optimizeNames(names, constants)
    // keep the node only if either branch survived name optimization
    if (!(super.optimizeNames(names, constants) || this.else)) return
    this.condition = optimizeExpr(this.condition, names, constants)
    return this
  }
  get names(): UsedNames {
    const names = super.names
    addExprNames(names, this.condition)
    if (this.else) addNames(names, this.else.names)
    return names
  }
  // get count(): number {
  //   return super.count + (this.else?.count || 0)
  // }
}
// Base class for all `for` loop variants.
abstract class For extends BlockNode {
  static readonly kind = "for"
}
// Generic loop with a free-form iteration clause: `for(<iteration>) {...}`
class ForLoop extends For {
  constructor(private iteration: Code) {
    super()
  }
  render(opts: CGOptions): string {
    return `for(${this.iteration})` + super.render(opts)
  }
  optimizeNames(names: UsedNames, constants: Constants): this | undefined {
    if (!super.optimizeNames(names, constants)) return
    this.iteration = optimizeExpr(this.iteration, names, constants)
    return this
  }
  get names(): UsedNames {
    return addNames(super.names, this.iteration.names)
  }
}
class ForRange extends For {
constructor(
private readonly varKind: Name,
private readonly name: Name,
private readonly from: SafeExpr,
private readonly to: SafeExpr
) {
super()
}
render(opts: CGOptions): string {
const varKind = opts.es5 ? varKinds.var : this.varKind
const {name, from, to} = this
return `for(${varKind} ${name}=${from}; ${name}<${to}; ${name}++)` + super.render(opts)
}
get names(): UsedNames {
const names = addExprNames(super.names, this.from)
return addExprNames(names, this.to)
}
}
// Iteration loop: `for(<varKind> <name> of|in <iterable>) {...}`
class ForIter extends For {
  constructor(
    private readonly loop: "of" | "in",
    private readonly varKind: Name,
    private readonly name: Name,
    private iterable: Code
  ) {
    super()
  }
  render(opts: CGOptions): string {
    return `for(${this.varKind} ${this.name} ${this.loop} ${this.iterable})` + super.render(opts)
  }
  optimizeNames(names: UsedNames, constants: Constants): this | undefined {
    if (!super.optimizeNames(names, constants)) return
    this.iterable = optimizeExpr(this.iterable, names, constants)
    return this
  }
  get names(): UsedNames {
    return addNames(super.names, this.iterable.names)
  }
}
// Function definition: `[async ]function <name>(<args>) {...}`
class Func extends BlockNode {
  static readonly kind = "func"
  constructor(public name: Name, public args: Code, public async?: boolean) {
    super()
  }
  render(opts: CGOptions): string {
    const keyword = this.async ? "async function" : "function"
    return `${keyword} ${this.name}(${this.args})` + super.render(opts)
  }
}
// `return <expr>` - the single child node renders the returned expression
class Return extends ParentNode {
  static readonly kind = "return"
  render(opts: CGOptions): string {
    return `return ${super.render(opts)}`
  }
}
// `try {...} [catch(e) {...}] [finally {...}]` statement
class Try extends BlockNode {
  catch?: Catch
  finally?: Finally
  render(opts: CGOptions): string {
    let code = "try" + super.render(opts)
    if (this.catch) code += this.catch.render(opts)
    if (this.finally) code += this.finally.render(opts)
    return code
  }
  optimizeNodes(): this {
    super.optimizeNodes()
    // catch/finally optimize their children in place; their return values are
    // intentionally ignored - the blocks are kept even if they become empty.
    // (Removed the no-op `as Catch | undefined` / `as Finally | undefined`
    // assertions that were applied to these discarded expression results.)
    this.catch?.optimizeNodes()
    this.finally?.optimizeNodes()
    return this
  }
  optimizeNames(names: UsedNames, constants: Constants): this {
    super.optimizeNames(names, constants)
    this.catch?.optimizeNames(names, constants)
    this.finally?.optimizeNames(names, constants)
    return this
  }
  get names(): UsedNames {
    const names = super.names
    if (this.catch) addNames(names, this.catch.names)
    if (this.finally) addNames(names, this.finally.names)
    return names
  }
  // get count(): number {
  //   return super.count + (this.catch?.count || 0) + (this.finally?.count || 0)
  // }
}
// `catch(<error>) {...}` block attached to a Try node
class Catch extends BlockNode {
  static readonly kind = "catch"
  constructor(readonly error: Name) {
    super()
  }
  render(opts: CGOptions): string {
    return `catch(${this.error})${super.render(opts)}`
  }
}
// `finally {...}` block attached to a Try node
class Finally extends BlockNode {
  static readonly kind = "finally"
  render(opts: CGOptions): string {
    return `finally${super.render(opts)}`
  }
}
// nodes that open a block and are pushed onto the CodeGen._nodes stack
type StartBlockNode = If | For | Func | Return | Try
// nodes without child nodes
type LeafNode = Def | Assign | Label | Break | Throw | AnyCode
type ChildNode = StartBlockNode | LeafNode
// node classes accepted by CodeGen._endBlockNode
type EndBlockNodeType =
  | typeof If
  | typeof Else
  | typeof For
  | typeof Func
  | typeof Return
  | typeof Catch
  | typeof Finally
// constant name -> its value expression (used during name optimization)
type Constants = Record<string, SafeExpr | undefined>
export interface CodeGenOptions {
  // render all declarations as `var` (see Def.render / ForRange.render)
  es5?: boolean
  // emit line breaks between statements
  lines?: boolean
  // iterate only own object properties: forIn() becomes forOf over Object.keys
  ownProperties?: boolean
}
interface CGOptions extends CodeGenOptions {
  // statement separator: "\n" when `lines` is set, "" otherwise
  _n: "\n" | ""
}
export class CodeGen {
readonly _scope: Scope
readonly _extScope: ValueScope
readonly _values: ScopeValueSets = {}
private readonly _nodes: ParentNode[]
private readonly _blockStarts: number[] = []
private readonly _constants: Constants = {}
private readonly opts: CGOptions
constructor(extScope: ValueScope, opts: CodeGenOptions = {}) {
this.opts = {...opts, _n: opts.lines ? "\n" : ""}
this._extScope = extScope
this._scope = new Scope({parent: extScope})
this._nodes = [new Root()]
}
toString(): string {
return this._root.render(this.opts)
}
// returns unique name in the internal scope
name(prefix: string): Name {
return this._scope.name(prefix)
}
// reserves unique name in the external scope
scopeName(prefix: string): ValueScopeName {
return this._extScope.name(prefix)
}
// reserves unique name in the external scope and assigns value to it
scopeValue(prefixOrName: ValueScopeName | string, value: NameValue): Name {
const name = this._extScope.value(prefixOrName, value)
const vs = this._values[name.prefix] || (this._values[name.prefix] = new Set())
vs.add(name)
return name
}
getScopeValue(prefix: string, keyOrRef: unknown): ValueScopeName | undefined {
return this._extScope.getValue(prefix, keyOrRef)
}
// return code that assigns values in the external scope to the names that are used internally
// (same names that were returned by gen.scopeName or gen.scopeValue)
scopeRefs(scopeName: Name): Code {
return this._extScope.scopeRefs(scopeName, this._values)
}
scopeCode(): Code {
return this._extScope.scopeCode(this._values)
}
private _def(
varKind: Name,
nameOrPrefix: Name | string,
rhs?: SafeExpr,
constant?: boolean
): Name {
const name = this._scope.toName(nameOrPrefix)
if (rhs !== undefined && constant) this._constants[name.str] = rhs
this._leafNode(new Def(varKind, name, rhs))
return name
}
// `const` declaration (`var` in es5 mode)
const(nameOrPrefix: Name | string, rhs: SafeExpr, _constant?: boolean): Name {
return this._def(varKinds.const, nameOrPrefix, rhs, _constant)
}
// `let` declaration with optional assignment (`var` in es5 mode)
let(nameOrPrefix: Name | string, rhs?: SafeExpr, _constant?: boolean): Name {
return this._def(varKinds.let, nameOrPrefix, rhs, _constant)
}
// `var` declaration with optional assignment
var(nameOrPrefix: Name | string, rhs?: SafeExpr, _constant?: boolean): Name {
return this._def(varKinds.var, nameOrPrefix, rhs, _constant)
}
// assignment code
assign(lhs: Code, rhs: SafeExpr, sideEffects?: boolean): CodeGen {
return this._leafNode(new Assign(lhs, rhs, sideEffects))
}
// `+=` code
add(lhs: Code, rhs: SafeExpr): CodeGen {
return this._leafNode(new AssignOp(lhs, operators.ADD, rhs))
}
// appends passed SafeExpr to code or executes Block
code(c: Block | SafeExpr): CodeGen {
if (typeof c == "function") c()
else if (c !== nil) this._leafNode(new AnyCode(c))
return this
}
// returns code for object literal for the passed argument list of key-value pairs
object(...keyValues: [Name | string, SafeExpr | string][]): _Code {
const code: CodeItem[] = ["{"]
for (const [key, value] of keyValues) {
if (code.length > 1) code.push(",")
code.push(key)
if (key !== value || this.opts.es5) {
code.push(":")
addCodeArg(code, value)
}
}
code.push("}")
return new _Code(code)
}
// `if` clause (or statement if `thenBody` and, optionally, `elseBody` are passed)
if(condition: Code | boolean, thenBody?: Block, elseBody?: Block): CodeGen {
this._blockNode(new If(condition))
if (thenBody && elseBody) {
this.code(thenBody).else().code(elseBody).endIf()
} else if (thenBody) {
this.code(thenBody).endIf()
} else if (elseBody) {
throw new Error('CodeGen: "else" body without "then" body')
}
return this
}
// `else if` clause - invalid without `if` or after `else` clauses
elseIf(condition: Code | boolean): CodeGen {
return this._elseNode(new If(condition))
}
// `else` clause - only valid after `if` or `else if` clauses
else(): CodeGen {
return this._elseNode(new Else())
}
// end `if` statement (needed if gen.if was used only with condition)
endIf(): CodeGen {
return this._endBlockNode(If, Else)
}
private _for(node: For, forBody?: Block): CodeGen {
this._blockNode(node)
if (forBody) this.code(forBody).endFor()
return this
}
// a generic `for` clause (or statement if `forBody` is passed)
for(iteration: Code, forBody?: Block): CodeGen {
return this._for(new ForLoop(iteration), forBody)
}
// `for` statement for a range of values
forRange(
nameOrPrefix: Name | string,
from: SafeExpr,
to: SafeExpr,
forBody: (index: Name) => void,
varKind: Code = this.opts.es5 ? varKinds.var : varKinds.let
): CodeGen {
const name = this._scope.toName(nameOrPrefix)
return this._for(new ForRange(varKind, name, from, to), () => forBody(name))
}
// `for-of` statement (in es5 mode replace with a normal for loop)
forOf(
nameOrPrefix: Name | string,
iterable: Code,
forBody: (item: Name) => void,
varKind: Code = varKinds.const
): CodeGen {
const name = this._scope.toName(nameOrPrefix)
if (this.opts.es5) {
const arr = iterable instanceof Name ? iterable : this.var("_arr", iterable)
return this.forRange("_i", 0, _`${arr}.length`, (i) => {
this.var(name, _`${arr}[${i}]`)
forBody(name)
})
}
return this._for(new ForIter("of", varKind, name, iterable), () => forBody(name))
}
// `for-in` statement.
// With option `ownProperties` replaced with a `for-of` loop for object keys
forIn(
nameOrPrefix: Name | string,
obj: Code,
forBody: (item: Name) => void,
varKind: Code = this.opts.es5 ? varKinds.var : varKinds.const
): CodeGen {
if (this.opts.ownProperties) {
return this.forOf(nameOrPrefix, _`Object.keys(${obj})`, forBody)
}
const name = this._scope.toName(nameOrPrefix)
return this._for(new ForIter("in", varKind, name, obj), () => forBody(name))
}
// end `for` loop
endFor(): CodeGen {
return this._endBlockNode(For)
}
// `label` statement
label(label: Name): CodeGen {
return this._leafNode(new Label(label))
}
// `break` statement
break(label?: Code): CodeGen {
return this._leafNode(new Break(label))
}
// `return` statement
return(value: Block | SafeExpr): CodeGen {
const node = new Return()
this._blockNode(node)
this.code(value)
if (node.nodes.length !== 1) throw new Error('CodeGen: "return" should have one node')
return this._endBlockNode(Return)
}
// `try` statement
try(tryBody: Block, catchCode?: (e: Name) => void, finallyCode?: Block): CodeGen {
if (!catchCode && !finallyCode) throw new Error('CodeGen: "try" without "catch" and "finally"')
const node = new Try()
this._blockNode(node)
this.code(tryBody)
if (catchCode) {
const error = this.name("e")
this._currNode = node.catch = new Catch(error)
catchCode(error)
}
if (finallyCode) {
this._currNode = node.finally = new Finally()
this.code(finallyCode)
}
return this._endBlockNode(Catch, Finally)
}
// `throw` statement
throw(error: Code): CodeGen {
return this._leafNode(new Throw(error))
}
// start self-balancing block
block(body?: Block, nodeCount?: number): CodeGen {
this._blockStarts.push(this._nodes.length)
if (body) this.code(body).endBlock(nodeCount)
return this
}
// end the current self-balancing block: closes every node opened since
// the matching block() call, optionally verifying how many were closed
endBlock(nodeCount?: number): CodeGen {
const len = this._blockStarts.pop()
if (len === undefined) throw new Error("CodeGen: not in self-balancing block")
const toClose = this._nodes.length - len
if (toClose < 0 || (nodeCount !== undefined && toClose !== nodeCount)) {
throw new Error(`CodeGen: wrong number of nodes: ${toClose} vs ${nodeCount} expected`)
}
this._nodes.length = len
return this
}
// `function` heading (or definition if funcBody is passed);
// `args` defaults to an empty parameter list
func(name: Name, args: Code = nil, async?: boolean, funcBody?: Block): CodeGen {
this._blockNode(new Func(name, args, async))
if (funcBody) this.code(funcBody).endFunc()
return this
}
// end function definition
endFunc(): CodeGen {
return this._endBlockNode(Func)
}
optimize(n = 1): void {
while (n-- > 0) {
this._root.optimizeNodes()
this._root.optimizeNames(this._root.names, this._constants)
}
}
// append a leaf (childless) node to the current block
private _leafNode(node: LeafNode): CodeGen {
this._currNode.nodes.push(node)
return this
}
// append a block node and make it the current insertion point
private _blockNode(node: StartBlockNode): void {
this._currNode.nodes.push(node)
this._nodes.push(node)
}
// pop the current node, asserting it is an instance of N1 (or N2)
private _endBlockNode(N1: EndBlockNodeType, N2?: EndBlockNodeType): CodeGen {
const n = this._currNode
if (n instanceof N1 || (N2 && n instanceof N2)) {
this._nodes.pop()
return this
}
throw new Error(`CodeGen: not in block "${N2 ? `${N1.kind}/${N2.kind}` : N1.kind}"`)
}
// replace the current If node with its else / else-if continuation
private _elseNode(node: If | Else): CodeGen {
const n = this._currNode
if (!(n instanceof If)) {
throw new Error('CodeGen: "else" without "if"')
}
this._currNode = n.else = node
return this
}
// root of the whole tree — always the bottom of the node stack
private get _root(): Root {
return this._nodes[0] as Root
}
// the node new code is currently appended to — top of the node stack
private get _currNode(): ParentNode {
const ns = this._nodes
return ns[ns.length - 1]
}
// replace the top of the node stack (used by else/catch/finally)
private set _currNode(node: ParentNode) {
const ns = this._nodes
ns[ns.length - 1] = node
}
// get nodeCount(): number {
// return this._root.count
// }
}
// Merge usage counts from `from` into `names`; mutates and returns `names`.
function addNames(names: UsedNames, from: UsedNames): UsedNames {
for (const key in from) {
const current = names[key] || 0
names[key] = current + (from[key] || 0)
}
return names
}
// add names used by `from` to `names` when `from` is code; plain values are a no-op
function addExprNames(names: UsedNames, from: SafeExpr): UsedNames {
return from instanceof _CodeOrName ? addNames(names, from.names) : names
}
// Inline single-use constant names inside `expr`.
// A name is substituted by its constant value only when it is used exactly
// once (per `names`) and has an entry in `constants`; substituted names are
// removed from `names` so later passes see updated usage counts.
function optimizeExpr<T extends SafeExpr | Code>(expr: T, names: UsedNames, constants: Constants): T
function optimizeExpr(expr: SafeExpr, names: UsedNames, constants: Constants): SafeExpr {
if (expr instanceof Name) return replaceName(expr)
if (!canOptimize(expr)) return expr
return new _Code(
expr._items.reduce((items: CodeItem[], c: SafeExpr | string) => {
if (c instanceof Name) c = replaceName(c)
// flatten nested code so the result stays a single flat item list
if (c instanceof _Code) items.push(...c._items)
else items.push(c)
return items
}, [])
)
// substitute a name with its constant if it is only used once
function replaceName(n: Name): SafeExpr {
const c = constants[n.str]
if (c === undefined || names[n.str] !== 1) return n
delete names[n.str]
return c
}
// true when `e` is code containing at least one replaceable name
function canOptimize(e: SafeExpr): e is _Code {
return (
e instanceof _Code &&
e._items.some(
(c) => c instanceof Name && names[c.str] === 1 && constants[c.str] !== undefined
)
)
}
}
// Decrement usage counts in `names` by the counts in `from`; mutates `names`.
function subtractNames(names: UsedNames, from: UsedNames): void {
for (const key in from) {
const current = names[key] || 0
names[key] = current - (from[key] || 0)
}
}
// logical negation: boolean/number/null constants are folded,
// code is negated with parentheses where needed
export function not<T extends Code | SafeExpr>(x: T): T
export function not(x: Code | SafeExpr): Code | SafeExpr {
return typeof x == "boolean" || typeof x == "number" || x === null ? !x : _`!${par(x)}`
}
const andCode = mappend(operators.AND)
// boolean AND (&&) expression with the passed arguments
export function and(...args: Code[]): Code {
return args.reduce(andCode)
}
const orCode = mappend(operators.OR)
// boolean OR (||) expression with the passed arguments
export function or(...args: Code[]): Code {
return args.reduce(orCode)
}
type MAppend = (x: Code, y: Code) => Code
// build a binary combinator for `op`; `nil` operands are dropped
function mappend(op: Code): MAppend {
return (x, y) => (x === nil ? y : y === nil ? x : _`${par(x)} ${op} ${par(y)}`)
}
// parenthesize unless `x` is a bare name
function par(x: Code): Code {
return x instanceof Name ? x : _`(${x})`
}
import { IGeoIntersectsOptions, IGeoNearOptions, IGeoWithinOptions, QueryCommand, QUERY_COMMANDS_LITERAL } from './commands/query'
import { LogicCommand, LOGIC_COMMANDS_LITERAL } from './commands/logic'
import { UpdateCommand, UPDATE_COMMANDS_LITERAL } from './commands/update'
import { isArray, isObject, isString } from './utils/type'
import Aggregation from './aggregate'
export type IQueryCondition = Record<string, any> | LogicCommand
export const Command = {
// --- comparison query operators ---
eq(val: any) {
return new QueryCommand(QUERY_COMMANDS_LITERAL.EQ, [val])
},
neq(val: any) {
return new QueryCommand(QUERY_COMMANDS_LITERAL.NEQ, [val])
},
lt(val: any) {
return new QueryCommand(QUERY_COMMANDS_LITERAL.LT, [val])
},
lte(val: any) {
return new QueryCommand(QUERY_COMMANDS_LITERAL.LTE, [val])
},
gt(val: any) {
return new QueryCommand(QUERY_COMMANDS_LITERAL.GT, [val])
},
gte(val: any) {
return new QueryCommand(QUERY_COMMANDS_LITERAL.GTE, [val])
},
// --- array / element query operators ---
in(val: any[]) {
return new QueryCommand(QUERY_COMMANDS_LITERAL.IN, val)
},
nin(val: any[]) {
return new QueryCommand(QUERY_COMMANDS_LITERAL.NIN, val)
},
all(val: any) {
return new QueryCommand(QUERY_COMMANDS_LITERAL.ALL, val)
},
elemMatch(val: any) {
return new QueryCommand(QUERY_COMMANDS_LITERAL.ELEM_MATCH, [val])
},
exists(val: boolean) {
return new QueryCommand(QUERY_COMMANDS_LITERAL.EXISTS, [val])
},
size(val: number) {
return new QueryCommand(QUERY_COMMANDS_LITERAL.SIZE, [val])
},
mod(val: number[]) {
return new QueryCommand(QUERY_COMMANDS_LITERAL.MOD, [val])
},
// --- geo query operators ---
geoNear(val: IGeoNearOptions) {
return new QueryCommand(QUERY_COMMANDS_LITERAL.GEO_NEAR, [val])
},
geoWithin(val: IGeoWithinOptions) {
return new QueryCommand(QUERY_COMMANDS_LITERAL.GEO_WITHIN, [val])
},
geoIntersects(val: IGeoIntersectsOptions) {
return new QueryCommand(QUERY_COMMANDS_LITERAL.GEO_INTERSECTS, [val])
},
like(val: string) {
return new QueryCommand(QUERY_COMMANDS_LITERAL.LIKE, [val])
},
// --- logic operators: accept either one array of conditions or variadic conditions ---
and(...__expressions__: IQueryCondition[]) {
const expressions = isArray(arguments[0]) ? arguments[0] : Array.from(arguments)
return new LogicCommand(LOGIC_COMMANDS_LITERAL.AND, expressions)
},
nor(...__expressions__: IQueryCondition[]) {
const expressions = isArray(arguments[0]) ? arguments[0] : Array.from(arguments)
return new LogicCommand(LOGIC_COMMANDS_LITERAL.NOR, expressions)
},
or(...__expressions__: IQueryCondition[]) {
const expressions = isArray(arguments[0]) ? arguments[0] : Array.from(arguments)
return new LogicCommand(LOGIC_COMMANDS_LITERAL.OR, expressions)
},
not(...__expressions__: IQueryCondition[]) {
const expressions = isArray(arguments[0]) ? arguments[0] : Array.from(arguments)
return new LogicCommand(LOGIC_COMMANDS_LITERAL.NOT, expressions)
},
// --- update operators ---
set(val: any) {
return new UpdateCommand(UPDATE_COMMANDS_LITERAL.SET, [val])
},
remove() {
return new UpdateCommand(UPDATE_COMMANDS_LITERAL.REMOVE, [])
},
inc(val: number) {
return new UpdateCommand(UPDATE_COMMANDS_LITERAL.INC, [val])
},
mul(val: number) {
return new UpdateCommand(UPDATE_COMMANDS_LITERAL.MUL, [val])
},
// push: accepts an `{ each, position, sort, slice }` options object,
// a plain array of values, or variadic values
push(...args: any[]) {
let values
if (isObject(args[0]) && args[0].hasOwnProperty('each')) {
const options: any = args[0]
values = {
$each: options.each,
$position: options.position,
$sort: options.sort,
$slice: options.slice
}
} else if (isArray(args[0])) {
values = args[0]
} else {
values = Array.from(args)
}
return new UpdateCommand(UPDATE_COMMANDS_LITERAL.PUSH, values)
},
pull(values) {
return new UpdateCommand(UPDATE_COMMANDS_LITERAL.PULL, values)
},
pullAll(values) {
return new UpdateCommand(UPDATE_COMMANDS_LITERAL.PULL_ALL, values)
},
pop() {
return new UpdateCommand(UPDATE_COMMANDS_LITERAL.POP, [])
},
shift() {
return new UpdateCommand(UPDATE_COMMANDS_LITERAL.SHIFT, [])
},
unshift(...__values__: any[]) {
const values = isArray(arguments[0]) ? arguments[0] : Array.from(arguments)
return new UpdateCommand(UPDATE_COMMANDS_LITERAL.UNSHIFT, values)
},
addToSet(values) {
return new UpdateCommand(UPDATE_COMMANDS_LITERAL.ADD_TO_SET, values)
},
rename(values) {
return new UpdateCommand(UPDATE_COMMANDS_LITERAL.RENAME, [values])
},
bit(values) {
return new UpdateCommand(UPDATE_COMMANDS_LITERAL.BIT, [values])
},
max(values) {
return new UpdateCommand(UPDATE_COMMANDS_LITERAL.MAX, [values])
},
min(values) {
return new UpdateCommand(UPDATE_COMMANDS_LITERAL.MIN, [values])
},
// wrap an aggregation expression for use inside a query ($expr)
expr(values: AggregationOperator) {
return {
$expr: values
}
},
// $jsonSchema query operator
jsonSchema(schema) {
return {
$jsonSchema: schema
}
},
/**
 * $text query operator (full-text search).
 * Accepts either the search string itself, or an options object with
 * `search`, `language`, `caseSensitive` and `diacriticSensitive`.
 */
text(values: string | {
search: string
language?: string
caseSensitive?: boolean
diacriticSensitive: boolean
}) {
if (isString(values)) {
// Fix: `values` IS the search string here; the previous
// `values.search` was always undefined for string input.
return {
$search: values
}
} else {
return {
$search: values.search,
$language: values.language,
$caseSensitive: values.caseSensitive,
$diacriticSensitive: values.diacriticSensitive
}
}
},
// aggregation expression operators; each helper produces `{ $<name>: param }`
aggregate: {
pipeline() {
return new Aggregation()
},
// https://docs.mongodb.com/manual/reference/operator/aggregation/
// arithmetic operators (15)
abs: (param) => new AggregationOperator('abs', param),
add: (param) => new AggregationOperator('add', param),
ceil: (param) => new AggregationOperator('ceil', param),
divide: (param) => new AggregationOperator('divide', param),
exp: (param) => new AggregationOperator('exp', param),
floor: (param) => new AggregationOperator('floor', param),
ln: (param) => new AggregationOperator('ln', param),
log: (param) => new AggregationOperator('log', param),
log10: (param) => new AggregationOperator('log10', param),
mod: (param) => new AggregationOperator('mod', param),
multiply: (param) => new AggregationOperator('multiply', param),
pow: (param) => new AggregationOperator('pow', param),
sqrt: (param) => new AggregationOperator('sqrt', param),
subtract: (param) => new AggregationOperator('subtract', param),
trunc: (param) => new AggregationOperator('trunc', param),
// array operators (15)
arrayElemAt: (param) => new AggregationOperator('arrayElemAt', param),
arrayToObject: (param) => new AggregationOperator('arrayToObject', param),
concatArrays: (param) => new AggregationOperator('concatArrays', param),
filter: (param) => new AggregationOperator('filter', param),
in: (param) => new AggregationOperator('in', param),
indexOfArray: (param) => new AggregationOperator('indexOfArray', param),
isArray: (param) => new AggregationOperator('isArray', param),
map: (param) => new AggregationOperator('map', param),
range: (param) => new AggregationOperator('range', param),
reduce: (param) => new AggregationOperator('reduce', param),
reverseArray: (param) => new AggregationOperator('reverseArray', param),
size: (param) => new AggregationOperator('size', param),
slice: (param) => new AggregationOperator('slice', param),
zip: (param) => new AggregationOperator('zip', param),
// boolean operators (3)
and: (param) => new AggregationOperator('and', param),
not: (param) => new AggregationOperator('not', param),
or: (param) => new AggregationOperator('or', param),
// comparison operators (7)
cmp: (param) => new AggregationOperator('cmp', param),
eq: (param) => new AggregationOperator('eq', param),
gt: (param) => new AggregationOperator('gt', param),
gte: (param) => new AggregationOperator('gte', param),
lt: (param) => new AggregationOperator('lt', param),
lte: (param) => new AggregationOperator('lte', param),
neq: (param) => new AggregationOperator('ne', param),
// conditional operators (3)
cond: (param) => new AggregationOperator('cond', param),
ifNull: (param) => new AggregationOperator('ifNull', param),
switch: (param) => new AggregationOperator('switch', param),
// date operators (15)
dateFromParts: (param) => new AggregationOperator('dateFromParts', param),
dateFromString: (param) => new AggregationOperator('dateFromString', param),
dayOfMonth: (param) => new AggregationOperator('dayOfMonth', param),
dayOfWeek: (param) => new AggregationOperator('dayOfWeek', param),
dayOfYear: (param) => new AggregationOperator('dayOfYear', param),
isoDayOfWeek: (param) => new AggregationOperator('isoDayOfWeek', param),
isoWeek: (param) => new AggregationOperator('isoWeek', param),
isoWeekYear: (param) => new AggregationOperator('isoWeekYear', param),
millisecond: (param) => new AggregationOperator('millisecond', param),
minute: (param) => new AggregationOperator('minute', param),
month: (param) => new AggregationOperator('month', param),
second: (param) => new AggregationOperator('second', param),
hour: (param) => new AggregationOperator('hour', param),
// 'toDate' requires MongoDB 4.0+, so it is not exposed here
week: (param) => new AggregationOperator('week', param),
year: (param) => new AggregationOperator('year', param),
// literal operator
literal: (param) => new AggregationOperator('literal', param),
// object operators
mergeObjects: (param) => new AggregationOperator('mergeObjects', param),
objectToArray: (param) => new AggregationOperator('objectToArray', param),
// set operators (7)
allElementsTrue: (param) => new AggregationOperator('allElementsTrue', param),
anyElementTrue: (param) => new AggregationOperator('anyElementTrue', param),
setDifference: (param) => new AggregationOperator('setDifference', param),
setEquals: (param) => new AggregationOperator('setEquals', param),
setIntersection: (param) => new AggregationOperator('setIntersection', param),
setIsSubset: (param) => new AggregationOperator('setIsSubset', param),
setUnion: (param) => new AggregationOperator('setUnion', param),
// string operators (13)
concat: (param) => new AggregationOperator('concat', param),
dateToString: (param) => new AggregationOperator('dateToString', param),
indexOfBytes: (param) => new AggregationOperator('indexOfBytes', param),
indexOfCP: (param) => new AggregationOperator('indexOfCP', param),
// 'ltrim',
// 'rtrim',
split: (param) => new AggregationOperator('split', param),
strLenBytes: (param) => new AggregationOperator('strLenBytes', param),
strLenCP: (param) => new AggregationOperator('strLenCP', param),
strcasecmp: (param) => new AggregationOperator('strcasecmp', param),
substr: (param) => new AggregationOperator('substr', param),
substrBytes: (param) => new AggregationOperator('substrBytes', param),
substrCP: (param) => new AggregationOperator('substrCP', param),
toLower: (param) => new AggregationOperator('toLower', param),
// 'toString'
// 'trim'
toUpper: (param) => new AggregationOperator('toUpper', param),
// text search operator
meta: (param) => new AggregationOperator('meta', param),
// group / accumulator operators (10)
addToSet: (param) => new AggregationOperator('addToSet', param),
avg: (param) => new AggregationOperator('avg', param),
first: (param) => new AggregationOperator('first', param),
last: (param) => new AggregationOperator('last', param),
max: (param) => new AggregationOperator('max', param),
min: (param) => new AggregationOperator('min', param),
push: (param) => new AggregationOperator('push', param),
stdDevPop: (param) => new AggregationOperator('stdDevPop', param),
stdDevSamp: (param) => new AggregationOperator('stdDevSamp', param),
sum: (param) => new AggregationOperator('sum', param),
// variable-declaration operator
let: (param) => new AggregationOperator('let', param)
},
// projection operators ($slice / $elemMatch)
project: {
slice: (param) => new ProjectionOperator('slice', param),
elemMatch: (param) => new ProjectionOperator('elemMatch', param)
}
}
/**
 * Wraps a MongoDB aggregation operator: `new AggregationOperator('sum', x)`
 * yields an object shaped like `{ $sum: x }`.
 * Explicit parameter types and an index signature added so the dynamic
 * `this['$' + name]` assignment compiles under strict mode.
 */
class AggregationOperator {
[key: string]: any
constructor(name: string, param: any) {
this['$' + name] = param
}
}
/**
 * Wraps a MongoDB projection operator: `new ProjectionOperator('slice', x)`
 * yields an object shaped like `{ $slice: x }`.
 * Explicit parameter types and an index signature added so the dynamic
 * `this['$' + name]` assignment compiles under strict mode.
 */
class ProjectionOperator {
[key: string]: any
constructor(name: string, param: any) {
this['$' + name] = param
}
}
export default Command | the_stack |
import { decodeEUCJP, encodeEUCJP } from "./euc_jp";
import { decodeZipCode, ZipCode, zipCodeInclude } from "./zip_code";
// Constructor signature for BML BinaryTable objects.
export interface BinaryTableConstructor {
new(table_ref: string, structure: string): IBinaryTable;// | null;
}
// Runtime interface of a BML BinaryTable (see ARIB STD-B24 / TR-B14).
export interface IBinaryTable {
// number of data rows in the table
nrow: number;
// number of columns per row (pad fields excluded)
ncolumn: number;
close(): number;
toNumber(row: number, column: number): number;
toString(row: number, column: number): string | null;
toArray(startRow: number, numRow: number): any[] | null;
search(startRow: number, ...args: any[]): number;
}
// Unit letter of a field length in the structure string.
enum BinaryTableUnit {
Byte = "B",
Bit = "b",
// variable length: the field is prefixed with its byte length
Variable = "V",
}
// Data-type letter of a field in the structure string.
enum BinaryTableType {
Boolean = "B",
UnsignedInteger = "U",
Integer = "I",
String = "S",
ZipCode = "Z",
// padding: skipped on read, not emitted as a column
Pad = "P",
}
// One parsed field descriptor from a structure string such as "U:8B".
type BinaryTableField = {
unit: BinaryTableUnit,
length: number,
type: BinaryTableType,
}
// Comparison operators accepted by BinaryTable#search.
enum SearchOperator {
Equal = 0,
NotEqual,
Less, // <
LessEqual, // <=
Greater, // >
GreaterEqual, // >=
And, // &
Or, // |
Xor, // ^
Nand, // ~&
Nor, // ~|
Nxor, // ~^
StringMatches = 32,
StringIncludes,
StringStarsWith, // NOTE: misspelling of "StartsWith" kept; renaming would touch all uses
StringEndsWith,
StringNotMatch,
StringNotInclude,
BoolEqualTo = 64,
BoolNotEqualTo,
ZipInclude = 96,
ZipNotInclude = 97,
}
// Read `bits` bits MSB-first starting at bit offset `posBits` of `buffer`.
// Returns the value as an unsigned integer plus the advanced bit offset.
function readBits(posBits: number, bits: number, buffer: Uint8Array): [value: number, posBits: number] {
let value = 0;
for (let i = 0; i < bits; i++) {
// Accumulate arithmetically instead of with `<<` so that a 32-bit read
// with the top bit set does not overflow into a negative signed int32.
value *= 2;
value += (buffer[posBits >> 3] & (1 << (7 - (posBits & 7)))) ? 1 : 0;
posBits++;
}
return [value, posBits];
}
// Write the low `bits` bits of `value` MSB-first into `buffer`, starting at
// bit offset `posBits`; returns the advanced bit offset.
function writeBits(posBits: number, bits: number, buffer: Uint8Array, value: number): number {
for (let i = bits - 1; i >= 0; i--) {
const byteIndex = posBits >> 3;
const mask = 1 << (7 - (posBits & 7));
if (value & (1 << i)) {
buffer[byteIndex] |= mask;
} else {
buffer[byteIndex] &= ~mask;
}
posBits++;
}
return posBits;
}
// Parse a field-structure string such as "U:8B,S:4V" into field descriptors.
// Returns null when any comma-separated segment does not match the grammar.
export function parseBinaryStructure(structure: string): BinaryTableField[] | null {
const fields: BinaryTableField[] = [];
for (const segment of structure.split(",")) {
const match = segment.match(/^(?<type>[BUISZP]):(?<length>[1-9][0-9]*)(?<unit>[BbVv])$/);
if (match?.groups == null) {
return null;
}
const { groups } = match;
const type = groups["type"] as BinaryTableType;
const length = Number.parseInt(groups["length"]);
// lower-case "v" is accepted as an alias for the variable-length unit
const unit = groups["unit"] === "v" ? BinaryTableUnit.Variable : (groups["unit"] as BinaryTableUnit);
fields.push({ type, length, unit });
}
return fields;
}
// Decode one record from `buffer` according to `fields`.
// Returns the decoded column values (Pad fields produce no column) and the
// number of bits consumed. Byte-unit fields must start byte-aligned.
export function readBinaryFields(buffer: Uint8Array, fields: BinaryTableField[]): [result: any[], readBits: number] {
let posBits = 0;
const columns: any[] = [];
for (const field of fields) {
let fieldData: any = null;
switch (field.type) {
case BinaryTableType.Boolean:
// booleans must be exactly one bit
if (field.unit !== BinaryTableUnit.Bit) {
throw new Error("FIXME");
}
if (field.length !== 1) {
throw new Error("FIXME");
}
[fieldData, posBits] = readBits(posBits, field.length, buffer);
fieldData = fieldData != 0;
break;
case BinaryTableType.UnsignedInteger:
// up to 32 bits, bit- or byte-sized
let lengthInBits: number;
if (field.unit === BinaryTableUnit.Bit) {
lengthInBits = field.length;
} else if (field.unit === BinaryTableUnit.Byte) {
lengthInBits = field.length * 8;
} else {
throw new Error("FIXME");
}
if (lengthInBits > 32) {
throw new Error("FIXME");
}
[fieldData, posBits] = readBits(posBits, lengthInBits, buffer);
break;
case BinaryTableType.Integer:
// signed big-endian integer of 1, 2 or 4 bytes; the shift pairs
// sign-extend the 8/16-bit values to a signed 32-bit number
if (field.unit !== BinaryTableUnit.Byte) {
throw new Error("must be byte");
}
if ((posBits & 7) !== 0) {
throw new Error("must be byte aligned");
}
if (field.length === 1) {
fieldData = buffer[posBits >> 3] << (32 - 8) >> (32 - 8);
posBits += 8;
} else if (field.length === 2) {
fieldData = ((buffer[(posBits >> 3) + 0] << 8) | buffer[(posBits >> 3) + 1]) << (32 - 16) >> (32 - 16);
posBits += 16;
} else if (field.length === 4) {
fieldData = ((buffer[(posBits >> 3) + 0] << 24) | (buffer[(posBits >> 3) + 1] << 16) | (buffer[(posBits >> 3) + 2] << 8) | (buffer[(posBits >> 3) + 3] << 0));
posBits += 32;
} else {
throw new Error("length must be 1, 2, or 4");
}
break;
case BinaryTableType.String:
// EUC-JP string; fixed byte length or variable with a length prefix
if ((posBits & 7) !== 0) {
throw new Error("string must be byte aligned");
}
let lengthByte: number;
if (field.unit === BinaryTableUnit.Byte) {
lengthByte = field.length;
} else if (field.unit === BinaryTableUnit.Variable) {
[lengthByte, posBits] = readBits(posBits, field.length * 8, buffer);
} else {
throw new Error("string must be byte or variable");
}
const decoded = decodeEUCJP(buffer.slice(posBits >> 3, (posBits >> 3) + lengthByte));
// if nothing was set the field is an empty string (TR-B14, TR-B15)
// => truncate at the first NUL character
const nullIndex = decoded.indexOf("\u0000");
if (nullIndex !== -1) {
fieldData = decoded.substring(0, nullIndex);
} else {
fieldData = decoded;
}
posBits += lengthByte * 8;
break;
case BinaryTableType.Pad:
// padding is skipped and produces no column
if (field.unit === BinaryTableUnit.Byte) {
posBits += field.length * 8;
} else if (field.unit === BinaryTableUnit.Bit) {
posBits += field.length;
} else {
throw new Error("variable not allowed");
}
break;
case BinaryTableType.ZipCode:
// length-prefixed encoded zip-code range set
{
if ((posBits & 7) !== 0) {
throw new Error("zip code must be byte aligned");
}
if (field.unit !== BinaryTableUnit.Variable) {
throw new Error("zip code must be variable");
}
let lengthByte: number;
[lengthByte, posBits] = readBits(posBits, field.length * 8, buffer);
const zip = buffer.slice((posBits >> 3), (posBits >> 3) + lengthByte);
fieldData = decodeZipCode(zip);
posBits += lengthByte * 8;
}
break;
}
if (fieldData != null) {
columns.push(fieldData);
}
}
return [columns, posBits];
}
// Encode `data` into a new buffer according to `fields`.
// Two passes: the first computes the total size in bits, the second writes.
// ZipCode fields cannot be written; Pad fields emit zero bits/bytes.
export function writeBinaryFields(data: any[], fields: BinaryTableField[]): Uint8Array {
if (data.length < fields.length) {
throw new Error("FIXME");
}
// pass 1: compute the total size in bits
let sizeBits = 0;
for (let i = 0; i < fields.length; i++) {
const field = fields[i];
switch (field.type) {
case BinaryTableType.Boolean:
if (field.unit !== BinaryTableUnit.Bit) {
throw new Error("FIXME");
}
if (field.length !== 1) {
throw new Error("FIXME");
}
sizeBits++;
break;
case BinaryTableType.UnsignedInteger:
if (field.unit === BinaryTableUnit.Bit) {
sizeBits += field.length;
} else if (field.unit === BinaryTableUnit.Byte) {
sizeBits += field.length * 8;
} else {
throw new Error("FIXME");
}
break;
case BinaryTableType.Integer:
if (field.unit !== BinaryTableUnit.Byte) {
throw new Error("must be byte");
}
if ((sizeBits & 7) !== 0) {
throw new Error("must be byte aligned");
}
if (field.length === 1) {
sizeBits += 8;
} else if (field.length === 2) {
sizeBits += 16;
} else if (field.length === 4) {
sizeBits += 32;
} else {
throw new Error("length must be 1, 2, or 4");
}
break;
case BinaryTableType.String:
if ((sizeBits & 7) !== 0) {
throw new Error("string must be byte aligned");
}
if (field.unit === BinaryTableUnit.Byte) {
sizeBits += field.length * 8;
} else if (field.unit === BinaryTableUnit.Variable) {
// length prefix plus the encoded payload
sizeBits += field.length * 8;
let encoded = new Uint8Array(encodeEUCJP(data[i]));
sizeBits += encoded.length * 8;
} else {
throw new Error("string must be byte or variable");
}
break;
case BinaryTableType.Pad:
if (field.unit === BinaryTableUnit.Byte) {
sizeBits += field.length * 8;
} else if (field.unit === BinaryTableUnit.Bit) {
sizeBits += field.length;
} else {
throw new Error("variable not allowed");
}
break;
case BinaryTableType.ZipCode:
throw new Error("Z is not allowed");
}
}
// pass 2: allocate and write
const buffer = new Uint8Array((sizeBits + 7) >> 3);
let posBits = 0;
for (let i = 0; i < fields.length; i++) {
const field = fields[i];
switch (field.type) {
case BinaryTableType.Boolean:
if (field.unit !== BinaryTableUnit.Bit) {
throw new Error("FIXME");
}
if (field.length !== 1) {
throw new Error("FIXME");
}
posBits = writeBits(posBits, 1, buffer, data[i] ? 1 : 0);
break;
case BinaryTableType.UnsignedInteger:
let lengthInBits: number;
if (field.unit === BinaryTableUnit.Bit) {
lengthInBits = field.length;
} else if (field.unit === BinaryTableUnit.Byte) {
lengthInBits = field.length * 8;
} else {
throw new Error("FIXME");
}
if (lengthInBits > 32) {
throw new Error("FIXME");
}
posBits = writeBits(posBits, lengthInBits, buffer, Number(data[i]));
break;
case BinaryTableType.Integer:
// big-endian, byte-aligned, 1/2/4 bytes
if (field.unit !== BinaryTableUnit.Byte) {
throw new Error("must be byte");
}
if ((posBits & 7) !== 0) {
throw new Error("must be byte aligned");
}
if (field.length === 1) {
buffer[posBits >> 3] = Number(data[i]);
posBits += 8;
} else if (field.length === 2) {
buffer[posBits >> 3] = Number(data[i] >> 8);
posBits += 8;
buffer[posBits >> 3] = Number(data[i]);
posBits += 8;
} else if (field.length === 4) {
buffer[posBits >> 3] = Number(data[i] >> 24);
posBits += 8;
buffer[posBits >> 3] = Number(data[i] >> 16);
posBits += 8;
buffer[posBits >> 3] = Number(data[i] >> 8);
posBits += 8;
buffer[posBits >> 3] = Number(data[i]);
posBits += 8;
} else {
throw new Error("length must be 1, 2, or 4");
}
break;
case BinaryTableType.String:
if ((posBits & 7) !== 0) {
throw new Error("string must be byte aligned");
}
const encoded = encodeEUCJP(data[i]);
if (field.unit === BinaryTableUnit.Byte) {
if (encoded.length === field.length) {
buffer.set(encoded, posBits >> 3);
posBits += field.length * 8;
} else if (encoded.length > field.length) {
// truncate (see TR-B14 vol.3 8.1.15.6)
// may misbehave if truncation splits an EUC-JP multi-byte sequence
buffer.set(encoded.subarray(0, field.length), posBits >> 3);
posBits += field.length * 8;
} else {
// pad with spaces (see TR-B14 vol.3 8.1.15.6)
buffer.set(encoded, posBits >> 3);
posBits += encoded.length * 8;
for (let i = encoded.length; i < field.length; i++) {
buffer[posBits >> 3] = 0x20;
posBits += 8;
}
}
} else if (field.unit === BinaryTableUnit.Variable) {
// byte-length prefix followed by the payload
posBits = writeBits(posBits, field.length * 8, buffer, encoded.length);
buffer.set(encoded, posBits >> 3);
posBits += encoded.length * 8;
} else {
throw new Error("string must be byte or variable");
}
break;
case BinaryTableType.Pad:
// padding bits are left zeroed
if (field.unit === BinaryTableUnit.Byte) {
posBits += field.length * 8;
} else if (field.unit === BinaryTableUnit.Bit) {
posBits += field.length;
} else {
throw new Error("variable not allowed");
}
break;
case BinaryTableType.ZipCode:
throw new Error("Z is not allowed");
}
}
return buffer;
}
// In-memory BML binary table: decodes all records up front and offers
// row/column access plus the BML `search` API.
export class BinaryTable implements IBinaryTable {
// decoded records; one array of column values per row
rows: any[][];
// parsed field descriptors shared by every row
fields: BinaryTableField[];
constructor(table: Uint8Array, structure: string) {
const { rows, fields } = BinaryTable.constructBinaryTable(table, structure);
this.rows = rows;
this.fields = fields;
}
// Decode every record in `buffer`. The structure string starts with the
// per-record length-prefix size in bytes (0 = no prefix), followed by the
// comma-separated field list.
static constructBinaryTable(buffer: Uint8Array, structure: string): { rows: any[][], fields: BinaryTableField[] } {
const sep = structure.split(",");
if (sep.length < 2) {
throw new Error("FIXME");
}
if (!sep[0].match(/^(?<lengthByte>[1-9][0-9]*|0)$/)) {
throw new Error("FIXME");
}
const lengthByte = Number.parseInt(sep[0]);
let posBits = 0;
const fields = parseBinaryStructure(structure.substring(structure.indexOf(",") + 1));
if (!fields) {
throw new Error("FIXME: failed to parse structure");
}
const rows: Array<Array<any>> = [];
while (posBits < buffer.length * 8) {
let length = 0;
if (lengthByte) {
[length, posBits] = readBits(posBits, 8 * lengthByte, buffer);
}
// with a length prefix, advance by the declared record length;
// otherwise advance by however many bits were actually decoded
let [columns, read] = readBinaryFields(buffer.slice(posBits >> 3), fields);
posBits += lengthByte ? length * 8 : read;
rows.push(columns);
}
return { rows, fields };
//const regex = /^(?<lengthByte>[1-9][0-9]*|0)(,(?<type>[BUISZP]):(?<length>[1-9][0-9]*)(?<unit>[BbV]))+$/;
}
public get nrow(): number {
return this.rows.length;
}
public get ncolumn(): number {
return this.fields.length;
}
// no resources to release; always succeeds
public close(): number {
return 0;
}
// column value coerced to a number; NaN when out of range
public toNumber(row: number, column: number): number {
return Number((this.rows[row] ?? [])[column]);
}
// column value coerced to a string; undefined/null-safe
public toString(row: number, column: number): string | null {
return (this.rows[row] ?? [])[column]?.toString();
}
// slice of rows with ZipCode columns masked out as null
public toArray(startRow: number, numRow: number): any[][] | null {
return this.rows.slice(startRow, startRow + numRow).map(x => {
return x.map((v, i) => {
if (this.fields[i].type === BinaryTableType.ZipCode) {
return null;
} else {
return v;
}
})
});
}
// BML search: args are (column, value, operator) triples followed by
// (logic, limitCount, resultArray). Matching rows are appended to
// resultArray; returns the index of the last examined row, or -1.
public search(startRow: number, ...args: any[]): number {
if (args.length % 3 !== 0 || args.length < 6) {
throw new TypeError("argument");
}
// at most 4 search conditions
if (args.length > (1 + 3 + 4 * 4)) {
throw new TypeError("argument");
}
const logic = args[args.length - 3] as boolean;
const limitCount = args[args.length - 2] as number;
const resultArray = args[args.length - 1] as any[];
resultArray.length = 0;
for (let i = startRow; i < this.rows.length; i++) {
// per-condition results for this row
let results = new Array(args.length / 3 - 1);
for (let c = 0; c < args.length - 3; c += 3) {
const searchedColumn = args[c] as number;
const compared = args[c + 1] as any;
const operator = args[c + 2] as SearchOperator;
const column = this.rows[i][searchedColumn];
let result = false;
switch (operator) {
case SearchOperator.Equal:
result = column == Number(compared);
break;
case SearchOperator.NotEqual:
result = column != Number(compared);
break;
case SearchOperator.Less: // <
result = column < Number(compared);
break;
case SearchOperator.LessEqual: // <=
result = column <= Number(compared);
break;
case SearchOperator.Greater: // >
result = column > Number(compared);
break;
case SearchOperator.GreaterEqual: // >=
result = column >= Number(compared);
break;
case SearchOperator.And: // &
result = !!(column & Number(compared));
break;
case SearchOperator.Or: // |
result = !!(column | Number(compared));
break;
case SearchOperator.Xor: // ^
result = !!(column ^ Number(compared));
break;
case SearchOperator.Nand: // ~&
result = !!~(column & Number(compared));
break;
case SearchOperator.Nor: // ~|
result = !!~(column | Number(compared));
break;
case SearchOperator.Nxor: // ~^
result = !!~(column ^ Number(compared));
break;
case SearchOperator.StringMatches:
result = column === String(compared);
break;
case SearchOperator.StringIncludes:
result = String(column).includes(compared);
break;
case SearchOperator.StringStarsWith:
result = String(column).startsWith(compared);
break;
case SearchOperator.StringEndsWith:
result = String(column).endsWith(compared);
break;
case SearchOperator.StringNotMatch:
result = column !== String(compared);
break;
case SearchOperator.StringNotInclude:
result = !String(column).includes(compared);
break;
case SearchOperator.BoolEqualTo:
result = column === Boolean(compared);
break;
case SearchOperator.BoolNotEqualTo:
result = column !== Boolean(compared);
break;
case SearchOperator.ZipInclude:
result = zipCodeInclude(column as ZipCode, Number(compared))
break;
case SearchOperator.ZipNotInclude:
result = !zipCodeInclude(column as ZipCode, Number(compared))
break;
}
results[c / 3] = result;
}
// logic: true => OR
// logic: false => AND
let result: boolean;
if (logic) {
result = results.some(x => x);
} else {
result = results.every(x => x);
}
if (result) {
// ZipCode columns are reported as the boolean match result (or null
// when the column was not part of any condition)
resultArray.push(this.rows[i].map((v, j) => {
if (this.fields[j].type === BinaryTableType.ZipCode) {
let found = false;
for (let c = 0; c < args.length - 3; c += 3) {
const searchedColumn = args[c] as number;
if (searchedColumn === j) {
if (results[c / 3]) {
return true;
} else {
// the same column may appear in several conditions
found = true;
}
}
}
if (found) {
return false;
}
return null;
}
return v;
}));
}
if (resultArray.length >= limitCount) {
return i;
}
}
return -1;
}
}
import { iterableFirst, setFilter, setUnionManyInto, setSubtract, mapMap, mapSome, setMap } from "collection-utils";
import { Type, ClassType, UnionType, IntersectionType } from "./Type";
import { separateNamedTypes, SeparatedNamedTypes, isNamedType, combineTypeAttributesOfTypes } from "./TypeUtils";
import { defined, assert, panic, mustNotHappen } from "./support/Support";
import { TypeBuilder, StringTypeMapping, getNoStringTypeMapping, provenanceTypeAttributeKind } from "./TypeBuilder";
import { GraphRewriteBuilder, GraphRemapBuilder, BaseGraphRewriteBuilder } from "./GraphRewriting";
import { TypeNames, namesTypeAttributeKind } from "./attributes/TypeNames";
import { Graph } from "./Graph";
import { TypeAttributeKind, TypeAttributes, emptyTypeAttributes } from "./attributes/TypeAttributes";
import { messageError } from "./Messages";
export type TypeRef = number;
// A TypeRef packs a graph serial number and a type index into one number:
// the low `indexBits` bits hold the index within the graph, the next
// `serialBits` bits hold the graph serial (to catch refs that are used
// against the wrong graph).
const indexBits = 26;
const indexMask = (1 << indexBits) - 1;
const serialBits = 31 - indexBits;
const serialMask = (1 << serialBits) - 1;
// Type guard: TypeRefs are represented as plain numbers.
export function isTypeRef(x: any): x is TypeRef {
return typeof x === "number";
}
// Pack a graph serial and a type index into a TypeRef.
export function makeTypeRef(graph: TypeGraph, index: number): TypeRef {
assert(index <= indexMask, "Too many types in graph");
return ((graph.serial & serialMask) << indexBits) | index;
}
// Extract the type index from a TypeRef.
export function typeRefIndex(tref: TypeRef): number {
return tref & indexMask;
}
// Assert that `tref` was created for `graph` (serial numbers must match).
export function assertTypeRefGraph(tref: TypeRef, graph: TypeGraph): void {
assert(
((tref >> indexBits) & serialMask) === (graph.serial & serialMask),
"Mixing the wrong type reference and graph"
);
}
// Normalize a graph-or-builder argument to the underlying TypeGraph.
function getGraph(graphOrBuilder: TypeGraph | BaseGraphRewriteBuilder): TypeGraph {
    return graphOrBuilder instanceof TypeGraph ? graphOrBuilder : graphOrBuilder.originalGraph;
}
export function derefTypeRef(tref: TypeRef, graphOrBuilder: TypeGraph | BaseGraphRewriteBuilder): Type {
const graph = getGraph(graphOrBuilder);
assertTypeRefGraph(tref, graph);
return graph.typeAtIndex(typeRefIndex(tref));
}
export function attributesForTypeRef(
tref: TypeRef,
graphOrBuilder: TypeGraph | BaseGraphRewriteBuilder
): TypeAttributes {
const graph = getGraph(graphOrBuilder);
assertTypeRefGraph(tref, graph);
return graph.atIndex(typeRefIndex(tref))[1];
}
/**
 * Resolves a TypeRef to the `[Type, TypeAttributes]` pair stored at its
 * index, verifying the ref belongs to the graph first.
 */
export function typeAndAttributesForTypeRef(
    tref: TypeRef,
    graphOrBuilder: TypeGraph | BaseGraphRewriteBuilder
): [Type, TypeAttributes] {
    const graph = getGraph(graphOrBuilder);
    assertTypeRefGraph(tref, graph);
    const index = typeRefIndex(tref);
    return graph.atIndex(index);
}
/**
 * Stores attributes for the types of one graph, addressed by type index,
 * plus a separate attribute map per named top-level.  The per-type storage
 * is a sparse array parallel to the graph's type array.
 */
export class TypeAttributeStore {
    private readonly _topLevelValues: Map<string, TypeAttributes> = new Map();

    constructor(private readonly _typeGraph: TypeGraph, private _values: (TypeAttributes | undefined)[]) {}

    // Resolves a type to its index, checking it belongs to our graph.
    private getTypeIndex(t: Type): number {
        const tref = t.typeRef;
        assertTypeRefGraph(tref, this._typeGraph);
        return typeRefIndex(tref);
    }

    attributesForType(t: Type): TypeAttributes {
        const stored = this._values[this.getTypeIndex(t)];
        return stored !== undefined ? stored : emptyTypeAttributes;
    }

    attributesForTopLevel(name: string): TypeAttributes {
        const stored = this._topLevelValues.get(name);
        return stored !== undefined ? stored : emptyTypeAttributes;
    }

    // Returns a copy of `attributes` with `kind` set to `value`.
    // FIXME: This is potentially super slow
    private setInMap<T>(attributes: TypeAttributes, kind: TypeAttributeKind<T>, value: T): TypeAttributes {
        const copy = new Map(attributes);
        copy.set(kind, value);
        return copy;
    }

    set<T>(kind: TypeAttributeKind<T>, t: Type, value: T): void {
        const index = this.getTypeIndex(t);
        // Grow the sparse array until the index is addressable.
        for (let i = this._values.length; i <= index; i++) {
            this._values.push(undefined);
        }
        this._values[index] = this.setInMap(this.attributesForType(t), kind, value);
    }

    setForTopLevel<T>(kind: TypeAttributeKind<T>, topLevelName: string, value: T): void {
        const combined = this.setInMap(this.attributesForTopLevel(topLevelName), kind, value);
        this._topLevelValues.set(topLevelName, combined);
    }

    private tryGetInMap<T>(attributes: TypeAttributes, kind: TypeAttributeKind<T>): T | undefined {
        return attributes.get(kind);
    }

    tryGet<T>(kind: TypeAttributeKind<T>, t: Type): T | undefined {
        return this.tryGetInMap(this.attributesForType(t), kind);
    }

    tryGetForTopLevel<T>(kind: TypeAttributeKind<T>, topLevelName: string): T | undefined {
        return this.tryGetInMap(this.attributesForTopLevel(topLevelName), kind);
    }
}
/**
 * A typed view over a TypeAttributeStore for one attribute kind, so callers
 * don't have to pass the kind on every access.
 */
export class TypeAttributeStoreView<T> {
    constructor(
        private readonly _store: TypeAttributeStore,
        private readonly _kind: TypeAttributeKind<T>
    ) {}

    set(t: Type, value: T): void {
        this._store.set(this._kind, t, value);
    }

    setForTopLevel(name: string, value: T): void {
        this._store.setForTopLevel(this._kind, name, value);
    }

    tryGet(t: Type): T | undefined {
        return this._store.tryGet(this._kind, t);
    }

    // Like `tryGet`, but fails (via `defined`) if the attribute is absent.
    get(t: Type): T {
        return defined(this.tryGet(t));
    }

    tryGetForTopLevel(name: string): T | undefined {
        return this._store.tryGetForTopLevel(this._kind, name);
    }

    // Like `tryGetForTopLevel`, but fails if the attribute is absent.
    getForTopLevel(name: string): T {
        return defined(this.tryGetForTopLevel(name));
    }
}
/**
 * A graph of `Type`s.  A TypeGraph starts out mutable, backed by a
 * TypeBuilder, and becomes immutable once `freeze` is called.  Types are
 * addressed by index via `TypeRef`s, which also encode the graph's serial
 * number so refs cannot be mixed between graphs.
 */
export class TypeGraph {
    // Set while the graph is under construction; cleared by `freeze`.
    private _typeBuilder?: TypeBuilder;

    // Created in `freeze`; holds per-type and per-top-level attributes.
    private _attributeStore: TypeAttributeStore | undefined = undefined;

    // FIXME: OrderedMap? We lose the order in PureScript right now, though,
    // and maybe even earlier in the TypeScript driver.
    private _topLevels: Map<string, Type> = new Map();

    private _types?: Type[];

    // Lazily built reverse index: for each type index, the set of types
    // that have it as a child.  See `getParentsOfType`.
    private _parents: Set<Type>[] | undefined = undefined;

    private _printOnRewrite: boolean = false;

    constructor(
        typeBuilder: TypeBuilder,
        readonly serial: number,
        private readonly _haveProvenanceAttributes: boolean
    ) {
        this._typeBuilder = typeBuilder;
    }

    // Frozen means the builder has been released and the graph is immutable.
    private get isFrozen(): boolean {
        return this._typeBuilder === undefined;
    }

    get attributeStore(): TypeAttributeStore {
        return defined(this._attributeStore);
    }

    /**
     * Makes the graph immutable: takes ownership of the final type array and
     * attributes, releases the builder, and resolves the top-level refs.
     * May only be called once.
     */
    freeze(
        topLevels: ReadonlyMap<string, TypeRef>,
        types: Type[],
        typeAttributes: (TypeAttributes | undefined)[]
    ): void {
        assert(!this.isFrozen, "Tried to freeze TypeGraph a second time");
        for (const t of types) {
            assertTypeRefGraph(t.typeRef, this);
        }
        this._attributeStore = new TypeAttributeStore(this, typeAttributes);
        // The order of these three statements matters.  If we set _typeBuilder
        // to undefined before we deref the TypeRefs, then we need to set _types
        // before, also, because the deref will call into typeAtIndex, which requires
        // either a _typeBuilder or a _types.
        this._types = types;
        this._typeBuilder = undefined;
        this._topLevels = mapMap(topLevels, tref => derefTypeRef(tref, this));
    }

    get topLevels(): ReadonlyMap<string, Type> {
        assert(this.isFrozen, "Cannot get top-levels from a non-frozen graph");
        return this._topLevels;
    }

    /** Returns the type at `index`, from the builder if not yet frozen. */
    typeAtIndex(index: number): Type {
        if (this._typeBuilder !== undefined) {
            return this._typeBuilder.typeAtIndex(index);
        }
        return defined(this._types)[index];
    }

    /** Returns the type at `index` together with its attributes. */
    atIndex(index: number): [Type, TypeAttributes] {
        if (this._typeBuilder !== undefined) {
            return this._typeBuilder.atIndex(index);
        }
        const t = this.typeAtIndex(index);
        return [t, defined(this._attributeStore).attributesForType(t)];
    }

    // Depth-first walk from the top-levels, collecting every reachable type
    // that satisfies `predicate` (or all reachable types if none is given).
    private filterTypes(predicate: ((t: Type) => boolean) | undefined): ReadonlySet<Type> {
        const seen = new Set<Type>();
        let types: Type[] = [];

        function addFromType(t: Type): void {
            if (seen.has(t)) return;
            seen.add(t);

            const required = predicate === undefined || predicate(t);

            if (required) {
                types.push(t);
            }

            for (const c of t.getChildren()) {
                addFromType(c);
            }
        }

        for (const [, t] of this.topLevels) {
            addFromType(t);
        }
        return new Set(types);
    }

    allNamedTypes(): ReadonlySet<Type> {
        return this.filterTypes(isNamedType);
    }

    allNamedTypesSeparated(): SeparatedNamedTypes {
        const types = this.allNamedTypes();
        return separateNamedTypes(types);
    }

    // Union of all provenance attribute sets across the graph's types.
    // Only valid when the graph was built with provenance attributes.
    private allProvenance(): ReadonlySet<number> {
        assert(this._haveProvenanceAttributes);

        const view = new TypeAttributeStoreView(this.attributeStore, provenanceTypeAttributeKind);
        const sets = Array.from(this.allTypesUnordered()).map(t => {
            const maybeSet = view.tryGet(t);
            if (maybeSet !== undefined) return maybeSet;
            return new Set();
        });
        const result = new Set();
        setUnionManyInto(result, sets);
        return result;
    }

    setPrintOnRewrite(): void {
        this._printOnRewrite = true;
    }

    // After a rewrite, verify no provenance attributes were silently dropped
    // (skipped when the builder knowingly lost attributes).
    private checkLostTypeAttributes(builder: BaseGraphRewriteBuilder, newGraph: TypeGraph): void {
        if (!this._haveProvenanceAttributes || builder.lostTypeAttributes) return;

        const oldProvenance = this.allProvenance();
        const newProvenance = newGraph.allProvenance();
        if (oldProvenance.size !== newProvenance.size) {
            const difference = setSubtract(oldProvenance, newProvenance);
            const indexes = Array.from(difference);
            return messageError("IRTypeAttributesNotPropagated", { count: difference.size, indexes });
        }
    }

    private printRewrite(title: string): void {
        if (!this._printOnRewrite) return;

        console.log(`\n# ${title}`);
    }

    // Each array in `replacementGroups` is a bunch of types to be replaced by a
    // single new type.  `replacer` is a function that takes a group and a
    // TypeBuilder, and builds a new type with that builder that replaces the group.
    // That particular TypeBuilder will have to take as inputs types in the old
    // graph, but return types in the new graph.  Recursive types must be handled
    // carefully.
    rewrite<T extends Type>(
        title: string,
        stringTypeMapping: StringTypeMapping,
        alphabetizeProperties: boolean,
        replacementGroups: T[][],
        debugPrintReconstitution: boolean,
        replacer: (typesToReplace: ReadonlySet<T>, builder: GraphRewriteBuilder<T>, forwardingRef: TypeRef) => TypeRef,
        force: boolean = false
    ): TypeGraph {
        this.printRewrite(title);

        if (!force && replacementGroups.length === 0) return this;

        const builder = new GraphRewriteBuilder(
            this,
            stringTypeMapping,
            alphabetizeProperties,
            this._haveProvenanceAttributes,
            replacementGroups,
            debugPrintReconstitution,
            replacer
        );
        const newGraph = builder.finish();

        this.checkLostTypeAttributes(builder, newGraph);

        if (this._printOnRewrite) {
            newGraph.setPrintOnRewrite();
            newGraph.printGraph();
        }

        // Forwarding intersections are a temporary artifact of the rewrite;
        // strip them before handing the graph back.
        if (!builder.didAddForwardingIntersection) return newGraph;

        return removeIndirectionIntersections(newGraph, stringTypeMapping, debugPrintReconstitution);
    }

    /**
     * Produces a new graph in which every key of `map` is replaced by its
     * value.  Unlike `rewrite`, no new types are built.
     */
    remap(
        title: string,
        stringTypeMapping: StringTypeMapping,
        alphabetizeProperties: boolean,
        map: ReadonlyMap<Type, Type>,
        debugPrintRemapping: boolean,
        force: boolean = false
    ): TypeGraph {
        this.printRewrite(title);

        if (!force && map.size === 0) return this;

        const builder = new GraphRemapBuilder(
            this,
            stringTypeMapping,
            alphabetizeProperties,
            this._haveProvenanceAttributes,
            map,
            debugPrintRemapping
        );
        const newGraph = builder.finish();

        this.checkLostTypeAttributes(builder, newGraph);

        if (this._printOnRewrite) {
            newGraph.setPrintOnRewrite();
            newGraph.printGraph();
        }

        assert(!builder.didAddForwardingIntersection);

        return newGraph;
    }

    // A forced, no-op remap drops types that are no longer reachable.
    garbageCollect(alphabetizeProperties: boolean, debugPrintReconstitution: boolean): TypeGraph {
        const newGraph = this.remap(
            "GC",
            getNoStringTypeMapping(),
            alphabetizeProperties,
            new Map(),
            debugPrintReconstitution,
            true
        );
        return newGraph;
    }

    /**
     * Repeatedly reconstitutes the graph until the number of types stops
     * shrinking, returning the smallest graph reached.
     */
    rewriteFixedPoint(alphabetizeProperties: boolean, debugPrintReconstitution: boolean): TypeGraph {
        let graph: TypeGraph = this;
        for (;;) {
            // BUG FIX: rewrite the graph from the previous iteration, not
            // always `this` — with `this.rewrite` every pass produced the
            // same graph, so the loop could never iterate to a fixed point.
            const newGraph = graph.rewrite(
                "fixed-point",
                getNoStringTypeMapping(),
                alphabetizeProperties,
                [],
                debugPrintReconstitution,
                mustNotHappen,
                true
            );
            if (graph.allTypesUnordered().size === newGraph.allTypesUnordered().size) {
                return graph;
            }
            graph = newGraph;
        }
    }

    allTypesUnordered(): ReadonlySet<Type> {
        assert(this.isFrozen, "Tried to get all graph types before it was frozen");
        return new Set(defined(this._types));
    }

    makeGraph(invertDirection: boolean, childrenOfType: (t: Type) => ReadonlySet<Type>): Graph<Type> {
        return new Graph(defined(this._types), invertDirection, childrenOfType);
    }

    /**
     * Returns the set of types that have `t` as a direct child.  The reverse
     * index is built lazily on first use and cached in `_parents`.
     */
    getParentsOfType(t: Type): Set<Type> {
        assertTypeRefGraph(t.typeRef, this);
        if (this._parents === undefined) {
            const parents = defined(this._types).map(_ => new Set());
            for (const p of this.allTypesUnordered()) {
                for (const c of p.getChildren()) {
                    const index = c.index;
                    parents[index] = parents[index].add(p);
                }
            }
            this._parents = parents;
        }
        return this._parents[t.index];
    }

    // Debug dump: one line per type with its kind, name, children indexes,
    // and any stringifiable attributes.
    printGraph(): void {
        const types = defined(this._types);
        for (let i = 0; i < types.length; i++) {
            const t = types[i];
            const parts: string[] = [];
            parts.push(`${t.debugPrintKind}${t.hasNames ? ` ${t.getCombinedName()}` : ""}`);
            const children = t.getChildren();
            if (children.size > 0) {
                parts.push(
                    `children ${Array.from(children)
                        .map(c => c.index)
                        .join(",")}`
                );
            }
            for (const [kind, value] of t.getAttributes()) {
                const maybeString = kind.stringify(value);
                if (maybeString !== undefined) {
                    parts.push(maybeString);
                }
            }
            console.log(`${i}: ${parts.join(" | ")}`);
        }
    }
}
/**
 * Replaces the graph's single "none" type (if any) with an "any" type,
 * merging the none type's attributes into the replacement.
 */
export function noneToAny(
    graph: TypeGraph,
    stringTypeMapping: StringTypeMapping,
    debugPrintReconstitution: boolean
): TypeGraph {
    const noneTypes = setFilter(graph.allTypesUnordered(), t => t.kind === "none");
    if (noneTypes.size === 0) {
        return graph;
    }
    assert(noneTypes.size === 1, "Cannot have more than one none type");
    return graph.rewrite(
        "none to any",
        stringTypeMapping,
        false,
        [Array.from(noneTypes)],
        debugPrintReconstitution,
        (types, builder, forwardingRef) =>
            builder.getPrimitiveType("any", combineTypeAttributesOfTypes("union", types), forwardingRef)
    );
}
/**
 * Rewrites every class that has optional properties so that each optional
 * property becomes a required property whose type is nullable (a union with
 * null).  Classes without optional properties are left untouched.
 */
export function optionalToNullable(
    graph: TypeGraph,
    stringTypeMapping: StringTypeMapping,
    debugPrintReconstitution: boolean
): TypeGraph {
    // Reconstitutes a single class, converting each of its optional properties.
    function rewriteClass(c: ClassType, builder: GraphRewriteBuilder<ClassType>, forwardingRef: TypeRef): TypeRef {
        const properties = mapMap(c.getProperties(), (p, name) => {
            const t = p.type;
            let ref: TypeRef;
            if (!p.isOptional || t.isNullable) {
                // Required, or already nullable – reconstitute the type as-is.
                ref = builder.reconstituteType(t);
            } else {
                const nullType = builder.getPrimitiveType("null");
                let members: ReadonlySet<TypeRef>;
                if (t instanceof UnionType) {
                    // Already a union – just add null to its members.
                    members = setMap(t.members, m => builder.reconstituteType(m)).add(nullType);
                } else {
                    members = new Set([builder.reconstituteType(t), nullType]);
                }
                // Give the new union a name if it doesn't have one, defaulting
                // to the property's name.
                const attributes = namesTypeAttributeKind.setDefaultInAttributes(t.getAttributes(), () =>
                    TypeNames.make(new Set([name]), new Set(), true)
                );
                ref = builder.getUnionType(attributes, members);
            }
            // All resulting properties are non-optional.
            return builder.makeClassProperty(ref, false);
        });
        if (c.isFixed) {
            return builder.getUniqueClassType(c.getAttributes(), true, properties, forwardingRef);
        } else {
            return builder.getClassType(c.getAttributes(), properties, forwardingRef);
        }
    }
    const classesWithOptional = setFilter(
        graph.allTypesUnordered(),
        t => t instanceof ClassType && mapSome(t.getProperties(), p => p.isOptional)
    );
    // One replacement group per class: each class is rewritten individually.
    const replacementGroups = Array.from(classesWithOptional).map(c => [c as ClassType]);
    if (classesWithOptional.size === 0) {
        return graph;
    }
    return graph.rewrite(
        "optional to nullable",
        stringTypeMapping,
        false,
        replacementGroups,
        debugPrintReconstitution,
        (setOfClass, builder, forwardingRef) => {
            // Groups are built as singletons above, so exactly one class each.
            assert(setOfClass.size === 1);
            const c = defined(iterableFirst(setOfClass));
            return rewriteClass(c, builder, forwardingRef);
        }
    );
}
/**
 * Remaps every intersection type that merely wraps a single member —
 * possibly through a chain of such single-member intersections — directly
 * to the final non-intersection member, eliminating the indirection.
 */
export function removeIndirectionIntersections(
    graph: TypeGraph,
    stringTypeMapping: StringTypeMapping,
    debugPrintRemapping: boolean
): TypeGraph {
    const map: [Type, Type][] = [];
    for (const t of graph.allTypesUnordered()) {
        if (!(t instanceof IntersectionType)) continue;
        // Follow the chain of single-member intersections starting at `t`,
        // tracking visited nodes to detect cycles.
        const seen = new Set([t]);
        let current = t;
        while (current.members.size === 1) {
            const member = defined(iterableFirst(current.members));
            if (!(member instanceof IntersectionType)) {
                // Reached a real type – remap the original intersection to it.
                map.push([t, member]);
                break;
            }
            if (seen.has(member)) {
                // FIXME: Technically, this is an any type.
                return panic("There's a cycle of intersection types");
            }
            seen.add(member);
            current = member;
        }
    }
    return graph.remap("remove indirection intersections", stringTypeMapping, false, new Map(map), debugPrintRemapping);
}
import * as uint8arrays from "uint8arrays"
import { AddResult, CID } from "../../ipfs/index.js"
import { BareNameFilter } from "../protocol/private/namefilter.js"
import { Links, Puttable, SimpleLink } from "../types.js"
import { Branch, DistinctivePath } from "../../path.js"
import { Maybe } from "../../common/index.js"
import { Permissions } from "../../ucan/permissions.js"
import * as crypto from "../../crypto/index.js"
import * as identifiers from "../../common/identifiers.js"
import * as ipfs from "../../ipfs/index.js"
import * as link from "../link.js"
import * as pathing from "../../path.js"
import * as protocol from "../protocol/index.js"
import * as versions from "../versions.js"
import * as storage from "../../storage/index.js"
import * as ucanPermissions from "../../ucan/permissions.js"
import BareTree from "../bare/tree.js"
import MMPT from "../protocol/private/mmpt.js"
import PublicTree from "../v1/PublicTree.js"
import PrivateTree from "../v1/PrivateTree.js"
import PrivateFile from "../v1/PrivateFile.js"
type PrivateNode = PrivateTree | PrivateFile
/**
 * The root of the file system: named links to the public tree, the
 * "pretty" tree, the private forest (MMPT) and the private event log.
 * Implements `Puttable`, so the whole root can be persisted as one node.
 */
export default class RootTree implements Puttable {

  links: Links                                  // the root node's named links, as persisted
  mmpt: MMPT                                    // merkle structure holding all private nodes
  privateLog: Array<SimpleLink>                 // links to the private-log chunks, in order

  publicTree: PublicTree
  prettyTree: BareTree
  privateNodes: Record<string, PrivateNode>     // decrypted private nodes, keyed by POSIX path

  constructor({ links, mmpt, privateLog, publicTree, prettyTree, privateNodes }: {
    links: Links
    mmpt: MMPT
    privateLog: Array<SimpleLink>
    publicTree: PublicTree
    prettyTree: BareTree
    privateNodes: Record<string, PrivateNode>
  }) {
    this.links = links
    this.mmpt = mmpt
    this.privateLog = privateLog
    this.publicTree = publicTree
    this.prettyTree = prettyTree
    this.privateNodes = privateNodes
  }

  // INITIALISATION
  // --------------

  /**
   * Creates a fresh root tree: empty public and pretty trees, plus a new
   * private root encrypted with `rootKey`.  The key is stored in the
   * keystore, the version link is written, and all sub trees are persisted.
   */
  static async empty({ rootKey }: { rootKey: string }): Promise<RootTree> {
    const publicTree = await PublicTree.empty()
    const prettyTree = await BareTree.empty()
    const mmpt = MMPT.create()

    // Private tree
    const rootPath = pathing.toPosix(pathing.directory(pathing.Branch.Private))
    const rootTree = await PrivateTree.create(mmpt, rootKey, null)
    await rootTree.put()

    // Construct tree
    const tree = new RootTree({
      links: {},
      mmpt,
      privateLog: [],
      publicTree,
      prettyTree,
      privateNodes: {
        [rootPath]: rootTree
      }
    })

    // Store root key
    await RootTree.storeRootKey(rootKey)

    // Set version and store new sub trees
    await tree.setVersion(versions.latest)

    await Promise.all([
      tree.updatePuttable(Branch.Public, publicTree),
      tree.updatePuttable(Branch.Pretty, prettyTree),
      tree.updatePuttable(Branch.Private, mmpt)
    ])

    // Fin
    return tree
  }

  /**
   * Loads a root tree from a CID.  Missing public/pretty links fall back to
   * empty trees; private nodes are decrypted with the keys derivable from
   * `permissions` (none are loaded if no permissions are given).
   */
  static async fromCID(
    { cid, permissions }: { cid: CID; permissions?: Permissions }
  ): Promise<RootTree> {
    const links = await protocol.basic.getLinks(cid)
    const keys = permissions ? await permissionKeys(permissions) : []

    // Load public parts
    const publicCID = links[Branch.Public]?.cid || null
    const publicTree = publicCID === null
      ? await PublicTree.empty()
      : await PublicTree.fromCID(publicCID)

    const prettyTree = links[Branch.Pretty]
      ? await BareTree.fromCID(links[Branch.Pretty].cid)
      : await BareTree.empty()

    // Load private bits
    const privateCID = links[Branch.Private]?.cid || null

    let mmpt, privateNodes
    if (privateCID === null) {
      mmpt = MMPT.create()
      privateNodes = {}
    } else {
      mmpt = await MMPT.fromCID(privateCID)
      privateNodes = await loadPrivateNodes(keys, mmpt)
    }

    // Log chunks are named by their numeric index; sort them numerically
    // so the log is reconstructed in order.
    const privateLogCid = links[Branch.PrivateLog]?.cid
    const privateLog = privateLogCid
      ? await ipfs.dagGet(privateLogCid)
        .then(dagNode => dagNode.Links.map(link.fromDAGLink))
        .then(links => links.sort((a, b) => {
          return parseInt(a.name, 10) - parseInt(b.name, 10)
        }))
      : []

    // Construct tree
    const tree = new RootTree({
      links,
      mmpt,
      privateLog,
      publicTree,
      prettyTree,
      privateNodes
    })

    // Fin
    return tree
  }

  // MUTATIONS
  // ---------

  /** Persists the root node and returns its CID. */
  async put(): Promise<CID> {
    const { cid } = await this.putDetailed()
    return cid
  }

  /** Persists the root node, returning CID plus size information. */
  async putDetailed(): Promise<AddResult> {
    return protocol.basic.putLinks(this.links)
  }

  /** Replaces the named link with one built from the given add result. */
  updateLink(name: string, result: AddResult): this {
    const { cid, size, isFile } = result
    this.links[name] = link.make(name, cid, isFile, size)
    return this
  }

  /** Persists `puttable` and points the named link at the stored result. */
  async updatePuttable(name: string, puttable: Puttable): Promise<this> {
    return this.updateLink(name, await puttable.putDetailed())
  }

  // PRIVATE TREES
  // -------------

  /** Imports `rootKey` into the keystore under the private-root identifier. */
  static async storeRootKey(rootKey: string): Promise<void> {
    const path = pathing.directory(pathing.Branch.Private)
    const rootKeyId = await identifiers.readKey({ path })
    await crypto.keystore.importSymmKey(rootKey, rootKeyId)
  }

  /** Finds the loaded private node responsible for `path` (see findPrivateNode). */
  findPrivateNode(path: DistinctivePath): [DistinctivePath, PrivateNode | null] {
    return findPrivateNode(this.privateNodes, path)
  }

  // PRIVATE LOG
  // -----------
  // CBOR array containing chunks.
  //
  // Chunk size is based on the default IPFS block size,
  // which is 1024 * 256 bytes. 1 log chunk should fit in 1 block.
  // We'll use the CSV format for the data in the chunks.

  static LOG_CHUNK_SIZE = 1020 // Math.floor((1024 * 256) / (256 + 1))

  /**
   * Appends (the SHA-256 hash of) `cid` to the private log: loads the last
   * chunk, starts a new chunk if the current one is full, writes the chunk
   * back and re-links the whole log.
   */
  async addPrivateLogEntry(cid: string): Promise<void> {
    const log = [...this.privateLog]
    let idx = Math.max(0, log.length - 1)

    // get last chunk
    let lastChunk = log[idx]?.cid
      ? (await ipfs.cat(log[idx].cid)).split(",")
      : []

    // needs new chunk
    const needsNewChunk = lastChunk.length + 1 > RootTree.LOG_CHUNK_SIZE
    if (needsNewChunk) {
      idx = idx + 1
      lastChunk = []
    }

    // add to chunk
    // Note: only the hash of the CID is logged, not the CID itself.
    const hashedCid = await crypto.hash.sha256Str(cid)

    const updatedChunk = [...lastChunk, hashedCid]
    const updatedChunkDeposit = await protocol.basic.putFile(
      updatedChunk.join(",")
    )

    log[idx] = {
      name: idx.toString(),
      cid: updatedChunkDeposit.cid,
      size: updatedChunkDeposit.size
    }

    // save log
    const logDeposit = await ipfs.dagPutLinks(
      log.map(link.toDAGLink)
    )

    this.updateLink(Branch.PrivateLog, {
      cid: logDeposit.cid,
      isFile: false,
      size: await ipfs.size(logDeposit.cid)
    })

    this.privateLog = log
  }

  // VERSION
  // -------

  /** Writes the given version as a file and updates the version link. */
  async setVersion(v: versions.SemVer): Promise<this> {
    const result = await protocol.basic.putFile(versions.toString(v))
    return this.updateLink(Branch.Version, result)
  }

  /**
   * Reads and parses the version file.
   * NOTE(review): assumes the version link exists — this would throw if the
   * root has no version link; confirm callers guarantee that.
   */
  async getVersion(): Promise<versions.SemVer | null> {
    const file = await protocol.basic.getFile(this.links[Branch.Version].cid)
    return versions.fromString(uint8arrays.toString(file))
  }

}
// ㊙️
// A path paired with the exported symmetric key that decrypts the node there.
type PathKey = { path: DistinctivePath; key: string }
/**
 * Resolves the bare name filter for `path`: first from storage, otherwise by
 * walking up to the closest loaded private node and reading the filter off
 * the node at the relative path.
 *
 * Side effect: if the relative path doesn't exist yet inside the found tree,
 * it is created (mkdir for directories, an empty file otherwise) so a filter
 * can be produced for it.
 */
async function findBareNameFilter(
  map: Record<string, PrivateNode>,
  path: DistinctivePath
): Promise<Maybe<BareNameFilter>> {
  // Fast path: a previously stored filter for this exact path.
  const bareNameFilterId = await identifiers.bareNameFilter({ path })
  const bareNameFilter: Maybe<BareNameFilter> = await storage.getItem(bareNameFilterId)
  if (bareNameFilter) return bareNameFilter

  const [nodePath, node] = findPrivateNode(map, path)
  if (!node) return null

  const unwrappedPath = pathing.unwrap(path)
  const relativePath = unwrappedPath.slice(pathing.unwrap(nodePath).length)

  // A file can only answer for itself (empty relative path).
  if (PrivateFile.instanceOf(node)) {
    return relativePath.length === 0 ? node.header.bareNameFilter : null
  }

  if (!node.exists(relativePath)) {
    if (pathing.isDirectory(path)) await node.mkdir(relativePath)
    else await node.add(relativePath, "")
  }

  return node.get(relativePath).then(t => t ? t.header.bareNameFilter : null)
}
/**
 * Finds the loaded private node responsible for `path`: the node at the path
 * itself, or the nearest ancestor present in `map`.  Returns the matched
 * path together with the node, or the topmost path with `null` when nothing
 * along the ancestry is loaded.
 */
function findPrivateNode(
  map: Record<string, PrivateNode>,
  path: DistinctivePath
): [DistinctivePath, PrivateNode | null] {
  let current = path

  while (true) {
    const node = map[pathing.toPosix(current)]
    if (node) return [ current, node ]

    const parent = pathing.parent(current)
    if (!parent) return [ current, null ]
    current = parent
  }
}
/**
 * Decrypts and loads the private node for every path/key pair.  Pairs are
 * processed sequentially in sorted (parent-first) order via a promise
 * chain, because resolving a child's bare name filter requires its parent
 * to already be in the accumulated map.
 */
function loadPrivateNodes(
  pathKeys: PathKey[],
  mmpt: MMPT
): Promise<Record<string, PrivateNode>> {
  return sortedPathKeys(pathKeys).reduce((acc, { path, key }) => {
    return acc.then(async map => {
      let privateNode

      const unwrappedPath = pathing.unwrap(path)

      // if root, no need for bare name filter
      if (unwrappedPath.length === 1 && unwrappedPath[0] === pathing.Branch.Private) {
        privateNode = await PrivateTree.fromBaseKey(mmpt, key)

      } else {
        const bareNameFilter = await findBareNameFilter(map, path)
        if (!bareNameFilter) throw new Error(`Was trying to load the PrivateTree for the path \`${path}\`, but couldn't find the bare name filter for it.`)
        if (pathing.isDirectory(path)) {
          privateNode = await PrivateTree.fromBareNameFilter(mmpt, bareNameFilter, key)
        } else {
          privateNode = await PrivateFile.fromBareNameFilter(mmpt, bareNameFilter, key)
        }
      }

      const posixPath = pathing.toPosix(path)
      return { ...map, [posixPath]: privateNode }
    })
  }, Promise.resolve({}))
}
/**
 * Derives a PathKey for every non-public path the permissions grant access
 * to, exporting each path's symmetric key from the keystore.
 *
 * Note: this is a reduce over promises — the async callbacks are all started
 * by the synchronous reduce, so key lookups may run concurrently, while the
 * resulting list order is still preserved by chaining on `acc`.
 */
async function permissionKeys(
  permissions: Permissions
): Promise<PathKey[]> {
  return ucanPermissions.paths(permissions).reduce(async (
    acc: Promise<PathKey[]>,
    path: DistinctivePath
  ): Promise<PathKey[]> => {
    // Public paths need no decryption key.
    if (pathing.isBranch(pathing.Branch.Public, path)) return acc

    const name = await identifiers.readKey({ path })
    const key = await crypto.keystore.exportSymmKey(name)
    const pk: PathKey = { path: path, key: key }

    return acc.then(
      list => [ ...list, pk ]
    )
  }, Promise.resolve(
    []
  ))
}
/**
* Sort keys alphabetically by path.
* This is used to sort paths by parent first.
*/
function sortedPathKeys(list: PathKey[]): PathKey[] {
return list.sort(
(a, b) => pathing.toPosix(a.path).localeCompare(pathing.toPosix(b.path))
)
} | the_stack |
import { InMemoryProject } from "@atomist/automation-client/lib/project/mem/InMemoryProject";
import * as assert from "power-assert";
import { PushListenerInvocation } from "../../../../lib/api/listener/PushListener";
import {
hasFile,
hasFileContaining,
hasFileWithExtension,
isBranch,
IsPushToBranchWithPullRequest,
isRepo,
ToDefaultBranch,
} from "../../../../lib/api/mapping/support/commonPushTests";
// Behavioral tests for the common push-test mappings.  Each test builds a
// minimal fake PushListenerInvocation (cast via `as any`) with just the
// fields the mapping under test reads.
describe("commonPushTests", () => {

    // ToDefaultBranch: true when the push branch equals the repo's default
    // branch, with empty/undefined default treated as "master".
    describe("toDefaultBranch", () => {

        it("should pass for default branch", async () => {
            const pli = {
                project: InMemoryProject.of(),
                push: {
                    branch: "master",
                    repo: {
                        defaultBranch: "master",
                    },
                },
            };
            const r = await ToDefaultBranch.mapping(pli as any as PushListenerInvocation);
            assert(r);
        });

        it("should pass for empty default branch", async () => {
            const pli = {
                project: InMemoryProject.of(),
                push: {
                    branch: "master",
                    repo: {
                        defaultBranch: "",
                    },
                },
            };
            const r = await ToDefaultBranch.mapping(pli as any as PushListenerInvocation);
            assert(r);
        });

        it("should pass for null default branch", async () => {
            const pli = {
                project: InMemoryProject.of(),
                push: {
                    branch: "master",
                    repo: {
                        defaultBranch: undefined,
                    },
                },
            };
            const r = await ToDefaultBranch.mapping(pli as any as PushListenerInvocation);
            assert(r);
        });

        it("should not pass for non default branch", async () => {
            const pli = {
                project: InMemoryProject.of(),
                push: {
                    branch: "some-feature",
                    repo: {
                        defaultBranch: "master",
                    },
                },
            };
            const r = await ToDefaultBranch.mapping(pli as any as PushListenerInvocation);
            assert(!r);
        });

    });

    // hasFile: exact-path existence check in the project.
    describe("hasFile", () => {

        it("should not find file in empty repo", async () => {
            const project = InMemoryProject.of();
            const r = await hasFile("whatever").mapping({ project } as any as PushListenerInvocation);
            assert(!r);
        });

        it("should find file", async () => {
            const project = InMemoryProject.of({ path: "pom.xml", content: "<xml>" });
            const r = await hasFile("pom.xml").mapping({ project } as any as PushListenerInvocation);
            assert(r);
        });

    });

    // hasFileContaining: glob match plus regex match on the file content.
    describe("hasFileContaining", () => {

        it("should not find in empty repo", async () => {
            const project = InMemoryProject.of();
            const r = await hasFileContaining("x", /y/).mapping({ project } as any as PushListenerInvocation);
            assert(!r);
        });

        it("should find containing", async () => {
            const project = InMemoryProject.of({ path: "src/main/java/Thing.java", content: "public class Thing {}" });
            const r = await hasFileContaining("**/Thing.java", /class/).mapping({ project } as any as PushListenerInvocation);
            assert(r);
        });

        it("should not find when file does not contain", async () => {
            const project = InMemoryProject.of({ path: "src/main/java/Thing.kt", content: "public class Thing {}" });
            const r = await hasFileContaining("**/*.java", /xclass/).mapping({ project } as any as PushListenerInvocation);
            assert(!r);
        });

    });

    // hasFileWithExtension: empty/undefined extension means "any file at all";
    // a leading "." on the extension is tolerated.
    describe("hasFileWithExtension", () => {

        it("should return false if no files and empty extension", async () => {
            const project = InMemoryProject.of();
            const r = await hasFileWithExtension("").mapping({ project } as any as PushListenerInvocation);
            assert(!r);
        });

        it("should return true if any files and empty extension", async () => {
            const project = InMemoryProject.of({ path: "pom.xml", content: "<xml>" });
            const r = await hasFileWithExtension("").mapping({ project } as any as PushListenerInvocation);
            assert(r);
        });

        it("should return true if any files and undefined extension", async () => {
            const project = InMemoryProject.of({ path: "pom.xml", content: "<xml>" });
            const r = await hasFileWithExtension(undefined).mapping({ project } as any as PushListenerInvocation);
            assert(r);
        });

        it("should not find file in empty repo", async () => {
            const project = InMemoryProject.of();
            const r = await hasFileWithExtension("java").mapping({ project } as any as PushListenerInvocation);
            assert(!r);
        });

        it("should find one file", async () => {
            const project = InMemoryProject.of({ path: "pom.xml", content: "<xml>" });
            const r = await hasFileWithExtension("xml").mapping({ project } as any as PushListenerInvocation);
            assert(r);
            const r2 = await hasFileWithExtension("java").mapping({ project } as any as PushListenerInvocation);
            assert(!r2);
        });

        it("should strip . if provided", async () => {
            const project = InMemoryProject.of({ path: "pom.xml", content: "<xml>" });
            const r = await hasFileWithExtension(".xml").mapping({ project } as any as PushListenerInvocation);
            assert(r);
        });

    });

    // isRepo: regex match against "owner/repo".
    describe("isRepo", () => {

        it("should return false if no match", async () => {
            const r = await isRepo(/atomist\/.*/).mapping({ id: { owner: "atomisthq", repo: "foo"} } as any as PushListenerInvocation);
            assert(!r);
        });

        it("should return true for match", async () => {
            const r = await isRepo(/atomist\/foo.*/).mapping({ id: { owner: "atomist", repo: "foo"} } as any as PushListenerInvocation);
            assert(r);
        });

    });

    // isBranch: regex match against the push branch name.
    describe("isBranch", () => {

        it("should return true for feature branch", async () => {
            const r = await isBranch(/feature.*/).mapping({ push: { branch: "feature-x"} } as any as PushListenerInvocation);
            assert(r);
        });

        it("should return true for match", async () => {
            const r = await isBranch(/master/).mapping({ push: { branch: "master"} } as any as PushListenerInvocation);
            assert(r);
        });

        it("should return false for no match", async () => {
            const r = await isBranch(/feature.*/).mapping({ push: { branch: "release"} } as any as PushListenerInvocation);
            assert(!r);
        });

    });

    // IsPushToBranchWithPullRequest: false on the default branch; otherwise
    // queries the graph client and requires at least one "open" PR on the branch.
    describe("IsPushToBranchWithPullRequest", () => {

        it("should not pass for default branch", async () => {
            const pli = {
                project: InMemoryProject.of(),
                push: {
                    branch: "changes",
                    repo: {
                        defaultBranch: "changes",
                    },
                },
            };
            const r = await IsPushToBranchWithPullRequest.mapping(pli as any as PushListenerInvocation);
            assert(!r);
        });

        it("should pass for non-default branch with open pull request", async () => {
            const pli = {
                project: InMemoryProject.of(),
                push: {
                    branch: "oh-you-pretty-thing",
                    repo: {
                        name: "hunky-dory",
                        owner: "david-bowie",
                        defaultBranch: "changes",
                    },
                },
                context: {
                    graphClient: {
                        // Stubbed graph query returning one open PR.
                        query: (q: any) => ({
                            Repo: [{
                                branches: [{
                                    pullRequests: [
                                        { state: "open" },
                                    ],
                                }],
                            }],
                        }),
                    },
                },
            };
            const r = await IsPushToBranchWithPullRequest.mapping(pli as any as PushListenerInvocation);
            assert(r);
        });

        it("should not pass for branch with no open pull requests", async () => {
            const pli = {
                project: InMemoryProject.of(),
                push: {
                    branch: "eight-line-poem",
                    repo: {
                        name: "hunky-dory",
                        owner: "david-bowie",
                        defaultBranch: "changes",
                    },
                },
                context: {
                    graphClient: {
                        // Only closed PRs — the mapping must not match.
                        query: (q: any) => ({
                            Repo: [{
                                branches: [{
                                    pullRequests: [
                                        { state: "closed" },
                                        { state: "closed" },
                                        { state: "closed" },
                                    ],
                                }],
                            }],
                        }),
                    },
                },
            };
            const r = await IsPushToBranchWithPullRequest.mapping(pli as any as PushListenerInvocation);
            assert(!r);
        });

        it("should pass for non-default branch with open and closed pull requests", async () => {
            const pli = {
                project: InMemoryProject.of(),
                push: {
                    branch: "life-on-mars",
                    repo: {
                        name: "hunky-dory",
                        owner: "david-bowie",
                        defaultBranch: "changes",
                    },
                },
                context: {
                    graphClient: {
                        // A single open PR among closed ones is enough to match.
                        query: (q: any) => ({
                            Repo: [{
                                branches: [{
                                    pullRequests: [
                                        { state: "closed" },
                                        { state: "closed" },
                                        { state: "open" },
                                        { state: "closed" },
                                    ],
                                }],
                            }],
                        }),
                    },
                },
            };
            const r = await IsPushToBranchWithPullRequest.mapping(pli as any as PushListenerInvocation);
            assert(r);
        });

    });

});
declare namespace DDS {
/**
 * Base (marker) interface for all QoS policies.
 * It declares no members; concrete policies implement it so they can be
 * passed to an EntityQos constructor.
 */
interface Policy {
}
/**
 * History policy kinds: keep every sample (KeepAll) or only the most
 * recent ones (KeepLast).
 */
export enum HistoryKind {
    KeepAll = 0,
    KeepLast = 1
}
/**
 * History policy
 */
export class History implements Policy {
    /**
     * KeepAll - KEEP_ALL qos policy
     */
    static KeepAll:any;  // NOTE(review): typed `any` — presumably a ready-made Policy instance; confirm against the implementation
    /**
     * KeepLast - KEEP_LAST qos policy
     */
    static KeepLast:any;
}
/**
 * Reliability Policy
 * @example var qos = Reliability.Reliable
 */
export enum ReliabilityKind {
    Reliable = 0,
    BestEffort = 1
}
/**
 * Reliability policy
 * (The doc comment previously said "History policy" — a copy-paste error.)
 */
export class Reliability implements Policy {
    /**
     * Reliable - 'Reliable' reliability policy
     */
    static Reliable:any;
    /**
     * BestEffort - 'BestEffort' reliability policy
     */
    static BestEffort:any;
}
/**
 * Create new partition policy
 *
 * @param policies - partition names
 * @returns the resulting partition Policy
 * @example var qos = Partition('p1', 'p2')
 */
export function Partition(...policies:string[]):Policy;
/**
 * Create new content filter policy
 *
 * @param expr - filter expression
 * @returns the resulting content-filter Policy
 * @example var filter = ContentFilter("x>10 AND y<50")
 */
export function ContentFilter(expr:string):Policy;
/**
 * Create new time filter policy
 *
 * @param period - time duration (unit ?)  // NOTE(review): unit not documented here — confirm against the implementation
 * @returns the resulting time-filter Policy
 * @example var filter = TimeFilter(100)
 */
export function TimeFilter(period:number):Policy;
/**
 * Durability Policy kinds, ordered from weakest (Volatile) to strongest
 * (Persistent) persistence guarantee.
 */
export enum DurabilityKind {
    Volatile = 0,
    TransientLocal = 1,
    Transient = 2,
    Persistent = 3
}
/**
 * Durability Qos Policy
 */
export class Durability implements Policy {
    /**
     * Volatile - Volatile durability policy
     */
    static Volatile:any;
    /**
     * TransientLocal - TransientLocal durability policy
     */
    static TransientLocal:any;
    /**
     * Transient - Transient durability policy
     */
    static Transient:any;
    /**
     * Persistent - Persistent durability policy
     */
    static Persistent:any;
}
// Construct-signature interface: TopicQos/DataReaderQos/DataWriterQos below
// are values conforming to this shape, used with `new`.
interface EntityQos {
    /**
     * Creates any of the DDS entities quality of service, including DataReaderQos and DataWriterQos.
     *
     * @param policies - list of policies for the Qos entity
     */
    new (...policies:Policy[]): EntityQos;
    /**
     * Adds the given policy to this instance.
     * @param policy - the policy to add
     * @return A new copy of this instance with the combined policies
     */
    add (policy:Policy): EntityQos;
}
/**
 * Topic quality of service object
 */
export var TopicQos:EntityQos;
/**
 * DataReader quality of service object
 */
export var DataReaderQos:EntityQos;
/**
 * DataWriter quality of service object
 */
export var DataWriterQos:EntityQos;
export class Topic {
    /**
     * Creates a `Topic` in the domain `did`, named `tname`, having `qos` Qos,
     * for the type `ttype` whose registered name is `tregtype`.
     * @param did - DDS domain ID
     * @param tname - topic name
     * @param qos - topic Qos
     * @param ttype - topic type. If not specified, a generic type is used.
     * @param tregtype - topic registered type name. If not specified, 'ttype' is used.
     */
    constructor(did:number, tname:string, qos:EntityQos, ttype?:string, tregtype?:string);
    /**
     * Called when topic gets registered in the runtime
     */
    onregistered():void;
    /**
     * Called when topic gets unregistered in the runtime
     */
    onunregistered():void;
}
export class DataReader {
    /**
     * Creates a `DataReader` for a given topic, with a specific QoS, in a specific DDS runtime.
     *
     * A `DataReader` allows to read data for a given topic with a specific QoS. A `DataReader`
     * goes through different states: it is initially disconnected and changes to the connected state
     * when the underlying transport connection is successfully established with the server. At this point
     * a `DataReader` can be explicitly closed or disconnected. A disconnection can happen as the result
     * of a network failure or server failure. Disconnections and reconnections are managed by the runtime.
     *
     * @param runtime - DDS Runtime
     * @param topic - DDS Topic
     * @param qos - DataReader quality of service
     */
    constructor(runtime:Runtime, topic:Topic, qos:EntityQos);
    /**
     * Resets the reader's internal statistics.
     */
    resetStats():void;
    /**
     * Attaches the listener `l` to this data reader and returns
     * the id associated to the listener.
     * @param l - listener code
     * @returns listener handle
     */
    addListener(l:(msg:any) => void):number;
    /**
     * Removes a listener from this data reader.
     * @param idx - listener id
     */
    removeListener(idx:number):void;
    /**
     * Closes the DataReader.
     */
    close():void;
}
export class DataWriter {
    /**
     * Creates a `DataWriter` for a given topic, with a specific QoS, in a specific DDS runtime.
     *
     * Defines a DDS data writer. This type
     * is used to write data for a specific topic with a given QoS.
     * A `DataWriter` goes through different states: it is initially disconnected and changes to the connected
     * state when the underlying transport connection is successfully established with the server.
     * At this point a `DataWriter` can be explicitly closed or disconnected. A disconnection can happen
     * as the result of a network failure or server failure. Disconnections and reconnections are managed by the
     * runtime.
     *
     * @param runtime - DDS Runtime
     * @param topic - DDS Topic
     * @param qos - DataWriter quality of service
     */
    constructor(runtime:Runtime, topic:Topic, qos:EntityQos);
    /**
     * Writes one or more samples.
     * @param ds - data sample(s)
     */
    write(...ds:any[]):void;
    /**
     * Closes the DataWriter.
     */
    close():void;
}
export class DataCache {
    /**
     * Constructs a `DataCache` with a given `depth`. If the `cache` parameter
     * is present, then the current cache is initialized with this parameter.
     *
     * Provides a way of storing and flexibly accessing the
     * data received through a `DataReader`. A `DataCache` is organized as
     * a map of queues. The depth of the queues is specified at construction
     * time.
     *
     * @param depth - cache size
     * @param cache - cache data structure. NOTE(review): described as optional
     * above but declared required here — confirm against the implementation.
     */
    constructor(depth:number, cache:any);
    /**
     * Register a listener to be notified whenever data which matches a predicate is written into the cache.
     * If no predicate is provided then the listener is always notified upon data insertion.
     *
     * @param l - listener function
     * @param p - predicate
     */
    addListener(l:(data:any) => void, p?:(data:any) => boolean):void;
    /**
     * Write the element `data` with key `k` into the cache.
     *
     * @param k - data key
     * @param data - data value
     * @returns the written data value
     */
    write(k:any, data:any):any;
    /**
     * Same as forEach but applied, for each key, only to the first `n` samples of the cache.
     *
     * @param f - the function to be applied
     * @param n - samples set size
     */
    forEachN(f:(data:any) => any, n:number):any[];
    /**
     * Execute the function `f` for each element of the cache.
     *
     * @param f - the function to be applied
     * @returns results of the function execution
     */
    forEach(f:(data:any) => any):any[];
    /**
     * Returns a cache that is the result of applying `f` to each element of the cache.
     *
     * @param f - the function to be applied
     * @returns A cache holding the results of the function execution
     */
    map(f:(data:any) => any):DataCache;
    /**
     * Returns the list of elements in the cache that satisfy the predicate `f`.
     *
     * @param f - the predicate to be applied to filter the cache values
     * @returns An array holding the filtered values
     */
    filter(f:(data:any) => boolean):any[];
    /**
     * Returns the list of elements in the cache that don't satisfy the predicate `f`.
     *
     * @param f - the predicate used to filter out the cache values
     * @returns An array holding the filtered values
     * @see DataCache#filter
     */
    filterNot(f:(data:any) => boolean):any[];
    /**
     * Returns the values included in the cache as an array.
     *
     * @return All the cache values
     */
    read():any[];
    /**
     * Returns the last value of the cache in an array.
     *
     * @return the last value of the cache
     */
    readLast():any;
    /**
     * Returns all the values included in the cache as an array and empties the cache.
     *
     * @return All the cache values
     */
    takeAll():any[];
    /**
     * Takes a value from the cache as a Monad, i.e. `coffez.Some` if it exists, `coffez.None` if not.
     * NOTE(review): upstream docs refer to the `k`th value, but the declared
     * signature takes no parameter — confirm against the implementation.
     *
     * @return the taken value
     */
    take():any;
    /**
     * Takes elements from the cache up to when the predicate `f` is satisfied.
     *
     * @param f - the predicate
     * @return taken cache values
     */
    takeWithFilter(f:(data:any) => boolean):any[];
    /**
     * Return `coffez.Some(v)` if there is an element in the cache corresponding to the
     * key `k` otherwise it returns `coffez.None`.
     *
     * @param k - key
     */
    get(k:any):any;
    /**
     * Return `coffez.Some(v)` if there is an element in the cache corresponding to the
     * key `k` otherwise executes `f` and returns its result.
     *
     * @param k - key
     * @param f - the function to apply
     */
    getOrElse(k:any, f:(data:any)=> any):any;
    /**
     * Folds the elements of the cache using `z` as the `zero` element and
     * `f` as the binary operator.
     *
     * @param z - initial value
     * @param f - reduce function
     */
    fold(z:any, f:(data:any) => any):void;
    /**
     * Clears the data cache.
     */
    clear():void;
}
interface Runtime {
    /**
     * Constructs a DDS Runtime object.
     *
     * Maintains the connection with the server, re-establishes the connection
     * if dropped, and mediates the `DataReader` and `DataWriter` communication.
     */
    new (): Runtime;
    /**
     * Connect the runtime to the server. If the runtime is already connected an exception is thrown.
     *
     * @param server - Vortex Web server WebSocket URL
     * @param authToken - Authorization token
     */
    connect(server:string, authToken?:string): void;
    /**
     * Disconnects, without closing, a `Runtime`. Notice that upon re-connection all existing
     * subscriptions and publications will be re-established.
     */
    disconnect(): void;
    /**
     * Registers the provided Topic.
     *
     * @param t - Topic to be registered
     */
    registerTopic(t:Topic): void;
    /**
     * Function called when runtime is connected.
     *
     * @param e - connection event
     */
    onconnect(e:any): void;
    /**
     * Function called when runtime is disconnected.
     *
     * @param e - disconnection event
     */
    ondisconnect(e:any): void;
    /**
     * Closes the DDS runtime and as a consequence all the `DataReaders` and `DataWriters` that belong to this runtime.
     *
     */
    close(): void;
    /**
     * Checks whether the Runtime is connected.
     * @return `true` if connected, `false` if not
     */
    isConnected() : boolean;
    /**
     * Checks whether the Runtime is closed.
     * @return `true` if closed, `false` if not
     */
    isClosed() : boolean;
}
/**
 * Runtime constructor holder.
 */
export var runtime:{
    Runtime : Runtime;
};
/**
 * Library version string.
 */
export var VERSION:string;
}
/**
 * Defines the core Vortex-Web-Client JavaScript library. It includes the JavaScript API for DDS. This API allows
 * web applications to share data among them as well as with native DDS applications.
 */
declare var dds:typeof DDS;
import type { Network, Provider } from "@saberhq/solana-contrib";
import {
DEFAULT_NETWORK_CONFIG_MAP,
SignerWallet,
} from "@saberhq/solana-contrib";
import type { ISeedPoolAccountsFn } from "@saberhq/stableswap-sdk";
import {
DEFAULT_TOKEN_DECIMALS,
deployNewSwap,
RECOMMENDED_FEES,
} from "@saberhq/stableswap-sdk";
import type { TokenAccountData } from "@saberhq/token-utils";
import {
ASSOCIATED_TOKEN_PROGRAM_ID,
SPLToken,
TOKEN_PROGRAM_ID,
u64,
} from "@saberhq/token-utils";
import type { Signer } from "@solana/web3.js";
import {
Account,
Connection,
Keypair,
LAMPORTS_PER_SOL,
PublicKey,
} from "@solana/web3.js";
import base58 from "bs58";
import * as fs from "fs/promises";
import * as os from "os";
import path from "path";
import yargs from "yargs";
import { hideBin } from "yargs/helpers";
import { deployTestTokens } from "../../test/deployTestTokens";
// Default amplification coefficient for newly deployed pools.
const DEFAULT_AMP_FACTOR = 100;
// Default initial deposits: 1,000,000 whole tokens, scaled to base units
// using the default token decimals.
export const DEFAULT_INITIAL_TOKEN_A_AMOUNT =
  1_000_000 * Math.pow(10, DEFAULT_TOKEN_DECIMALS);
export const DEFAULT_INITIAL_TOKEN_B_AMOUNT =
  1_000_000 * Math.pow(10, DEFAULT_TOKEN_DECIMALS);
/**
 * Reads a Solana keypair from a JSON keyfile (an array of secret-key bytes).
 *
 * @param path - filesystem path of the keyfile
 * @returns the reconstructed `Keypair`
 */
const readKeyfile = async (path: string): Promise<Keypair> => {
  const raw = await fs.readFile(path, "utf-8");
  const secretKeyBytes = JSON.parse(raw) as number[];
  return Keypair.fromSecretKey(new Uint8Array(secretKeyBytes));
};
/**
 * Deploys a new StableSwap pool and writes the resulting account addresses
 * to a JSON file at `outfile`.
 *
 * @param options - deployment parameters (provider, program ID, mints, Qos
 * signers, seeding callback, etc.)
 */
const run = async (options: {
  provider: Provider;
  programID: PublicKey;
  adminAccount: PublicKey;
  outfile: string;
  ampFactor: number;
  swapAccountSigner?: Signer;
  poolTokenMintSigner?: Signer;
  initialLiquidityProvider?: PublicKey;
  useAssociatedAccountForInitialLP?: boolean;
  tokenAMint: PublicKey;
  tokenBMint: PublicKey;
  seedPoolAccounts: ISeedPoolAccountsFn;
  minterPrivateKey?: string;
}) => {
  const { adminAccount, tokenAMint, tokenBMint, outfile, minterPrivateKey } =
    options;
  const fees = RECOMMENDED_FEES;
  // Perform the actual on-chain deployment.
  const { swap: newSwap } = await deployNewSwap({
    provider: options.provider,
    swapProgramID: options.programID,
    adminAccount,
    tokenAMint,
    tokenBMint,
    ampFactor: new u64(options.ampFactor),
    fees,
    swapAccountSigner: options.swapAccountSigner,
    poolTokenMintSigner: options.poolTokenMintSigner,
    initialLiquidityProvider: options.initialLiquidityProvider,
    useAssociatedAccountForInitialLP: options.useAssociatedAccountForInitialLP,
    seedPoolAccounts: options.seedPoolAccounts,
  });
  // Collect everything a client needs to use the new pool.
  // NB: insertion order determines the key order in the emitted JSON.
  const accounts: Record<string, unknown> = {};
  if (minterPrivateKey) {
    accounts.MinterPrivateKey = minterPrivateKey;
  }
  accounts.TokenAMint = tokenAMint.toString();
  accounts.TokenBMint = tokenBMint.toString();
  accounts.SwapAddress = newSwap.config.swapAccount.toString();
  accounts.ProgramID = newSwap.config.swapProgramID.toString();
  accounts.Fees = fees;
  accounts.AdminAccount = adminAccount.toString();
  accounts.LPTokenMint = newSwap.state.poolTokenMint.toString();
  accounts.AdminFeeAccountA = newSwap.state.tokenA.adminFeeAccount.toString();
  accounts.AdminFeeAccountB = newSwap.state.tokenB.adminFeeAccount.toString();
  // Persist the account info to disk (creating parent directories as needed).
  const jsonRepr = JSON.stringify(accounts, null, 2);
  await fs.mkdir(path.dirname(outfile), { recursive: true });
  await fs.writeFile(outfile, jsonRepr);
  console.log("Swap deploy successful! Info:");
  console.log(jsonRepr);
  console.log(`File written to ${outfile}.`);
};
/**
 * CLI entry point. Registers the `deploy-pool` command, which deploys a new
 * StableSwap pool to the selected Solana cluster and writes the resulting
 * account addresses to a JSON file.
 */
export default async (): Promise<void> => {
  await yargs(hideBin(process.argv))
    .option("cluster", {
      alias: "c",
      type: "string",
      description: "Solana Cluster",
    })
    .choices("cluster", Object.keys(DEFAULT_NETWORK_CONFIG_MAP))
    .demandOption("cluster")
    .command(
      "deploy-pool",
      "Deploys a new StableSwap pool.",
      // eslint-disable-next-line @typescript-eslint/no-empty-function
      (y) =>
        y
          .option("admin_account", {
            type: "string",
            description: "Admin account public key.",
          })
          .demandOption("admin_account")
          .option("initial_amp_factor", {
            type: "number",
            description: "The initial amp factor to set for the pool.",
          })
          .default("initial_amp_factor", DEFAULT_AMP_FACTOR)
          .option("initial_liquidity_provider_keyfile", {
            type: "string",
            description:
              "Keyfile of the initial liquidity provider. This account should possess Token A and Token B.",
          })
          .option("initial_token_a_amount", {
            type: "number",
            description: "Initial amount of token A",
          })
          .default("initial_token_a_amount", DEFAULT_INITIAL_TOKEN_A_AMOUNT)
          .option("initial_token_b_amount", {
            type: "number",
            description: "Initial amount of token B",
          })
          .default("initial_token_b_amount", DEFAULT_INITIAL_TOKEN_B_AMOUNT)
          .option("token_a_mint", {
            type: "string",
            description: "Mint of Token A.",
          })
          .option("token_b_mint", {
            type: "string",
            description: "Mint of Token B.",
          })
          .option("deploy_test_tokens", {
            type: "boolean",
            description: "Deploys test tokens. This cannot be used on mainnet.",
          })
          .option("payer_keyfile", {
            type: "string",
            description: "Path to the JSON private key of the payer account.",
          })
          .option("request_sol_airdrop", {
            type: "boolean",
            description:
              "If specified, requests an airdrop of SOL to the payer account.",
          })
          .option("swap_account_keyfile", {
            type: "string",
            description: "Path to the JSON private key of the swap account.",
          })
          .option("pool_token_mint_keyfile", {
            type: "string",
            description:
              "Path to the JSON private key of the mint of the pool token.",
          })
          .option("program_id", {
            type: "string",
            description: "The program ID of the swap.",
          })
          .demandOption("program_id")
          .option("outfile", {
            type: "string",
            description: "Path to where the accounts file should be written.",
          }),
      // eslint-disable-next-line @typescript-eslint/no-misused-promises
      async ({
        cluster,
        admin_account,
        initial_liquidity_provider_keyfile,
        outfile: maybeOutfile,
        payer_keyfile,
        request_sol_airdrop,
        swap_account_keyfile,
        pool_token_mint_keyfile,
        initial_amp_factor,
        program_id,
        token_a_mint,
        token_b_mint,
        initial_token_a_amount,
        initial_token_b_amount,
        deploy_test_tokens,
      }): Promise<void> => {
        // Load optional signer keypairs from disk when keyfiles were given.
        const swapAccountSigner = swap_account_keyfile
          ? await readKeyfile(swap_account_keyfile)
          : undefined;
        const poolTokenMintSigner = pool_token_mint_keyfile
          ? await readKeyfile(pool_token_mint_keyfile)
          : undefined;
        // Default output path: keyed by swap account pubkey when available,
        // otherwise a random suffix to avoid collisions.
        const outfile =
          maybeOutfile ??
          `${os.homedir()}/stableswap_deployments/${cluster}/pool-${
            swapAccountSigner?.publicKey.toString() ??
            (Math.random() * 100).toString()
          }.json`;
        if (!maybeOutfile) {
          console.warn(`--outfile not specified. Defaulting to ${outfile}`);
        }
        const payerSigner = payer_keyfile
          ? await readKeyfile(payer_keyfile)
          : Keypair.generate();
        console.log(`Deploying to cluster ${cluster}`);
        const connection = new Connection(
          DEFAULT_NETWORK_CONFIG_MAP[cluster as Network].endpoint
        );
        const provider = new SignerWallet(payerSigner).createProvider(
          connection
        );
        if (!payer_keyfile) {
          if (cluster === "mainnet-beta") {
            console.error("Must specify `payer_keyfile` on mainnet.");
            return;
          }
          console.warn(
            "`payer_keyfile` not specified. Generating a new keypair."
          );
        }
        // A freshly generated payer holds no SOL, so an airdrop is forced.
        if (request_sol_airdrop || !payer_keyfile) {
          if (cluster === "mainnet-beta") {
            console.error("Cannot request an airdrop of SOL on mainnet.");
            return;
          }
          console.log(
            `Requesting an airdrop of SOL to ${payerSigner.publicKey.toString()}`
          );
          const txSig = await connection.requestAirdrop(
            payerSigner.publicKey,
            10 * LAMPORTS_PER_SOL
          );
          await connection.confirmTransaction(txSig);
        }
        // set up tokens
        let tokenAMint: PublicKey;
        let tokenBMint: PublicKey;
        let seedPoolAccounts: ISeedPoolAccountsFn | undefined = undefined;
        let minterPrivateKey: string | undefined = undefined;
        // Deploy test tokens when requested explicitly, or implicitly when
        // no mints were supplied on a non-mainnet cluster.
        const shouldDeployTestTokens =
          deploy_test_tokens ??
          (!(token_a_mint && token_b_mint) && cluster !== "mainnet-beta");
        if (shouldDeployTestTokens && !deploy_test_tokens) {
          console.warn(
            "Token A and B mints not both specified. Defaulting to deploying test tokens."
          );
        }
        if (shouldDeployTestTokens) {
          if (token_a_mint || token_b_mint) {
            console.error(
              "Mint cannot be specified with `--deploy_test_tokens`."
            );
            return;
          }
          const testTokens = await deployTestTokens({
            provider,
            initialTokenAAmount: initial_token_a_amount,
            initialTokenBAmount: initial_token_b_amount,
          });
          tokenAMint = testTokens.mintA;
          tokenBMint = testTokens.mintB;
          seedPoolAccounts = testTokens.seedPoolAccounts;
          minterPrivateKey = base58.encode(testTokens.minterSigner.secretKey);
        } else if (!token_a_mint || !token_b_mint) {
          console.error("`--token_a_mint` and `--token_b_mint` are required.");
          return;
        } else {
          tokenAMint = new PublicKey(token_a_mint);
          tokenBMint = new PublicKey(token_b_mint);
        }
        // set up initial LP
        let initialLiquidityProvider: PublicKey | undefined;
        if (initial_liquidity_provider_keyfile) {
          const initialLiquidityProviderKP = await readKeyfile(
            initial_liquidity_provider_keyfile
          );
          initialLiquidityProvider = initialLiquidityProviderKP.publicKey;
          // Resolve the LP's associated token accounts for both mints.
          const [sourceAccountA, sourceAccountB] = (await Promise.all(
            ([tokenAMint, tokenBMint] as const).map(
              async (mint) =>
                await SPLToken.getAssociatedTokenAddress(
                  ASSOCIATED_TOKEN_PROGRAM_ID,
                  TOKEN_PROGRAM_ID,
                  mint,
                  initialLiquidityProviderKP.publicKey
                )
            )
          )) as [PublicKey, PublicKey];
          const [infoA, infoB] = (await Promise.all(
            [
              [tokenAMint, sourceAccountA] as const,
              [tokenBMint, sourceAccountB] as const,
            ].map(async ([mint, source]) =>
              new SPLToken(
                connection,
                mint,
                TOKEN_PROGRAM_ID,
                new Account(payerSigner.secretKey)
              ).getAccountInfo(source)
            )
          )) as [TokenAccountData, TokenAccountData];
          // check balances for seed
          if (infoA.amount.lt(new u64(initial_token_a_amount))) {
            console.error(
              `Token A balance too low for LP ${initialLiquidityProvider.toString()}`
            );
            process.exit(1);
          }
          if (infoB.amount.lt(new u64(initial_token_b_amount))) {
            console.error(
              `Token B balance too low for LP ${initialLiquidityProvider.toString()}`
            );
            process.exit(1);
          }
          // seed accounts
          seedPoolAccounts = ({ tokenAAccount, tokenBAccount }) => {
            return {
              instructions: [
                SPLToken.createTransferInstruction(
                  TOKEN_PROGRAM_ID,
                  sourceAccountA,
                  tokenAAccount,
                  initialLiquidityProviderKP.publicKey,
                  [new Account(initialLiquidityProviderKP.secretKey)],
                  initial_token_a_amount
                ),
                SPLToken.createTransferInstruction(
                  TOKEN_PROGRAM_ID,
                  sourceAccountB,
                  tokenBAccount,
                  initialLiquidityProviderKP.publicKey,
                  [new Account(initialLiquidityProviderKP.secretKey)],
                  initial_token_b_amount
                ),
              ],
              signers: [initialLiquidityProviderKP],
            };
          };
        } else if (!shouldDeployTestTokens) {
          console.error(
            "No initial LP provided, but there is also no test token deployment."
          );
          process.exit(1);
        }
        if (!seedPoolAccounts) {
          console.error("Seed pool accounts not created.");
          process.exit(1);
        }
        try {
          const programID = new PublicKey(program_id);
          console.log(
            `Deploying new swap with program ID ${programID.toString()}`
          );
          await run({
            provider,
            programID,
            adminAccount: new PublicKey(admin_account),
            outfile,
            ampFactor: initial_amp_factor,
            swapAccountSigner,
            poolTokenMintSigner,
            initialLiquidityProvider,
            useAssociatedAccountForInitialLP: true,
            tokenAMint,
            tokenBMint,
            seedPoolAccounts,
            minterPrivateKey,
          }).catch((err) => {
            console.error("Error deploying new swap.");
            console.error(err);
            process.exit(1);
          });
        } catch (e) {
          if (e instanceof Error && e.message.includes("ENOENT")) {
            console.error(
              `The program deployment info for cluster ${cluster} was not found.`,
              e
            );
          } else {
            console.error(`Could not open deployment info`, e);
          }
        }
      }
    )
    .demandCommand()
    .help().argv;
};
import * as spec from '@jsii/spec';
import { Stability } from '@jsii/spec';
import { sourceToAssemblyHelper as compile, compileJsiiForTest } from '../lib';
import { renderSymbolDocumentation } from '../lib/docs';
// Compilations in these tests can be slow; allow up to a minute per test.
jest.setTimeout(60_000);
// ----------------------------------------------------------------------
test('extract summary line from doc block, ends with a period', async () => {
  const assembly = await compile(`
  /**
   * Hello this is the documentation for this class
   */
  export class Foo {
    public bar() { }
  }
  `);
  // The trailing period is added by the doc parser.
  expect(assembly.types!['testpkg.Foo'].docs).toEqual({
    summary: 'Hello this is the documentation for this class.',
  });
});
// ----------------------------------------------------------------------
test('extract remarks from whitespace-separated doc block', async () => {
  const assembly = await compile(`
  /**
   * Hello this is the documentation for this class.
   *
   * Here are some more details about it.
   *
   * It looks pretty good, doesn't it?
   */
  export class Foo {
    public bar() { }
  }
  `);
  expect(assembly.types!['testpkg.Foo'].docs!.remarks).toBe(
    "Here are some more details about it.\n\nIt looks pretty good, doesn't it?",
  );
});
// ----------------------------------------------------------------------
test('separate long doc comment into summary and remarks', async () => {
  const assembly = await compile(`
  /**
   * Lots of people enjoy writing very long captions here. I think it's because they
   * copy/paste them out of CloudFormation, which has a tendency to just have one
   * doc block per API item and no structural separation.
   */
  export class Foo {
    public bar() { }
  }
  `);
  // The first sentence becomes the summary; the rest becomes remarks.
  expect(assembly.types!['testpkg.Foo'].docs).toEqual({
    summary: 'Lots of people enjoy writing very long captions here.',
    remarks:
      "I think it's because they\ncopy/paste them out of CloudFormation, which has a tendency to just have one\n" +
      'doc block per API item and no structural separation.',
  });
});
// ----------------------------------------------------------------------
test('separate non-space but newline terminated docs into summary&remarks', async () => {
  const assembly = await compile(`
  /**
   * Lots of people enjoy writing very long captions here.
   * I think it's because they copy/paste them out of CloudFormation,
   * which has a tendency to just have one
   * doc block per API item and no structural separation.
   */
  export class Foo {
    public bar() { }
  }
  `);
  expect(assembly.types!['testpkg.Foo'].docs).toEqual({
    summary: 'Lots of people enjoy writing very long captions here.',
    remarks:
      "I think it's because they copy/paste them out of CloudFormation,\nwhich has a tendency to just have one\n" +
      'doc block per API item and no structural separation.',
  });
});
// ----------------------------------------------------------------------
test('dont add period to summary that ends in exclamation mark', async () => {
  const assembly = await compile(`
  /**
   * I'm happy about this class!
   */
  export class Foo {
    public bar() { }
  }
  `);
  // Existing terminal punctuation is preserved as-is.
  expect(assembly.types!['testpkg.Foo'].docs).toEqual({
    summary: "I'm happy about this class!",
  });
});
// ----------------------------------------------------------------------
test('parse method docs', async () => {
  const assembly = await compile(`
  export class Foo {
    /**
     * Do the foo
     */
    public bar(arg: string) { Array.isArray(arg); }
  }
  `);
  const classType = assembly.types!['testpkg.Foo'] as spec.ClassType;
  // Method-level doc comments attach to the method, not the class.
  expect(classType.methods![0].docs).toEqual({
    summary: 'Do the foo.',
  });
});
// ----------------------------------------------------------------------
test('associate parameter comments with right parameter', async () => {
  const assembly = await compile(`
  export class Foo {
    /**
     * Do the foo
     *
     * @param arg First argument is best argument
     */
    public bar(arg: string) { Array.isArray(arg); }
  }
  `);
  const classType = assembly.types!['testpkg.Foo'] as spec.ClassType;
  expect(classType.methods![0].parameters![0].docs).toEqual({
    summary: 'First argument is best argument.',
  });
});
// ----------------------------------------------------------------------
test('read example', async () => {
  const assembly = await compile(`
  export class Foo {
    /**
     * Do the foo
     *
     * @example
     *
     * // Example of fooing it up:
     * new Foo().bar();
     */
    public bar() {}
  }
  `);
  const classType = assembly.types!['testpkg.Foo'] as spec.ClassType;
  expect(classType.methods![0].docs!.example).toBe(
    '// Example of fooing it up:\n' + 'new Foo().bar();',
  );
});
// ----------------------------------------------------------------------
test('read default value', async () => {
  const assembly = await compile(`
  export interface Foo {
    /**
     * The foo we're talking about
     *
     * @default Some foo
     */
    readonly foo?: string;
  }
  `);
  const classType = assembly.types!['testpkg.Foo'] as spec.InterfaceType;
  expect(classType.properties![0].docs!.default).toBe('Some foo');
});
// ----------------------------------------------------------------------
test('read "see" annotation', async () => {
  const assembly = await compile(`
  /**
   * @see http://lmgtfy.com/
   */
  export class Foo {
  }
  `);
  expect(assembly.types!['testpkg.Foo'].docs!.see).toBe('http://lmgtfy.com/');
});
// ----------------------------------------------------------------------
test('read "returns" annotation', async () => {
  const assembly = await compile(`
  export class Foo {
    /**
     * Do the foo
     *
     * @returns Nothing, why would it?
     */
    public bar(arg: string) { Array.isArray(arg); }
  }
  `);
  const classType = assembly.types!['testpkg.Foo'] as spec.ClassType;
  expect(classType.methods![0].docs!.returns).toBe('Nothing, why would it?');
});
// ----------------------------------------------------------------------
test('can haz deprecated', async () => {
  const assembly = await compile(`
  export class Foo {
    /**
     * Do the foo
     *
     * @deprecated These days we do the bar
     */
    public bar(arg: string) { Array.isArray(arg); }
  }
  `);
  const classType = assembly.types!['testpkg.Foo'] as spec.ClassType;
  expect(classType.methods![0].docs!.deprecated).toBe(
    'These days we do the bar',
  );
});
// ----------------------------------------------------------------------
test('can mark stable', async () => {
  const assembly = await compile(`
  /**
   * Rock solid Foo
   *
   * @stable
   */
  export class Foo {
  }
  `);
  const classType = assembly.types!['testpkg.Foo'] as spec.ClassType;
  expect(classType.docs!.stability).toBe(spec.Stability.Stable);
});
// ----------------------------------------------------------------------
test('can mark experimental', async () => {
  const assembly = await compile(`
  /**
   * Slightly less solid Foo
   *
   * @experimental
   */
  export class Foo {
  }
  `);
  const classType = assembly.types!['testpkg.Foo'] as spec.ClassType;
  expect(classType.docs!.stability).toBe(spec.Stability.Experimental);
});
// ----------------------------------------------------------------------
test('can mark external', async () => {
  const assembly = await compile(`
  /**
   * @stability external
   */
  export class Foo {
    public floop() {
      Array.isArray(3);
    }
  }
  `);
  const classType = assembly.types!['testpkg.Foo'] as spec.ClassType;
  const method = classType.methods!.find((m) => m.name === 'floop');
  // External stability applies to the class and is inherited by its members.
  expect(classType.docs!.stability).toBe(spec.Stability.External);
  expect(method!.docs!.stability).toBe(spec.Stability.External);
});
// ----------------------------------------------------------------------
test('can mark subclassable', async () => {
  const assembly = await compile(`
  /**
   * Become this Foo
   *
   * @subclassable
   */
  export class Foo {
  }
  `);
  const classType = assembly.types!['testpkg.Foo'] as spec.ClassType;
  expect(classType.docs!.subclassable).toBeTruthy();
});
// ----------------------------------------------------------------------
test('can add arbitrary tags', async () => {
  const assembly = await compile(`
  /**
   * @boop
   */
  export class Foo {
  }
  `);
  const classType = assembly.types!['testpkg.Foo'] as spec.ClassType;
  // Unknown tags land in docs.custom with the string value 'true'.
  expect(classType.docs!.custom).toEqual({ boop: 'true' });
});
// ----------------------------------------------------------------------
test('stability is inherited from parent type', async () => {
  // Pairs of (doc tag, expected parsed stability).
  const stabilities = [
    ['@deprecated Not good no more', Stability.Deprecated],
    ['@experimental', Stability.Experimental],
    ['@stable', Stability.Stable],
  ];
  for (const [tag, stability] of stabilities) {
    // eslint-disable-next-line no-await-in-loop
    const assembly = await compile(`
    /**
     * ${tag}
     */
    export class Foo {
      constructor() {
        Array.isArray(3);
      }
      public bar() {
        Array.isArray(3);
      }
    }
    `);
    /* eslint-enable no-await-in-loop */
    const classType = assembly.types!['testpkg.Foo'] as spec.ClassType;
    const initializer = classType.initializer!;
    const method = classType.methods!.find((m) => m.name === 'bar')!;
    // Both the initializer and methods inherit the class's stability.
    expect(classType.docs!.stability).toBe(stability);
    expect(initializer.docs!.stability).toBe(stability);
    expect(method.docs!.stability).toBe(stability);
  }
});
// ----------------------------------------------------------------------
test('@example can contain @ sign', async () => {
  const assembly = await compile(`
  /**
   * An IAM role to associate with the instance profile assigned to this Auto Scaling Group.
   *
   * @example
   *
   * import * as x from '@banana';
   */
  export class Foo {
  }
  `);
  const classType = assembly.types!['testpkg.Foo'] as spec.ClassType;
  // The '@' inside the example must not be parsed as a new doc tag.
  expect(classType.docs!.example).toBe("import * as x from '@banana';");
});
// ----------------------------------------------------------------------
test('@experimental status is reflected in generated docstring', async () => {
  const result = await compileJsiiForTest(`
  /**
   * Here is a fresh class
   *
   * @experimental
   */
  export class Foo {
  }
  `);
  // Both the emitted JS and the declaration file carry the stability banner.
  expect(result.files['index.js']).toContain(
    lines(
      '/**',
      ' * (experimental) Here is a fresh class.',
      ' *',
      ' * @experimental',
      ' */',
      'class Foo {',
      '}',
    ),
  );
  expect(result.files['index.d.ts']).toContain(
    lines(
      '/**',
      ' * (experimental) Here is a fresh class.',
      ' *',
      ' * @experimental',
      ' */',
      'export declare class Foo {',
      '}',
    ),
  );
});
// ----------------------------------------------------------------------
test('@deprecated status is reflected in generated docstring', async () => {
  const result = await compileJsiiForTest(`
  /**
   * Here is an old class
   *
   * @deprecated Use something else
   */
  export class Fogey {
  }
  `);
  expect(result.files['index.js']).toContain(
    lines(
      '/**',
      ' * (deprecated) Here is an old class.',
      ' *',
      ' * @deprecated Use something else',
      ' */',
      'class Fogey {',
      '}',
    ),
  );
  expect(result.files['index.d.ts']).toContain(
    lines(
      '/**',
      ' * (deprecated) Here is an old class.',
      ' *',
      ' * @deprecated Use something else',
      ' */',
      'export declare class Fogey {',
      '}',
    ),
  );
});
// ----------------------------------------------------------------------
test('Rendering jsii docs back to a doc comment', () => {
  // Round-trip: structured docs render back into tag-ordered comment text.
  expect(
    renderSymbolDocumentation({
      summary: 'This is the summary',
      remarks: 'You can use this\nor not, as you see fit.',
      default: 'thas a default value',
      see: 'https://some.url/',
      subclassable: true,
      returns: 'A value',
      example: 'print("a thing");',
      custom: {
        sing: 'whenyourewinning',
      },
    }),
  ).toEqual(
    lines(
      'This is the summary',
      '',
      'You can use this',
      'or not, as you see fit.',
      '',
      '@returns A value',
      '@default thas a default value',
      '@see https://some.url/',
      '@subclassable',
      '@sing whenyourewinning',
      '@example',
      '',
      'print("a thing");',
    ),
  );
});
// ----------------------------------------------------------------------
function lines(...ls: string[]) {
return indented(0, ...ls);
}
function indented(indent: number, ...lines: string[]) {
const prefix = ' '.repeat(indent);
return lines.map((l) => `${prefix}${l}`).join('\n');
} | the_stack |
import setupProject from 'helpers/setupProject'
// Generous per-test budget (ms): these e2e tests drive real docker/registry work.
const TIMEOUT = 200000 // we need time for docker interactions
describe('Full E2E', () => {
it(
'runs the full monodeploy pipeline',
setupProject({
repository: [
{
'pkg-1': {},
'pkg-2': {
dependencies: ['pkg-1'],
},
'pkg-3': { dependencies: [['pkg-2', 'workspace:*']] },
'pkg-4': { dependencies: ['pkg-3'] },
'pkg-isolated': {},
},
],
config: {
access: 'public',
changelogFilename: 'changelog.md',
changesetFilename: 'changes.json.tmp',
dryRun: false,
autoCommit: true,
autoCommitMessage: 'chore: release',
conventionalChangelogConfig: require.resolve(
'@tophat/conventional-changelog-config',
),
git: {
push: true,
remote: 'origin',
tag: true,
},
jobs: 1,
persistVersions: true,
noRegistry: false,
topological: true,
topologicalDev: true,
maxConcurrentReads: 1,
maxConcurrentWrites: 1,
},
testCase: async ({ run, readFile, exec, writeFile }) => {
// First semantic commit
await writeFile('packages/pkg-1/README.md', 'Modification.')
await exec('git add . && git commit -n -m "feat: some fancy addition" && git push')
const { error } = await run()
if (error) console.error(error)
expect(error).toBeUndefined()
// verify yarn.lock is not staged with modifications
await exec('yarn && git diff --quiet --exit-code yarn.lock')
// Locally
let localChangeset = JSON.parse(await readFile('changes.json.tmp'))
expect(localChangeset).toEqual({
'pkg-1': expect.objectContaining({
changelog: expect.stringContaining('some fancy addition'),
tag: 'pkg-1@0.1.0',
version: '0.1.0',
strategy: 'minor',
}),
'pkg-2': expect.objectContaining({
changelog: null,
tag: 'pkg-2@0.0.1',
version: '0.0.1',
strategy: 'patch',
}),
'pkg-3': expect.objectContaining({
changelog: null,
tag: 'pkg-3@0.0.1',
version: '0.0.1',
strategy: 'patch',
}),
'pkg-4': expect.objectContaining({
changelog: null,
tag: 'pkg-4@0.0.1',
version: '0.0.1',
strategy: 'patch',
}),
})
let localChangelog = await readFile('changelog.md')
expect(localChangelog).toEqual(expect.stringContaining('some fancy addition'))
// On Remote:
// Assert tags pushed
await exec('git ls-remote --exit-code --tags origin refs/tags/pkg-1@0.1.0')
await exec('git ls-remote --exit-code --tags origin refs/tags/pkg-2@0.0.1')
await exec('git ls-remote --exit-code --tags origin refs/tags/pkg-3@0.0.1')
// Assert changelog updated on remote
expect((await exec('git cat-file blob origin/main:changelog.md')).stdout).toEqual(
expect.stringContaining('fancy'),
)
// -----
// Make another semantic change
await writeFile('packages/pkg-2/README.md', 'Modification.')
await exec(
'git add . && git commit -n -m "feat: some breaking feat addition" ' +
'-m "BREAKING CHANGE: This is a breaking change" && git push',
)
const { error: error2 } = await run()
if (error2) console.error(error2)
expect(error2).toBeUndefined()
// ---
// verify yarn.lock is not staged with modifications
await exec('yarn && git diff --quiet --exit-code yarn.lock')
localChangeset = JSON.parse(await readFile('changes.json.tmp'))
expect(localChangeset).toEqual({
'pkg-2': expect.objectContaining({
changelog: expect.stringContaining('breaking'),
tag: 'pkg-2@1.0.0',
version: '1.0.0',
strategy: 'major',
}),
'pkg-3': expect.objectContaining({
changelog: null,
tag: 'pkg-3@0.0.2',
version: '0.0.2',
strategy: 'patch',
}),
'pkg-4': expect.objectContaining({
changelog: null,
tag: 'pkg-4@0.0.2',
version: '0.0.2',
strategy: 'patch',
}),
})
localChangelog = await readFile('changelog.md')
expect(localChangelog).toEqual(
expect.stringContaining('fancy'), // should have old entry
)
expect(localChangelog).toEqual(
expect.stringContaining('breaking'), // should have new entry
)
// On Remote:
// Assert tags pushed
await exec('git ls-remote --exit-code --tags origin refs/tags/pkg-2@1.0.0')
await exec('git ls-remote --exit-code --tags origin refs/tags/pkg-3@0.0.2')
// Assert changelog updated on remote
expect((await exec('git cat-file blob origin/main:changelog.md')).stdout).toEqual(
expect.stringContaining('breaking'),
)
// assert modified manifests are correct
const pkg3Manifest = JSON.parse(
(
await exec('git cat-file blob origin/main:packages/pkg-3/package.json')
).stdout.toString(),
)
expect(pkg3Manifest.dependencies).toEqual(
expect.objectContaining({
'pkg-2': 'workspace:^1.0.0',
}),
)
},
}),
TIMEOUT,
)
it(
'runs the full monodeploy pipeline for pre-release',
setupProject({
repository: [
{
'pkg-1': {},
'pkg-2': {
dependencies: ['pkg-1'],
},
'pkg-3': { dependencies: ['pkg-2'] },
'pkg-4': { dependencies: ['pkg-3'] },
'pkg-isolated': {},
},
],
config: {
access: 'public',
changelogFilename: 'changelog.md',
changesetFilename: 'changes.json.tmp',
dryRun: false,
autoCommit: true,
autoCommitMessage: 'chore: release',
conventionalChangelogConfig: require.resolve(
'@tophat/conventional-changelog-config',
),
git: {
push: true,
remote: 'origin',
tag: true,
},
jobs: 1,
persistVersions: true,
noRegistry: false,
topological: true,
topologicalDev: true,
maxConcurrentReads: 1,
maxConcurrentWrites: 1,
prerelease: false,
prereleaseId: 'alpha',
prereleaseNPMTag: 'next',
},
testCase: async ({ run, readFile, exec, writeFile }) => {
// First semantic commit
await writeFile('packages/pkg-1/README.md', 'Modification.')
await exec('git add . && git commit -n -m "feat: some fancy addition" && git push')
const { error } = await run()
if (error) console.error(error)
expect(error).toBeUndefined()
// On Remote:
// Assert tags pushed
await exec('git ls-remote --exit-code --tags origin refs/tags/pkg-1@0.1.0')
await exec('git ls-remote --exit-code --tags origin refs/tags/pkg-2@0.0.1')
await exec('git ls-remote --exit-code --tags origin refs/tags/pkg-3@0.0.1')
// Assert changelog updated on remote
expect((await exec('git cat-file blob origin/main:changelog.md')).stdout).toEqual(
expect.stringContaining('fancy'),
)
// -----
// Create & switch to "next" branch
await exec('git checkout -b next')
await exec('git push --set-upstream origin next')
// -----
// Make another semantic change
await writeFile('packages/pkg-1/README.md', 'Modification.')
await exec('git add . && git commit -n -m "feat: some exciting addition"')
const { error: error2 } = await run(['--prerelease'])
if (error2) console.error(error2)
expect(error2).toBeUndefined()
// ---
let localChangeset = JSON.parse(await readFile('changes.json.tmp'))
expect(localChangeset).toEqual({
'pkg-1': expect.objectContaining({
changelog: expect.stringContaining('some exciting addition'),
tag: 'pkg-1@0.2.0-alpha.0',
version: '0.2.0-alpha.0',
strategy: 'minor',
}),
'pkg-2': expect.objectContaining({
changelog: null,
tag: 'pkg-2@0.0.2-alpha.0',
version: '0.0.2-alpha.0',
strategy: 'patch',
}),
'pkg-3': expect.objectContaining({
changelog: null,
tag: 'pkg-3@0.0.2-alpha.0',
version: '0.0.2-alpha.0',
strategy: 'patch',
}),
'pkg-4': expect.objectContaining({
changelog: null,
tag: 'pkg-4@0.0.2-alpha.0',
version: '0.0.2-alpha.0',
strategy: 'patch',
}),
})
const localChangelog = await readFile('changelog.md')
expect(localChangelog).toEqual(
expect.stringContaining('fancy'), // should have old entry
)
expect(localChangelog).toEqual(
expect.stringContaining('exciting'), // should have new entry
)
// On Remote:
// Assert tags pushed
await exec('git ls-remote --exit-code --tags origin refs/tags/pkg-1@0.2.0-alpha.0')
// Assert changelog updated on remote
expect((await exec('git cat-file blob origin/next:changelog.md')).stdout).toEqual(
expect.stringContaining('exciting'),
)
// Another pre-release
// Make another semantic change
await writeFile('packages/pkg-1/README.md', 'Modification.')
await exec('git add . && git commit -n -m "fix: bugfix"')
const { error: error3 } = await run(['--prerelease'])
if (error3) console.error(error3)
expect(error3).toBeUndefined()
// On Remote:
// Assert tags pushed
await exec('git ls-remote --exit-code --tags origin refs/tags/pkg-1@0.2.0-alpha.1')
// ----
// Now we'll test merging "next" into "main"
await exec('git checkout main')
await exec('git merge next --no-verify --no-edit')
// Run non-prerelease. We expect all the pre-release versions to be squashed.
// No additional file modifications required, as the previous pre-release tags
// are ignored.
const { error: error4 } = await run()
if (error4) console.error(error4)
expect(error4).toBeUndefined()
localChangeset = JSON.parse(await readFile('changes.json.tmp'))
expect(localChangeset).toEqual({
'pkg-1': expect.objectContaining({
changelog: expect.stringContaining('some exciting addition'),
tag: 'pkg-1@0.2.0',
version: '0.2.0',
previousVersion: '0.1.0',
strategy: 'minor',
}),
'pkg-2': expect.objectContaining({
changelog: null,
tag: 'pkg-2@0.0.2',
version: '0.0.2',
previousVersion: '0.0.1',
strategy: 'patch',
}),
'pkg-3': expect.objectContaining({
changelog: null,
tag: 'pkg-3@0.0.2',
version: '0.0.2',
previousVersion: '0.0.1',
strategy: 'patch',
}),
'pkg-4': expect.objectContaining({
changelog: null,
tag: 'pkg-4@0.0.2',
version: '0.0.2',
previousVersion: '0.0.1',
strategy: 'patch',
}),
})
// On Remote:
// Assert tags pushed
await exec('git ls-remote --exit-code --tags origin refs/tags/pkg-1@0.2.0')
await exec('git ls-remote --exit-code --tags origin refs/tags/pkg-2@0.0.2')
await exec('git ls-remote --exit-code --tags origin refs/tags/pkg-3@0.0.2')
await exec('git ls-remote --exit-code --tags origin refs/tags/pkg-4@0.0.2')
},
}),
TIMEOUT,
)
}) | the_stack |
import { useEffect } from "react";
import type { NextPage } from "next";
import Head from "next/head";
import NextLink from "next/link";
import { useRouter } from "next/router";
import { ShowcaseNavbar } from "../components/showcase/Navbar";
import { useMeQuery } from "../generated/graphql";
const Home: NextPage = () => {
const { data, loading } = useMeQuery();
const router = useRouter();
if (!loading && data?.me != null) {
router.push("/app");
}
return (
<div>
<Head>
<title>Dino</title>
</Head>
<ShowcaseNavbar />
<div className="flex flex-col items-center pt-5 sm:pt-12 md:flex-row">
<div className="w-full p-1 px-9">
<h1 className="mb-1 text-5xl font-bold text-gray-900">
All your notes.
<br /> everywhere
</h1>
<p className="my-6 mt-4 text-lg font-medium text-gray-700">
<code>Dino</code> is a new way to jot down your thoughts
and all the stuff that you want to access easily and
quickly. Cos
{"'"} not everything is about <code>productivity</code>
</p>
<NextLink href="/login">
<a>
<button className="px-6 py-2 text-base font-medium border border-gray-300 rounded-sm hover:bg-gray-100">
Try Dino
</button>
</a>
</NextLink>
</div>
<div className="flex justify-center w-full pt-6 align-middle sm:pt-0">
<img
className="w-auto h-60 sm:h-96"
src="https://ouch-cdn2.icons8.com/B7tv246-7CYquGe8KkJ4sqNb__SEFbE8wrpCM-rckfM/rs:fit:912:912/czM6Ly9pY29uczgu/b3VjaC1wcm9kLmFz/c2V0cy9zdmcvNzI1/L2YwZDQyMGY4LTZh/MGMtNDU2Ny05ZDA0/LTVhZTRiNzIxMzZk/MS5zdmc.png"
alt=""
/>
</div>
</div>
<div className="flex flex-col items-center pt-20 sm:pt-16 md:flex-row">
<div className="w-full p-1 pr-2 md:w-1/4 pl-9">
<img
className="w-24 h-auto mb-4"
src="https://www.notion.so/cdn-cgi/image/format=auto,width=128,quality=100/front-static/pages/product/spot/spot-team-up.png"
alt=""
/>
<h1 className="mb-1 text-3xl font-bold text-gray-900">
Easy to use
</h1>
<p className="my-6 mt-4 text-xl font-medium text-gray-700">
Unlike other tools, <code>Dino</code> has a zero
learning curve which means you can start using it
without any confusion
</p>
</div>
<div className="flex justify-center p-3 pt-6 align-middle sm:w-3/4 sm:pt-0 sm:pl-14">
<img
// className="w-full h-auto border border-gray-300 rounded-md sm:m-3 sm:w-auto sm:h-96"
className="w-auto h-48 border border-gray-300 rounded-md w-border sm:h-96"
src="https://www.notion.so/cdn-cgi/image/format=auto,width=3840,quality=100/front-static/pages/product/value-props/ecosystem-tile-v3.png"
alt=""
/>
</div>
</div>
<div className="flex flex-col items-center pt-20 sm:pt-16 md:flex-row">
<div className="w-full p-1 pr-2 md:w-1/4 pl-9">
<img
className="w-auto h-12 mb-4"
src="https://www.notion.so/cdn-cgi/image/format=auto,width=256,quality=100/front-static/pages/product/spot/spot-workflow.png"
alt=""
/>
<h1 className="mb-1 text-3xl font-bold text-gray-900">
Clean and organised
</h1>
<p className="my-6 mt-4 text-xl font-medium text-gray-700">
<code>Dino</code> user interface is clean, minimal and
organised so you only get to see your docs when you want
to
</p>
</div>
<div className="flex justify-center p-3 pt-6 align-middle sm:w-3/4 sm:pt-0 sm:pl-14">
{/* <img
className="w-auto h-64 border border-gray-300 rounded-md w-border sm:h-96"
src="https://www.notion.so/cdn-cgi/image/format=auto,width=3840,quality=100/front-static/pages/product/value-props/context-tile.png"
alt=""
/> */}
<img
className="w-auto h-48 border border-gray-300 rounded-md w-border sm:h-96"
src="https://github.com/japrozs/dino/raw/master/assets/editor.png"
alt=""
/>
</div>
</div>
<div className="flex flex-col justify-center py-9">
<svg
className="w-auto h-20 mx-auto"
width="171"
height="171"
viewBox="0 0 171 171"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<g filter="url(#filter0_d_22_34)">
<rect
x="4"
width="163"
height="163"
rx="26"
fill="#FCFAFA"
/>
<path
d="M65.5854 61.6991V7.59353L85.719 27.2531V82.2923L65.5854 61.6991Z"
fill="#252020"
stroke="white"
strokeWidth="0.5"
/>
<path
d="M105.415 101.301V155.406L85.281 135.747V80.7077L105.415 101.301Z"
fill="#252020"
stroke="white"
strokeWidth="0.5"
/>
<path
d="M64.7449 101.398L10.5942 101.398L30.2698 81.2814L85.3549 81.2814L64.7449 101.398Z"
fill="#252020"
stroke="white"
strokeWidth="0.5"
/>
<path
d="M139.776 81.7186L85.6252 81.7186L105.301 61.6022L160.386 61.6022L139.776 81.7186Z"
fill="#252020"
stroke="white"
strokeWidth="0.5"
/>
<path
d="M123.777 43.9087L104.112 62.6428L104.113 35.6888L123.777 16.9546L123.777 43.9087Z"
fill="#252020"
stroke="white"
strokeWidth="0.5"
/>
<path
d="M65.6277 127.311L45.9634 146.045L45.9635 119.091L65.6277 100.357L65.6277 127.311Z"
fill="#252020"
stroke="white"
strokeWidth="0.5"
/>
<path
d="M46.9402 41.9541L65.7581 61.537L38.7817 61.6304L19.9638 42.0476L46.9402 41.9541Z"
fill="#252020"
stroke="white"
strokeWidth="0.5"
/>
<path
d="M132.288 100.992L151.106 120.575L124.129 120.668L105.312 101.085L132.288 100.992Z"
fill="#252020"
stroke="white"
strokeWidth="0.5"
/>
<rect
x="7.5"
y="3.5"
width="156"
height="156"
rx="25.5"
stroke="#E3DFDF"
/>
</g>
<defs>
<filter
id="filter0_d_22_34"
x="0"
y="0"
width="171"
height="171"
filterUnits="userSpaceOnUse"
color-interpolation-filters="sRGB"
>
<feFlood
floodOpacity="0"
result="BackgroundImageFix"
/>
<feColorMatrix
in="SourceAlpha"
type="matrix"
values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0"
result="hardAlpha"
/>
<feOffset dy="4" />
<feGaussianBlur stdDeviation="2" />
<feComposite in2="hardAlpha" operator="out" />
<feColorMatrix
type="matrix"
values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.25 0"
/>
<feBlend
mode="normal"
in2="BackgroundImageFix"
result="effect1_dropShadow_22_34"
/>
<feBlend
mode="normal"
in="SourceGraphic"
in2="effect1_dropShadow_22_34"
result="shape"
/>
</filter>
</defs>
</svg>
<h1 className="mx-auto mt-2 text-4xl font-bold">
Try Dino today
</h1>
<NextLink href="/login">
<a className="mx-auto w-36">
<button className="px-6 py-2 mx-auto mt-6 text-base font-medium border border-gray-300 rounded-sm w-36 hover:bg-gray-100">
Try Dino
</button>
</a>
</NextLink>
</div>
<div className="p-5 border-t border-gray-delay-300">
<div className="flex flex-wrap items-center justify-center space-x-4 space-y-2 sm:space-y-0">
<a
href={"/"}
className={
"text-gray-900 hover:bg-gray-100 px-3 py-2 rounded-sm text-sm font-medium"
}
>
Terms {"&"} Privacy
</a>
<a
href={"/"}
className={
"text-gray-900 hover:bg-gray-100 px-3 py-2 rounded-sm text-sm font-medium"
}
>
Security
</a>
<a
href={"/"}
className={
"text-gray-900 hover:bg-gray-100 px-3 py-2 rounded-sm text-sm font-medium"
}
>
Status
</a>
<a
href={"/"}
className={
"text-gray-900 hover:bg-gray-100 px-3 py-2 rounded-sm text-sm font-medium"
}
>
Docs
</a>
<a
href={"/"}
className={
"text-gray-900 hover:bg-gray-100 px-3 py-2 rounded-sm text-sm font-medium"
}
>
Pricing
</a>
<a
href={"/"}
className={
"text-gray-900 hover:bg-gray-100 px-3 py-2 rounded-sm text-sm font-medium"
}
>
Blog
</a>
<a
href={"/"}
className={
"text-gray-900 hover:bg-gray-100 px-3 py-2 rounded-sm text-sm font-medium"
}
>
About
</a>
</div>
</div>
</div>
);
};
export default Home; | the_stack |
import * as Long from 'long';
import {
Predicate,
ReadOnlyLazyList,
ListComparator,
UUID
} from '../core';
import {ReplicatedMapAddEntryListenerCodec} from '../codec/ReplicatedMapAddEntryListenerCodec';
import {ReplicatedMapAddEntryListenerToKeyCodec} from '../codec/ReplicatedMapAddEntryListenerToKeyCodec';
// eslint-disable-next-line max-len
import {ReplicatedMapAddEntryListenerToKeyWithPredicateCodec} from '../codec/ReplicatedMapAddEntryListenerToKeyWithPredicateCodec';
import {ReplicatedMapAddEntryListenerWithPredicateCodec} from '../codec/ReplicatedMapAddEntryListenerWithPredicateCodec';
import {ReplicatedMapClearCodec} from '../codec/ReplicatedMapClearCodec';
import {ReplicatedMapContainsKeyCodec} from '../codec/ReplicatedMapContainsKeyCodec';
import {ReplicatedMapContainsValueCodec} from '../codec/ReplicatedMapContainsValueCodec';
import {ReplicatedMapEntrySetCodec} from '../codec/ReplicatedMapEntrySetCodec';
import {ReplicatedMapGetCodec} from '../codec/ReplicatedMapGetCodec';
import {ReplicatedMapIsEmptyCodec} from '../codec/ReplicatedMapIsEmptyCodec';
import {ReplicatedMapKeySetCodec} from '../codec/ReplicatedMapKeySetCodec';
import {ReplicatedMapPutAllCodec} from '../codec/ReplicatedMapPutAllCodec';
import {ReplicatedMapPutCodec} from '../codec/ReplicatedMapPutCodec';
import {ReplicatedMapRemoveCodec} from '../codec/ReplicatedMapRemoveCodec';
import {ReplicatedMapRemoveEntryListenerCodec} from '../codec/ReplicatedMapRemoveEntryListenerCodec';
import {ReplicatedMapSizeCodec} from '../codec/ReplicatedMapSizeCodec';
import {ReplicatedMapValuesCodec} from '../codec/ReplicatedMapValuesCodec';
import {EventType} from './EventType';
import {EntryEvent, EntryEventListener, EntryListener} from './EntryListener';
import {ListenerMessageCodec} from '../listener/ListenerMessageCodec';
import {Data} from '../serialization/Data';
import {assertNotNull} from '../util/Util';
import {ReplicatedMap} from './ReplicatedMap';
import {PartitionSpecificProxy} from './PartitionSpecificProxy';
import {MapEvent, MapEventListener} from './MapListener';
import {ClientMessage} from '../protocol/ClientMessage';
import {deserializeEntryList} from '../serialization/SerializationUtil';
// Signature of the decoded entry-event callback shared by all of the
// ReplicatedMap add-entry-listener codec `handle` functions.
type EntryEventHandler = (key: Data, value: Data, oldValue: Data, mergingValue: Data,
                          eventType: number, uuid: UUID, numberOfAffectedEntries: number) => void
/** @internal */
export class ReplicatedMapProxy<K, V> extends PartitionSpecificProxy implements ReplicatedMap<K, V> {
put(key: K, value: V, ttl: Long | number = 0): Promise<V> {
assertNotNull(key);
assertNotNull(value);
const valueData: Data = this.toData(value);
const keyData: Data = this.toData(key);
return this.encodeInvokeOnKey(ReplicatedMapPutCodec, keyData, keyData, valueData, ttl)
.then((clientMessage) => {
const response = ReplicatedMapPutCodec.decodeResponse(clientMessage);
return this.toObject(response);
});
}
clear(): Promise<void> {
return this.encodeInvokeOnRandomTarget(ReplicatedMapClearCodec).then(() => {});
}
get(key: K): Promise<V> {
assertNotNull(key);
const keyData = this.toData(key);
return this.encodeInvokeOnKey(ReplicatedMapGetCodec, keyData, keyData)
.then((clientMessage) => {
const response = ReplicatedMapGetCodec.decodeResponse(clientMessage);
return this.toObject(response);
});
}
containsKey(key: K): Promise<boolean> {
assertNotNull(key);
const keyData = this.toData(key);
return this.encodeInvokeOnKey(ReplicatedMapContainsKeyCodec, keyData, keyData)
.then(ReplicatedMapContainsKeyCodec.decodeResponse);
}
containsValue(value: V): Promise<boolean> {
assertNotNull(value);
const valueData = this.toData(value);
return this.encodeInvoke(ReplicatedMapContainsValueCodec, valueData)
.then(ReplicatedMapContainsValueCodec.decodeResponse);
}
size(): Promise<number> {
return this.encodeInvoke(ReplicatedMapSizeCodec)
.then(ReplicatedMapSizeCodec.decodeResponse);
}
isEmpty(): Promise<boolean> {
return this.encodeInvoke(ReplicatedMapIsEmptyCodec)
.then(ReplicatedMapIsEmptyCodec.decodeResponse);
}
remove(key: K): Promise<V> {
assertNotNull(key);
const keyData = this.toData(key);
return this.encodeInvokeOnKey(ReplicatedMapRemoveCodec, keyData, keyData)
.then((clientMessage) => {
const response = ReplicatedMapRemoveCodec.decodeResponse(clientMessage);
return this.toObject(response);
});
}
putAll(pairs: Array<[K, V]>): Promise<void> {
let pair: [K, V];
let pairId: string;
const entries: Array<[Data, Data]> = [];
for (pairId in pairs) {
pair = pairs[pairId];
const keyData = this.toData(pair[0]);
const valueData = this.toData(pair[1]);
entries.push([keyData, valueData]);
}
return this.encodeInvokeOnRandomTarget(ReplicatedMapPutAllCodec, entries).then(() => {});
}
keySet(): Promise<K[]> {
const toObject = this.toObject.bind(this);
return this.encodeInvoke(ReplicatedMapKeySetCodec)
.then((clientMessage) => {
const response = ReplicatedMapKeySetCodec.decodeResponse(clientMessage);
return response.map(toObject);
});
}
values(comparator?: ListComparator<V>): Promise<ReadOnlyLazyList<V>> {
return this.encodeInvoke(ReplicatedMapValuesCodec)
.then((clientMessage) => {
const valuesData = ReplicatedMapValuesCodec.decodeResponse(clientMessage);
if (comparator) {
const desValues = valuesData.map(this.toObject.bind(this));
return new ReadOnlyLazyList(desValues.sort(comparator), this.serializationService);
}
return new ReadOnlyLazyList(valuesData, this.serializationService);
});
}
entrySet(): Promise<Array<[K, V]>> {
return this.encodeInvoke(ReplicatedMapEntrySetCodec)
.then((clientMessage) => {
const response = ReplicatedMapEntrySetCodec.decodeResponse(clientMessage);
return deserializeEntryList(this.toObject.bind(this), response);
});
}
addEntryListenerToKeyWithPredicate(listener: EntryListener<K, V>, key: K, predicate: Predicate): Promise<string> {
return this.addEntryListenerInternal(listener, predicate, key);
}
addEntryListenerWithPredicate(listener: EntryListener<K, V>, predicate: Predicate): Promise<string> {
return this.addEntryListenerInternal(listener, predicate, undefined);
}
addEntryListenerToKey(listener: EntryListener<K, V>, key: K): Promise<string> {
return this.addEntryListenerInternal(listener, undefined, key);
}
addEntryListener(listener: EntryListener<K, V>): Promise<string> {
return this.addEntryListenerInternal(listener, undefined, undefined);
}
removeEntryListener(listenerId: string): Promise<boolean> {
return this.listenerService.deregisterListener(listenerId);
}
private addEntryListenerInternal(listener: EntryListener<K, V>, predicate: Predicate,
key: K): Promise<string> {
const toObject = this.toObject.bind(this);
const entryEventHandler = (key: Data, value: Data, oldValue: Data, mergingValue: Data,
event: number, uuid: UUID, numberOfAffectedEntries: number): void => {
const member = this.clusterService.getMember(uuid.toString());
const name = this.name;
const entryEvent = new EntryEvent<K,V>(
name,
toObject(key),
toObject(value),
toObject(oldValue),
toObject(mergingValue),
member
);
const mapEvent = new MapEvent(name, numberOfAffectedEntries, member);
const entryEventToListenerMap: { [key: number]: string } = {
[EventType.ADDED]: 'added',
[EventType.REMOVED]: 'removed',
[EventType.UPDATED]: 'updated',
[EventType.EVICTED]: 'evicted',
};
const mapEventToListenerMap: { [key: number]: string } = {
[EventType.CLEAR_ALL]: 'mapCleared',
};
const entryEventMethod = entryEventToListenerMap[event];
const mapEventMethod = mapEventToListenerMap[event];
if (listener.hasOwnProperty(entryEventMethod)) {
(listener[entryEventMethod] as EntryEventListener<K, V>).apply(null, [entryEvent]);
} else if (listener.hasOwnProperty(mapEventMethod)) {
(listener[mapEventMethod] as MapEventListener<K, V>).apply(null, [mapEvent]);
}
};
let listenerHandler: (message: ClientMessage,
handler: EntryEventHandler,
toObjectFn: (data: Data) => any) => void;
let codec: ListenerMessageCodec;
if (key && predicate) {
const keyData = this.toData(key);
const predicateData = this.toData(predicate);
codec = this.createEntryListenerToKeyWithPredicate(this.name, keyData, predicateData);
listenerHandler = ReplicatedMapAddEntryListenerToKeyWithPredicateCodec.handle;
} else if (key && !predicate) {
const keyData = this.toData(key);
codec = this.createEntryListenerToKey(this.name, keyData);
listenerHandler = ReplicatedMapAddEntryListenerToKeyCodec.handle;
} else if (!key && predicate) {
const predicateData = this.toData(predicate);
codec = this.createEntryListenerWithPredicate(this.name, predicateData);
listenerHandler = ReplicatedMapAddEntryListenerWithPredicateCodec.handle;
} else {
codec = this.createEntryListener(this.name);
listenerHandler = ReplicatedMapAddEntryListenerCodec.handle;
}
return this.listenerService.registerListener(codec,
(m: ClientMessage) => {
listenerHandler(m, entryEventHandler, toObject);
});
}
private createEntryListener(name: string): ListenerMessageCodec {
return {
encodeAddRequest(localOnly: boolean): ClientMessage {
return ReplicatedMapAddEntryListenerCodec.encodeRequest(name, localOnly);
},
decodeAddResponse(msg: ClientMessage): UUID {
return ReplicatedMapAddEntryListenerCodec.decodeResponse(msg);
},
encodeRemoveRequest(listenerId: UUID): ClientMessage {
return ReplicatedMapRemoveEntryListenerCodec.encodeRequest(name, listenerId);
},
};
}
private createEntryListenerToKey(name: string, keyData: Data): ListenerMessageCodec {
return {
encodeAddRequest(localOnly: boolean): ClientMessage {
return ReplicatedMapAddEntryListenerToKeyCodec.encodeRequest(name, keyData, localOnly);
},
decodeAddResponse(msg: ClientMessage): UUID {
return ReplicatedMapAddEntryListenerToKeyCodec.decodeResponse(msg);
},
encodeRemoveRequest(listenerId: UUID): ClientMessage {
return ReplicatedMapRemoveEntryListenerCodec.encodeRequest(name, listenerId);
},
};
}
private createEntryListenerWithPredicate(name: string, predicateData: Data): ListenerMessageCodec {
return {
encodeAddRequest(localOnly: boolean): ClientMessage {
return ReplicatedMapAddEntryListenerWithPredicateCodec.encodeRequest(name, predicateData, localOnly);
},
decodeAddResponse(msg: ClientMessage): UUID {
return ReplicatedMapAddEntryListenerWithPredicateCodec.decodeResponse(msg);
},
encodeRemoveRequest(listenerId: UUID): ClientMessage {
return ReplicatedMapRemoveEntryListenerCodec.encodeRequest(name, listenerId);
},
};
}
private createEntryListenerToKeyWithPredicate(name: string, keyData: Data, predicateData: Data): ListenerMessageCodec {
return {
encodeAddRequest(localOnly: boolean): ClientMessage {
return ReplicatedMapAddEntryListenerToKeyWithPredicateCodec.encodeRequest(name, keyData, predicateData,
localOnly);
},
decodeAddResponse(msg: ClientMessage): UUID {
return ReplicatedMapAddEntryListenerToKeyWithPredicateCodec.decodeResponse(msg);
},
encodeRemoveRequest(listenerId: UUID): ClientMessage {
return ReplicatedMapRemoveEntryListenerCodec.encodeRequest(name, listenerId);
},
};
}
} | the_stack |
import { Graph, Shape, Addon } from '@antv/x6'
import insertCss from 'insert-css'
preWork() // demo scaffolding: builds the container DOM and injects CSS
// #region Initialize the canvas
const graph = new Graph({
  container: document.getElementById('graph-container')!,
  grid: true,
  // Zoom with ctrl + mouse wheel, centered on the pointer.
  mousewheel: {
    enabled: true,
    zoomAtMousePosition: true,
    modifiers: 'ctrl',
    minScale: 0.5,
    maxScale: 3,
  },
  connecting: {
    router: 'manhattan',
    connector: {
      name: 'rounded',
      args: {
        radius: 8,
      },
    },
    anchor: 'center',
    connectionPoint: 'anchor',
    allowBlank: false,
    snap: {
      radius: 20,
    },
    // Style applied to every edge the user draws.
    createEdge() {
      return new Shape.Edge({
        attrs: {
          line: {
            stroke: '#A2B1C3',
            strokeWidth: 2,
            targetMarker: {
              name: 'block',
              width: 12,
              height: 8,
            },
          },
        },
        zIndex: 0,
      })
    },
    // Only let edges terminate on a magnet (i.e. a port).
    validateConnection({ targetMagnet }) {
      return !!targetMagnet
    },
  },
  // Highlight style when an edge snaps onto a port.
  highlighting: {
    magnetAdsorbed: {
      name: 'stroke',
      args: {
        attrs: {
          fill: '#5F95FF',
          stroke: '#5F95FF',
        },
      },
    },
  },
  resizing: true,
  rotating: true,
  selecting: {
    enabled: true,
    rubberband: true,
    showNodeSelectionBox: true,
  },
  snapline: true,
  keyboard: true,
  clipboard: true,
})
// #endregion
// #region Initialize the stencil (the draggable shape palette)
const stencil = new Addon.Stencil({
  title: '流程图',
  target: graph,
  stencilGraphWidth: 200,
  stencilGraphHeight: 180,
  collapsable: true,
  groups: [
    {
      title: '基础流程图',
      name: 'group1',
    },
    {
      title: '系统设计图',
      name: 'group2',
      graphHeight: 250,
      layoutOptions: {
        rowHeight: 70,
      },
    },
  ],
  // Two shapes per palette row.
  layoutOptions: {
    columns: 2,
    columnWidth: 80,
    rowHeight: 55,
  },
})
document.getElementById('stencil')!.appendChild(stencil.container)
// #endregion
// #region Keyboard shortcuts and events
// copy / cut / paste
graph.bindKey(['meta+c', 'ctrl+c'], () => {
  const cells = graph.getSelectedCells()
  if (cells.length) {
    graph.copy(cells)
  }
  return false
})
graph.bindKey(['meta+x', 'ctrl+x'], () => {
  const cells = graph.getSelectedCells()
  if (cells.length) {
    graph.cut(cells)
  }
  return false
})
graph.bindKey(['meta+v', 'ctrl+v'], () => {
  if (!graph.isClipboardEmpty()) {
    // Paste with an offset so the copy does not cover the original.
    const cells = graph.paste({ offset: 32 })
    graph.cleanSelection()
    graph.select(cells)
  }
  return false
})
// undo / redo
graph.bindKey(['meta+z', 'ctrl+z'], () => {
  if (graph.history.canUndo()) {
    graph.history.undo()
  }
  return false
})
graph.bindKey(['meta+shift+z', 'ctrl+shift+z'], () => {
  if (graph.history.canRedo()) {
    graph.history.redo()
  }
  return false
})
// select all nodes
graph.bindKey(['meta+a', 'ctrl+a'], () => {
  const nodes = graph.getNodes()
  if (nodes) {
    graph.select(nodes)
  }
})
// delete selection
graph.bindKey('backspace', () => {
  const cells = graph.getSelectedCells()
  if (cells.length) {
    graph.removeCells(cells)
  }
})
// zoom in/out, clamped to [0.5, 1.5]
graph.bindKey(['ctrl+1', 'meta+1'], () => {
  const zoom = graph.zoom()
  if (zoom < 1.5) {
    graph.zoom(0.1)
  }
})
graph.bindKey(['ctrl+2', 'meta+2'], () => {
  const zoom = graph.zoom()
  if (zoom > 0.5) {
    graph.zoom(-0.1)
  }
})
// Toggle the visibility of the given connection-port elements.
const showPorts = (ports: NodeListOf<SVGElement>, show: boolean) => {
  const visibility = show ? 'visible' : 'hidden'
  let index = 0
  while (index < ports.length) {
    ports[index].style.visibility = visibility
    index += 1
  }
}
// Ports are hidden by default (see the port `style` below); reveal them
// while the pointer hovers a node and hide them again on leave.
graph.on('node:mouseenter', () => {
  const container = document.getElementById('graph-container')!
  const ports = container.querySelectorAll(
    '.x6-port-body',
  ) as NodeListOf<SVGElement>
  showPorts(ports, true)
})
graph.on('node:mouseleave', () => {
  const container = document.getElementById('graph-container')!
  const ports = container.querySelectorAll(
    '.x6-port-body',
  ) as NodeListOf<SVGElement>
  showPorts(ports, false)
})
// #endregion
// #region Initialize shapes
// Shared port configuration reused by every registered node type: one
// 4px circular magnet on each side. The four groups are identical except
// for their position; ports start hidden and are toggled by the hover
// handlers above.
const ports = {
  groups: {
    top: {
      position: 'top',
      attrs: {
        circle: {
          r: 4,
          magnet: true,
          stroke: '#5F95FF',
          strokeWidth: 1,
          fill: '#fff',
          style: {
            visibility: 'hidden',
          },
        },
      },
    },
    right: {
      position: 'right',
      attrs: {
        circle: {
          r: 4,
          magnet: true,
          stroke: '#5F95FF',
          strokeWidth: 1,
          fill: '#fff',
          style: {
            visibility: 'hidden',
          },
        },
      },
    },
    bottom: {
      position: 'bottom',
      attrs: {
        circle: {
          r: 4,
          magnet: true,
          stroke: '#5F95FF',
          strokeWidth: 1,
          fill: '#fff',
          style: {
            visibility: 'hidden',
          },
        },
      },
    },
    left: {
      position: 'left',
      attrs: {
        circle: {
          r: 4,
          magnet: true,
          stroke: '#5F95FF',
          strokeWidth: 1,
          fill: '#fff',
          style: {
            visibility: 'hidden',
          },
        },
      },
    },
  },
  // One port instance per side.
  items: [
    {
      group: 'top',
    },
    {
      group: 'right',
    },
    {
      group: 'bottom',
    },
    {
      group: 'left',
    },
  ],
}
// Rectangle node with ports on all four sides.
Graph.registerNode(
  'custom-rect',
  {
    inherit: 'rect',
    width: 66,
    height: 36,
    attrs: {
      body: {
        strokeWidth: 1,
        stroke: '#5F95FF',
        fill: '#EFF4FF',
      },
      text: {
        fontSize: 12,
        color: '#262626',
      },
    },
    ports: { ...ports },
  },
  true, // overwrite any existing registration with the same name
)
// Polygon node (diamond/parallelogram via refPoints) — top/bottom ports only.
Graph.registerNode(
  'custom-polygon',
  {
    inherit: 'polygon',
    width: 66,
    height: 36,
    attrs: {
      body: {
        strokeWidth: 1,
        stroke: '#5F95FF',
        fill: '#EFF4FF',
      },
      text: {
        fontSize: 12,
        color: '#262626',
      },
    },
    ports: {
      ...ports,
      items: [
        {
          group: 'top',
        },
        {
          group: 'bottom',
        },
      ],
    },
  },
  true,
)
// Circle ("connector") node with ports on all four sides.
Graph.registerNode(
  'custom-circle',
  {
    inherit: 'circle',
    width: 45,
    height: 45,
    attrs: {
      body: {
        strokeWidth: 1,
        stroke: '#5F95FF',
        fill: '#EFF4FF',
      },
      text: {
        fontSize: 12,
        color: '#262626',
      },
    },
    ports: { ...ports },
  },
  true,
)
// Icon node used by the system-design palette: colored rect + image + label.
Graph.registerNode(
  'custom-image',
  {
    inherit: 'rect',
    width: 52,
    height: 52,
    markup: [
      {
        tagName: 'rect',
        selector: 'body',
      },
      {
        tagName: 'image',
      },
      {
        tagName: 'text',
        selector: 'label',
      },
    ],
    attrs: {
      body: {
        strokeWidth: 1,
        stroke: '#26C160',
        fill: '#26C160',
      },
      image: {
        width: 26,
        height: 26,
        refX: 13,
        refY: 16,
      },
      label: {
        refX: 3,
        refY: 2,
        textAnchor: 'left',
        textVerticalAnchor: 'top',
        fontSize: 12,
        fill: '#fff',
      },
    },
    ports: { ...ports },
  },
  true,
)
// Palette group 1: basic flowchart shapes.
const r1 = graph.createNode({
  shape: 'custom-rect',
  label: '开始',
  attrs: {
    body: {
      rx: 20,
      ry: 26,
    },
  },
})
const r2 = graph.createNode({
  shape: 'custom-rect',
  label: '过程',
})
const r3 = graph.createNode({
  shape: 'custom-rect',
  attrs: {
    body: {
      rx: 6,
      ry: 6,
    },
  },
  label: '可选过程',
})
const r4 = graph.createNode({
  shape: 'custom-polygon',
  attrs: {
    body: {
      refPoints: '0,10 10,0 20,10 10,20', // diamond
    },
  },
  label: '决策',
})
const r5 = graph.createNode({
  shape: 'custom-polygon',
  attrs: {
    body: {
      refPoints: '10,0 40,0 30,20 0,20', // parallelogram
    },
  },
  label: '数据',
})
const r6 = graph.createNode({
  shape: 'custom-circle',
  label: '连接',
})
stencil.load([r1, r2, r3, r4, r5, r6], 'group1')
// Palette group 2: system-design icon nodes.
const m1 = graph.createNode({
  shape: 'custom-image',
  label: 'Client',
  attrs: {
    image: {
      'xlink:href':
        'https://gw.alipayobjects.com/zos/bmw-prod/687b6cb9-4b97-42a6-96d0-34b3099133ac.svg',
    },
  },
})
const m2 = graph.createNode({
  shape: 'custom-image',
  label: 'Http',
  attrs: {
    body: {
      stroke: '#2CB9FF',
      fill: '#2CB9FF',
    },
    image: {
      'xlink:href':
        'https://gw.alipayobjects.com/zos/bmw-prod/dc1ced06-417d-466f-927b-b4a4d3265791.svg',
    },
  },
})
const m3 = graph.createNode({
  shape: 'custom-image',
  label: 'Api',
  attrs: {
    body: {
      stroke: '#5AB0BE',
      fill: '#5AB0BE',
    },
    image: {
      'xlink:href':
        'https://gw.alipayobjects.com/zos/bmw-prod/c55d7ae1-8d20-4585-bd8f-ca23653a4489.svg',
    },
  },
})
const m4 = graph.createNode({
  shape: 'custom-image',
  label: 'Sql',
  attrs: {
    body: {
      stroke: '#E6475B',
      fill: '#E6475B',
    },
    image: {
      'xlink:href':
        'https://gw.alipayobjects.com/zos/bmw-prod/6eb71764-18ed-4149-b868-53ad1542c405.svg',
    },
  },
})
const m5 = graph.createNode({
  shape: 'custom-image',
  label: 'Clound',
  attrs: {
    body: {
      stroke: '#DA2625',
      fill: '#DA2625',
    },
    image: {
      'xlink:href':
        'https://gw.alipayobjects.com/zos/bmw-prod/c36fe7cb-dc24-4854-aeb5-88d8dc36d52e.svg',
    },
  },
})
const m6 = graph.createNode({
  shape: 'custom-image',
  label: 'Mq',
  attrs: {
    body: {
      stroke: '#FFB15F',
      fill: '#FFB15F',
    },
    image: {
      'xlink:href':
        'https://gw.alipayobjects.com/zos/bmw-prod/2010ac9f-40e7-49d4-8c4a-4fcf2f83033b.svg',
    },
  },
})
stencil.load([m1, m2, m3, m4, m5, m6], 'group2')
// #endregion
function preWork() {
// 这里协助演示的代码,在实际项目中根据实际情况进行调整
const container = document.getElementById('container')!
const stencilContainer = document.createElement('div')
stencilContainer.id = 'stencil'
const graphContainer = document.createElement('div')
graphContainer.id = 'graph-container'
container.appendChild(stencilContainer)
container.appendChild(graphContainer)
insertCss(`
#container {
display: flex;
border: 1px solid #dfe3e8;
}
#stencil {
width: 180px;
height: 100%;
position: relative;
border-right: 1px solid #dfe3e8;
}
#graph-container {
width: calc(100% - 180px);
height: 100%;
}
.x6-widget-stencil {
background-color: #fff;
}
.x6-widget-stencil-title {
background-color: #fff;
}
.x6-widget-stencil-group-title {
background-color: #fff !important;
}
.x6-widget-transform {
margin: -1px 0 0 -1px;
padding: 0px;
border: 1px solid #239edd;
}
.x6-widget-transform > div {
border: 1px solid #239edd;
}
.x6-widget-transform > div:hover {
background-color: #3dafe4;
}
.x6-widget-transform-active-handle {
background-color: #3dafe4;
}
.x6-widget-transform-resize {
border-radius: 0;
}
.x6-widget-selection-inner {
border: 1px solid #239edd;
}
.x6-widget-selection-box {
opacity: 0;
}
`)
} | the_stack |
import parseUrlParameter from "@ff/browser/parseUrlParameter";
import Commander from "@ff/core/Commander";
import TypeRegistry from "@ff/core/TypeRegistry";
import Notification from "@ff/ui/Notification";
import System from "@ff/graph/System";
import coreTypes from "./coreTypes";
import explorerTypes from "./explorerTypes";
import documentTemplate from "client/templates/default.svx.json";
import CVDocumentProvider from "../components/CVDocumentProvider";
import CVDocument from "../components/CVDocument";
import CVAssetManager from "../components/CVAssetManager";
import CVAssetReader from "../components/CVAssetReader";
import CVAnalytics from "../components/CVAnalytics";
import CVToolProvider from "../components/CVToolProvider";
import NVEngine from "../nodes/NVEngine";
import NVDocuments from "../nodes/NVDocuments";
import NVTools from "../nodes/NVTools";
import MainView from "../ui/explorer/MainView";
import { EDerivativeQuality } from "client/schema/model";
import CVARManager from "client/components/CVARManager";
import { EUIElements } from "client/components/CVInterface";
import { EBackgroundStyle } from "client/schema/setup";
import CRenderer from "client/../../libs/ff-scene/source/components/CRenderer";
////////////////////////////////////////////////////////////////////////////////
/**
 * Initial properties of the Voyager Explorer main [[ExplorerApplication]].
 * Each property can also be supplied via a URL query parameter; explicit
 * constructor values take precedence (see [[ExplorerApplication.evaluateProps]]).
 */
export interface IExplorerApplicationProps
{
    /** URL of the root asset folder. */
    root?: string;
    /** Custom URL to Draco files (Defaults to online CDN). */
    dracoRoot?: string;
    /** Custom URL to resource files (Defaults to online CDN). */
    resourceRoot?: string;
    /** URL of the document to load and display at startup. */
    document?: string;
    /** URL of a model (supported formats: gltf, glb) to load and display at startup. */
    model?: string;
    /** URL of a geometry (supported formats: obj, ply) to load and display at startup. */
    geometry?: string;
    /** If a geometry URL is given, optional URL of a color texture to use with the geometry. */
    texture?: string;
    /** If a geometry URL is given, optional URL of an occlusion texture to use with the geometry. */
    occlusion?: string;
    /** If a geometry URL is given, optional URL of a normal texture to use with the geometry. */
    normals?: string;
    /** When loading a model or geometry, the quality level to set for the asset.
        Valid options: "thumb", "low", "medium", "high". */
    quality?: string;
    /** Mode string starts Explorer in a specific ui configuration, i.e. no UI.
        A '|'-separated list of EUIElements names. */
    uiMode?: string;
}
/**
 * Voyager Explorer main application.
 * Wires up the ff-graph component system, creates the main view, and exposes
 * an imperative API (toggle*, setCameraOrbit, etc.) for external host pages.
 */
export default class ExplorerApplication
{
    // ASCII banner logged once from the constructor; ENV_VERSION is injected at build time.
    protected static splashMessage = `
 _________ .__ __ .__ .__ ________ ________
 / _____/ _____ |__|/ |_| |__ __________ ____ |__|____ ____ \\_____ \\\\______ \\
 \\_____ \\ / \\| \\ __\\ | \\ / ___/ _ \\ / \\| \\__ \\ / \\ _(__ < | | \\
 / \\ Y Y \\ || | | Y \\\\___ ( <_> ) | \\ |/ __ \\| | \\ / \\| \` \\
 /_______ /__|_| /__||__| |___| /____ >____/|___| /__(____ /___| / /______ /_______ /
 \\/ \\/ \\/ \\/ \\/ \\/ \\/ \\/ \\/
 Voyager - 3D Explorer and Tool Suite
 3D Foundation Project
 (c) 2021 Smithsonian Institution
 https://3d.si.edu
 https://github.com/smithsonian/dpo-voyager
 -----------------------------------------------------
 Version: ${ENV_VERSION}
 -----------------------------------------------------
 `;

    /** Startup properties, merged with URL parameters in evaluateProps(). */
    readonly props: IExplorerApplicationProps;
    /** The ff-graph component system hosting all nodes and components. */
    readonly system: System;
    /** Command dispatcher shared by the application. */
    readonly commander: Commander;

    // Convenience accessors for frequently used main components.
    protected get assetManager() {
        return this.system.getMainComponent(CVAssetManager);
    }
    protected get assetReader() {
        return this.system.getMainComponent(CVAssetReader);
    }
    protected get documentProvider() {
        return this.system.getMainComponent(CVDocumentProvider);
    }
    protected get analytics() {
        return this.system.getMainComponent(CVAnalytics);
    }

    /**
     * Creates the application.
     * @param parent DOM element to attach the main view to; may be null/undefined when the
     *   caller creates its own view.
     * @param props Optional initial properties; missing values fall back to URL parameters.
     * @param embedded When true, skips default-document creation and URL-prop evaluation
     *   (the embedding host drives loading instead).
     */
    constructor(parent: HTMLElement, props?: IExplorerApplicationProps, embedded?: boolean)
    {
        this.props = props || {};
        console.log(ExplorerApplication.splashMessage);

        // register components
        const registry = new TypeRegistry();
        registry.add(coreTypes);
        registry.add(explorerTypes);

        this.commander = new Commander();
        const system = this.system = new System(registry);

        // Core node graph: engine (render pulse), tools, and documents.
        const engine = system.graph.createCustomNode(NVEngine);
        system.graph.createCustomNode(NVTools);
        system.graph.createCustomNode(NVDocuments);

        // start timing load
        this.analytics.startTimer();

        if (parent) {
            // create a view and attach to parent
            new MainView(this).appendTo(parent);
        }

        if (!embedded) {
            // initialize default document
            this.documentProvider.createDocument(documentTemplate as any);
            this.evaluateProps();
        }

        //*** Support message passing over channel 2 ***//
        /*{
            // Add listener for the intial port transfer message
            var port2;
            window.addEventListener('message', initPort);

            // Setup port for message passing
            function initPort(e) {
                port2 = e.ports[0];
                if(port2) {
                    port2.onmessage = onMessage;
                }
            }

            // Handle messages received on port2
            function onMessage(e) {
                if (ENV_DEVELOPMENT) {
                    console.log('Message received by VoyagerExplorer: "' + e.data + '"');
                }

                const analytics = system.getMainComponent(CVAnalytics);

                if (e.data === "enableAR") {
                    const ARIns = system.getMainComponent(CVARManager).ins;
                    ARIns.enabled.setValue(true);
                    analytics.sendProperty("AR.enabled", true);
                }
            }
        }*/

        // start rendering
        engine.pulse.start();
    }

    /**
     * Releases resources held by the application.
     * NOTE(review): the commented-out graph.clear()/setup.node.dispose() lines suggest a
     * fuller teardown was attempted and rolled back; the current partial disposal
     * (tape/floor/grid only) is preserved as-is — confirm against the component lifecycle.
     */
    dispose()
    {
        // Clean up assuming a component disconnect means it won't be reconnected
        this.assetReader.dispose();
        this.documentProvider.activeComponent.clearNodeTree();
        this.system.getMainComponent(CRenderer).views.forEach(view => view.dispose());
        //this.documentProvider.activeComponent.setup.node.dispose();
        //this.system.graph.clear();
        this.documentProvider.activeComponent.setup.tape.dispose();
        this.documentProvider.activeComponent.setup.floor.dispose();
        this.documentProvider.activeComponent.setup.grid.dispose();
    }

    /** Sets the base URL used by the asset manager to resolve relative asset paths. */
    setBaseUrl(url: string)
    {
        this.assetManager.baseUrl = url;
    }

    /**
     * Loads (or merges) a Voyager document from the given path.
     * When `merge` is undefined, the document is merged iff it carries neither
     * lights nor cameras of its own.
     * @param quality Optional EDerivativeQuality name; applied to the viewer when valid.
     * NOTE(review): the `uiMode` parameter is accepted but never used here — confirm
     * whether it was meant to configure the interface after load.
     */
    loadDocument(documentPath: string, merge?: boolean, quality?: string, uiMode?: string): Promise<CVDocument>
    {
        // Resolves to undefined for an unknown/missing quality; isFinite() below guards that.
        const dq = EDerivativeQuality[quality];

        return this.assetReader.getJSON(documentPath)
        .then(data => {
            merge = merge === undefined ? !data.lights && !data.cameras : merge;
            return this.documentProvider.amendDocument(data, documentPath, merge);
        })
        .then(document => {
            if (isFinite(dq)) {
                document.setup.viewer.ins.quality.setValue(dq);
            }
            return document;
        });
    }

    /** Appends a model (gltf/glb) to the active document at the given quality. */
    loadModel(modelPath: string, quality: string)
    {
        return this.documentProvider.appendModel(modelPath, quality);
    }

    /** Appends a raw geometry (obj/ply) with optional color/occlusion/normal maps. */
    loadGeometry(geoPath: string, colorMapPath?: string,
        occlusionMapPath?: string, normalMapPath?: string, quality?: string)
    {
        return this.documentProvider.appendGeometry(
            geoPath, colorMapPath, occlusionMapPath, normalMapPath, quality);
    }

    /**
     * Resolves startup properties (explicit props take precedence over URL query
     * parameters), sets the asset base URL, applies any UI-mode configuration, and
     * kicks off loading in priority order: document > model > geometry > root folder.
     */
    evaluateProps()
    {
        const props = this.props;
        const manager = this.assetManager;

        // Each property falls back to its long then short URL parameter.
        props.root = props.root || parseUrlParameter("root") || parseUrlParameter("r");
        props.dracoRoot = props.dracoRoot || parseUrlParameter("dracoRoot") || parseUrlParameter("dr");
        props.resourceRoot = props.resourceRoot || parseUrlParameter("resourceRoot") || parseUrlParameter("rr");
        props.document = props.document || parseUrlParameter("document") || parseUrlParameter("d");
        props.model = props.model || parseUrlParameter("model") || parseUrlParameter("m");
        props.geometry = props.geometry || parseUrlParameter("geometry") || parseUrlParameter("g");
        props.texture = props.texture || parseUrlParameter("texture") || parseUrlParameter("t");
        props.occlusion = props.occlusion || parseUrlParameter("occlusion") || parseUrlParameter("o");
        props.normals = props.normals || parseUrlParameter("normals") || parseUrlParameter("n");
        props.quality = props.quality || parseUrlParameter("quality") || parseUrlParameter("q");
        props.uiMode = props.uiMode || parseUrlParameter("uiMode") || parseUrlParameter("u");

        const url = props.root || props.document || props.model || props.geometry;
        this.setBaseUrl(new URL(url || ".", window.location as any).href);

        // Config custom UI layout
        if (props.uiMode) {
            //if (props.uiMode.toLowerCase().indexOf("none") !== -1) {
            //    this.documentProvider.activeComponent.setup.interface.ins.visibleElements.setValue(0);
            //}

            // Accumulate the bitmask of requested UI elements from '|'-separated names.
            let elementValues = 0;
            let hasValidParam = false;
            const enumNames = Object.values(EUIElements).filter(value => typeof value === 'string') as string[];
            const uiParams = props.uiMode.split('|');

            uiParams.forEach(param => {
                const stdParam = param.toLowerCase();
                if(enumNames.includes(stdParam)) {
                    elementValues += EUIElements[stdParam];
                    hasValidParam = true;
                }
            });

            // Only touch the interface when at least one valid element name was given.
            if(hasValidParam) {
                this.documentProvider.activeComponent.setup.interface.ins.visibleElements.setValue(elementValues);
            }
        }

        if(props.dracoRoot) {
            // Set custom Draco path
            this.assetReader.setDracoPath(props.dracoRoot);
        }

        if(props.resourceRoot) {
            // Set custom resource path
            this.assetReader.setSystemAssetPath(props.resourceRoot);
        }

        if (props.document) {
            // first loading priority: document
            props.document = props.root ? props.document : manager.getAssetName(props.document);
            this.loadDocument(props.document, undefined, props.quality, props.uiMode)
            .then(() => this.assetManager.ins.baseUrlValid.setValue(true))
            .catch(error => Notification.show(`Failed to load document: ${error.message}`, "error"));
        }
        else if (props.model) {
            // second loading priority: model
            props.model = props.root ? props.model : manager.getAssetName(props.model);
            this.assetReader.getText(props.model) // make sure we have a valid model path
            .then(() => {
                this.loadModel(props.model, props.quality);
                this.assetManager.ins.baseUrlValid.setValue(true);
            })
            .catch(error => Notification.show(`Bad Model Path: ${error.message}`, "error"));
        }
        else if (props.geometry) {
            // third loading priority: geometry (plus optional color texture)
            props.geometry = props.root ? props.geometry : manager.getAssetName(props.geometry);
            props.texture = props.root ? props.texture : manager.getAssetName(props.texture);
            props.occlusion = props.root ? props.occlusion : manager.getAssetName(props.occlusion);
            props.normals = props.root ? props.normals : manager.getAssetName(props.normals);
            this.assetReader.getText(props.geometry) // make sure we have a valid geometry path
            .then(() => {
                this.loadGeometry(props.geometry, props.texture, props.occlusion, props.normals, props.quality);
                this.assetManager.ins.baseUrlValid.setValue(true);
            })
            .catch(error => Notification.show(`Bad Geometry Path: ${error.message}`, "error"));
        }
        else if (props.root) {
            // if nothing else specified, try to read "scene.svx.json" from the current folder
            this.loadDocument("scene.svx.json", undefined)
            .then(() => this.assetManager.ins.baseUrlValid.setValue(true))
            .catch(() => {});
        }
    }

    ////////////////////////////////////////////
    //** API functions for external control **//
    ////////////////////////////////////////////

    /** Toggles annotation visibility; hides the tool bar first since both share screen space. */
    toggleAnnotations()
    {
        const viewerIns = this.system.getMainComponent(CVDocumentProvider).activeComponent.setup.viewer.ins;
        const toolIns = this.system.getMainComponent(CVToolProvider).ins;

        if (toolIns.visible.value) {
            toolIns.visible.setValue(false);
        }

        viewerIns.annotationsVisible.setValue(!viewerIns.annotationsVisible.value);
        this.analytics.sendProperty("Annotations.Visible", viewerIns.annotationsVisible.value);
    }

    /** Toggles the article reader on/off. */
    toggleReader()
    {
        const readerIns = this.system.getMainComponent(CVDocumentProvider).activeComponent.setup.reader.ins;

        readerIns.enabled.setValue(!readerIns.enabled.value);
        this.analytics.sendProperty("Reader.Enabled", readerIns.enabled.value);
    }

    /** Toggles tours; enabling tours disables the reader and opens the tour menu. */
    toggleTours()
    {
        const tourIns = this.system.getMainComponent(CVDocumentProvider).activeComponent.setup.tours.ins;
        const readerIns = this.system.getMainComponent(CVDocumentProvider).activeComponent.setup.reader.ins;

        if (tourIns.enabled.value) {
            tourIns.enabled.setValue(false);
        }
        else {
            if (readerIns.enabled.value) {
                readerIns.enabled.setValue(false); // disable reader
            }

            tourIns.enabled.setValue(true); // enable tours
            tourIns.tourIndex.setValue(-1); // show tour menu
        }

        this.analytics.sendProperty("Tours.Enabled", tourIns.enabled.value);
    }

    /** Toggles the tool bar; hides annotations first since both share screen space. */
    toggleTools()
    {
        const toolIns = this.system.getMainComponent(CVToolProvider).ins;
        const viewerIns = this.system.getMainComponent(CVDocumentProvider).activeComponent.setup.viewer.ins;

        if (viewerIns.annotationsVisible.value) {
            viewerIns.annotationsVisible.setValue(false);
        }

        toolIns.visible.setValue(!toolIns.visible.value);
        this.analytics.sendProperty("Tools.Visible", toolIns.visible.value);
    }

    /** Toggles the measurement (tape) tool. */
    toggleMeasurement()
    {
        const tapeIns = this.system.getMainComponent(CVDocumentProvider).activeComponent.setup.tape.ins;

        tapeIns.visible.setValue(!tapeIns.visible.value);
    }

    /** Enables AR mode (if supported by the platform) and reports it to analytics. */
    enableAR()
    {
        const ARIns = this.system.getMainComponent(CVARManager).ins;
        ARIns.enabled.setValue(true);
        this.analytics.sendProperty("AR.enabled", true);
    }

    // Returns an array of objects with the article data for the current scene
    getArticles()
    {
        const reader = this.system.getMainComponent(CVDocumentProvider).activeComponent.setup.reader;
        const articles = reader.articles.map(entry => entry.article.data);

        return articles;
    }

    // Returns euler angles (yaw/pitch) for orbit navigation
    getCameraOrbit()
    {
        const orbitNavIns = this.system.getMainComponent(CVDocumentProvider).activeComponent.setup.navigation.ins;
        // orbit.value is [yaw, pitch, roll]; roll is dropped.
        return orbitNavIns.orbit.value.slice(0,2);
    }

    // Sets euler angles (yaw/pitch) for orbit navigation
    // Note: parameters arrive as strings (e.g. from HTML attributes) and are parsed here.
    setCameraOrbit( yaw: string, pitch: string)
    {
        const orbitNavIns = this.system.getMainComponent(CVDocumentProvider).activeComponent.setup.navigation.ins;

        const yawNum = parseFloat(yaw);
        const pitchNum = parseFloat(pitch);

        if (!isNaN(yawNum) && !isNaN(pitchNum)) {
            orbitNavIns.orbit.setValue([yawNum, pitchNum, 0.0]);
        }
        else {
            console.log("Error: setCameraOrbit param is not a number.");
        }
    }

    // Set background color
    // Accepts any CSS color string; a temporary hidden element converts it to
    // rgb() via getComputedStyle, which is then normalized to [0..1] components.
    setBackgroundColor(color0: string, color1?: string)
    {
        const backgroundIns = this.system.getMainComponent(CVDocumentProvider).activeComponent.setup.background.ins;

        const div = document.createElement('div');
        div.id = 'temp-color';
        document.getElementsByTagName("voyager-explorer")[0].appendChild(div);

        div.style.color = color0;
        // An invalid CSS color leaves style.color empty — use that as the validity check.
        if(div.style.color !== '') {
            const convColor0 = getComputedStyle(div).color;
            const colorArray0 = convColor0.split("(")[1].split(")")[0].split(",").map(component => parseInt(component)/255);
            backgroundIns.color0.setValue(colorArray0);
        }
        else {
            console.log("Error: Color0 param is invalid.");
        }

        if(color1) {
            div.style.color = color1;
            if(div.style.color !== '') {
                const convColor1 = getComputedStyle(div).color;
                const colorArray1 = convColor1.split("(")[1].split(")")[0].split(",").map(component => parseInt(component)/255);
                backgroundIns.color1.setValue(colorArray1);
            }
            else {
                console.log("Error: Color1 param is invalid.");
            }
        }

        document.getElementsByTagName("voyager-explorer")[0].removeChild(div);
    }

    // Set background style (Solid, LinearGradient, RadialGradient)
    // The style name is matched case-insensitively against EBackgroundStyle names.
    setBackgroundStyle(style: string)
    {
        const backgroundIns = this.system.getMainComponent(CVDocumentProvider).activeComponent.setup.background.ins;
        const enumNames = Object.values(EBackgroundStyle).filter(value => typeof value === 'string') as string[];

        const foundStyle = enumNames.find(name => name.toLowerCase() === style.toLowerCase());
        if(foundStyle !== undefined) {
            backgroundIns.style.setValue(EBackgroundStyle[foundStyle]);
        }
        else {
            console.log("Error: Style param is invalid.");
        }
    }
}
// Expose the application constructor globally so host pages can bootstrap it without a bundler.
window["VoyagerExplorer"] = ExplorerApplication;
import { FastPath } from './fastPath';
import { Doc, concat, indent, join, hardline, lineSuffix, breakParent } from './docBuilder';
import { PrintFn } from './printer';
import { locStart, locEnd, isNextLineEmpty, hasNewLine, isPreviousLineEmpty } from './util';
import { Options } from './options';
import * as luaparse from 'luaparse';
// Placement of an attached comment relative to its owner node.
enum CommentType {
    Leading,            // printed before the node, on its own line(s)
    Trailing,           // printed after the node (same line unless the source breaks)
    Dangling,           // attached to a node with no suitable child, e.g. an empty body
    DanglingStatement   // printed at the end of a statement line that opens a block
}
// Collects the direct AST children of a node (skipping Comment nodes), keeping
// them ordered by source position so decorateComment can binary-search them.
function getChildrenOfNode(node: luaparse.Node): luaparse.Node[] {
    const children: luaparse.Node[] = [];

    // Insert `n` at the position that keeps `children` ordered by start offset.
    const insertChild = (n: luaparse.Node) => {
        if (!n || typeof n.type !== 'string' || n.type === 'Comment') {
            return;
        }
        let insertAt = children.length;
        while (insertAt > 0) {
            const prev = children[insertAt - 1];
            // NOTE(review): the second comparison uses the parent `node` rather
            // than `n`; preserved as-is, but it looks like it may have been
            // intended to be locEnd(n) — confirm against the original intent.
            if (locStart(prev) <= locStart(n) && locEnd(prev) <= locEnd(node)) {
                break;
            }
            insertAt -= 1;
        }
        children.splice(insertAt, 0, n);
    };

    for (const value of Object.values(node)) {
        if (Array.isArray(value)) {
            value.forEach(insertChild);
        } else if (value) {
            insertChild(value);
        }
    }

    return children;
}
// Walks every comment in the chunk and attaches it to the most appropriate AST
// node as a leading / trailing / dangling comment. Comments on their own line
// prefer the following node; inline comments prefer the preceding node. The
// handle* helpers run first and short-circuit (via ||, with side effects) to
// cover cases where the default placement would be wrong.
export function attachComments(ast: luaparse.Chunk, options: Options) {
    for (const comment of ast.comments) {
        // Populate precedingNode / enclosingNode / followingNode on the comment.
        decorateComment(ast, comment);

        const precedingNode = (comment as any).precedingNode as luaparse.Node;
        const enclosingNode = (comment as any).enclosingNode as luaparse.Node;
        const followingNode = (comment as any).followingNode as luaparse.Node;

        // Comment starts on its own line (a newline precedes it in the source).
        if (hasNewLine(options.sourceText, locStart(comment), { searchBackwards: true })) {
            if (
                handleStatementsWithNoBodyComments(enclosingNode, comment) ||
                handleFunctionBodyComments(precedingNode, enclosingNode, comment) ||
                handleIfStatementsWithNoBodyComments(precedingNode, enclosingNode, followingNode, comment)
            ) {
                // Handled
            }
            else if (followingNode) {
                addLeadingComment(followingNode, comment);
            } else if (precedingNode) {
                addTrailingComment(precedingNode, comment);
            }
            else if (enclosingNode) {
                addDanglingComment(enclosingNode, comment);
            }
            else {
                // No anchor found: attach to the chunk itself.
                addDanglingComment(ast, comment);
            }
        } else {
            // Inline comment (code precedes it on the same line).
            if (
                handleExpressionBeginComments(precedingNode, enclosingNode, comment) ||
                handleDanglingIfStatementsWithNoBodies(precedingNode, enclosingNode, comment)
            ) {
                // Handled
            }
            else if (precedingNode) {
                addTrailingComment(precedingNode, comment);
            }
            else if (followingNode) {
                addLeadingComment(followingNode, comment);
            }
            else if (enclosingNode) {
                addDanglingComment(enclosingNode, comment);
            }
            else {
                addDanglingComment(ast, comment);
            }
        }
    }
}
// Lua tolerates a shebang on the first line; luaparse strips it, so re-inject
// it into the chunk's comment list as a synthetic Comment node so the printer
// can preserve it.
export function injectShebang(ast: luaparse.Chunk, options: Options) {
    if (!options.sourceText.startsWith('#!')) {
        return;
    }

    // Fix: a shebang-only file with no trailing newline made indexOf return -1,
    // which truncated `raw`/`shebang` by one character and produced a negative
    // range end. Treat end-of-text as the end of the line instead.
    const newlineIndex = options.sourceText.indexOf('\n');
    const endLine = newlineIndex === -1 ? options.sourceText.length : newlineIndex;
    const raw = options.sourceText.slice(0, endLine);        // includes the '#!' prefix
    const shebang = options.sourceText.slice(2, endLine);    // interpreter text only

    ast.comments.push({
        type: 'Comment',
        loc: {
            start: {
                line: 1,
                column: 0
            },
            end: {
                line: 1,
                column: endLine
            }
        },
        range: [0, endLine],
        raw,
        value: shebang
    });
}
// Prints every comment tagged Dangling on the current node, joined by hard
// line breaks; the result is indented one level unless sameIndent is set.
export function printDanglingComments(path: FastPath, sameIndent: boolean = false): Doc {
    const node = path.getValue();
    if (!node || !node.attachedComments) {
        return '';
    }

    const danglingRaws: Doc[] = [];
    path.forEach((commentPath) => {
        const c = commentPath.getValue();
        if (c.commentType === CommentType.Dangling) {
            danglingRaws.push(c.raw);
        }
    }, 'attachedComments');

    if (danglingRaws.length === 0) {
        return '';
    }

    const joined = join(hardline, danglingRaws);
    return sameIndent ? joined : indent(concat([hardline, joined]));
}
// Prints comments that were attached to the end of a statement that leads to a block. Since luaparse doesn't give us
// AST nodes for blocks, we need to catch these cases and print them manually.
export function printDanglingStatementComments(path: FastPath): Doc {
    const node = path.getValue();
    if (!node || !node.attachedComments) {
        return '';
    }

    const parts: Doc[] = [];
    path.forEach((commentPath) => {
        const c = commentPath.getValue();
        if (c.commentType !== CommentType.DanglingStatement) {
            return;
        }
        // Separate each trailing comment from the statement with a single space.
        parts.push(' ', c.raw);
    }, 'attachedComments');

    return parts.length === 0 ? '' : concat(parts);
}
function printLeadingComment(path: FastPath, options: Options) {
const comment = path.getValue() as luaparse.Comment;
const isBlockComment = comment.raw.startsWith('--[[');
if (isBlockComment) {
return concat([
comment.raw,
hasNewLine(options.sourceText, locEnd(comment)) ? hardline : ' ']
);
}
const parts = [];
parts.push(comment.raw);
parts.push(hardline);
// If the leading comment contains a trailing newline, make sure we preserve it.
if (isNextLineEmpty(options.sourceText, locEnd(comment))) {
parts.push(hardline);
}
return concat(parts);
}
function printTrailingComment(path: FastPath, options: Options) {
const comment = path.getValue() as luaparse.Comment;
if (hasNewLine(options.sourceText, locStart(comment), { searchBackwards: true })) {
const previousLineEmpty = isPreviousLineEmpty(options.sourceText, locStart(comment));
return concat([hardline, previousLineEmpty ? hardline : '', comment.raw]);
}
if (comment.raw.startsWith('--[[')) {
return concat([' ', comment.raw]);
}
const parts = [];
// Preserve newline before comment
if (isNextLineEmpty(options.sourceText, locStart(comment), { searchBackwards: true })) {
parts.push(hardline);
}
parts.push(' ');
parts.push(comment.raw);
parts.push(breakParent);
return lineSuffix(concat(parts));
}
// Prints a node together with its attached comments: leading comments go in
// front of the printed node, trailing comments after it. Dangling comments are
// handled separately by the printDangling* helpers.
export function printComments(path: FastPath, options: Options, print: PrintFn) {
    const node = path.getValue();
    const printed = print(path);

    const comments = (node as any).attachedComments;
    if (!comments || comments.length === 0) {
        return printed;
    }

    const leading: Doc[] = [];
    const trailing: Doc[] = [printed];

    path.forEach((commentPath) => {
        const type = commentPath.getValue().commentType as CommentType;
        if (type === CommentType.Leading) {
            leading.push(printLeadingComment(path, options));
        } else if (type === CommentType.Trailing) {
            trailing.push(printTrailingComment(path, options));
        }
    }, 'attachedComments');

    return concat([...leading, ...trailing]);
}
// Binary-searches the node's (position-ordered) children to find the nodes
// immediately preceding and following the comment, recursing into any child
// that fully encloses it. Results are stored on the comment as
// precedingNode / enclosingNode / followingNode.
function decorateComment(node: luaparse.Node, comment: luaparse.Comment) {
    const childNodes = getChildrenOfNode(node);
    let precedingNode: luaparse.Node | null = null;
    let followingNode: luaparse.Node | null = null;

    let left = 0;
    let right = childNodes.length;
    while (left < right) {
        const middle = Math.floor((left + right) / 2);
        const childNode = childNodes[middle];

        // Does the comment completely fall within the range of this node?
        if (
            locStart(childNode) - locStart(comment) <= 0 &&
            locEnd(comment) - locEnd(childNode) <= 0
        ) {
            (comment as any).enclosingNode = childNode;
            decorateComment(childNode, comment);
            return;
        }

        // Does this node precede the comment?
        if (locEnd(childNode) - locStart(comment) <= 0) {
            precedingNode = childNode;
            left = middle + 1;
            continue;
        }

        // Does this node follow the comment?
        if (locEnd(comment) - locStart(childNode) <= 0) {
            followingNode = childNode;
            right = middle;
            continue;
        }

        // Fix: a comment that only partially overlaps a child matched none of
        // the cases above, leaving left/right unchanged and spinning forever.
        // Bail out with whatever preceding/following nodes were found so far.
        break;
    }

    if (precedingNode) {
        (comment as any).precedingNode = precedingNode;
    }
    if (followingNode) {
        (comment as any).followingNode = followingNode;
    }
}
// Stores a comment on the node's attachedComments list, creating it on demand.
function addComment(node: luaparse.Node, comment: luaparse.Comment) {
    const target = node as any;
    if (!target.attachedComments) {
        target.attachedComments = [];
    }
    target.attachedComments.push(comment);
}

// Tags a comment with its placement type, then attaches it to the node.
function tagAndAttach(type: CommentType, node: luaparse.Node, comment: luaparse.Comment) {
    (comment as any).commentType = type;
    addComment(node, comment);
}

function addLeadingComment(node: luaparse.Node, comment: luaparse.Comment) {
    tagAndAttach(CommentType.Leading, node, comment);
}

function addDanglingComment(node: luaparse.Node, comment: luaparse.Comment) {
    tagAndAttach(CommentType.Dangling, node, comment);
}

function addDanglingStatementComment(node: luaparse.Node, comment: luaparse.Comment) {
    tagAndAttach(CommentType.DanglingStatement, node, comment);
}

function addTrailingComment(node: luaparse.Node, comment: luaparse.Comment) {
    tagAndAttach(CommentType.Trailing, node, comment);
}
// A comment inside a node that has a body but no statements has nothing to
// attach to — make it a dangling comment on the enclosing node instead.
function handleStatementsWithNoBodyComments(enclosingNode: luaparse.Node, comment: luaparse.Comment) {
    const body = enclosingNode ? (enclosingNode as any).body : null;
    if (body != null && body.length === 0) {
        addDanglingComment(enclosingNode, comment);
        return true;
    }
    return false;
}
// Handle comments in functions with no bodies. The nearest node will be either the last function argument or the
// function name itself. We need to relocate these to dangling comments on the function itself so it can print them
// itself.
function handleFunctionBodyComments(precedingNode: luaparse.Node,
    enclosingNode: luaparse.Node, comment: luaparse.Comment) {
    if (!enclosingNode || enclosingNode.type !== 'FunctionDeclaration' || enclosingNode.body.length > 0) {
        return false;
    }

    const params = enclosingNode.parameters;
    const followsLastParameter = params.length > 0 && params[params.length - 1] === precedingNode;
    const followsFunctionName = Boolean(precedingNode) && precedingNode.type === 'Identifier';

    if (followsLastParameter || followsFunctionName) {
        addDanglingComment(enclosingNode, comment);
        return true;
    }
    return false;
}
// Handle comments in IfStatement clauses where the clause has no body. We need to relocate them to the appropriate
// clause as a dangling comment.
function handleIfStatementsWithNoBodyComments(precedingNode: luaparse.Node,
    enclosingNode: luaparse.Node, followingNode: luaparse.Node, comment: luaparse.Comment) {
    if (!enclosingNode || enclosingNode.type !== 'IfStatement') {
        return false;
    }

    // A comment sitting just before the next elseif/else belongs to the clause before it.
    // NOTE(review): precedingNode is used without a null check here; presumably an
    // Elseif/Else sibling is always preceded by another clause — confirm.
    if (followingNode && (followingNode.type === 'ElseifClause' || followingNode.type === 'ElseClause')) {
        addDanglingComment(precedingNode, comment);
        return true;
    }

    // A comment inside an empty trailing else attaches to the else clause itself.
    if (precedingNode && precedingNode.type === 'ElseClause') {
        addDanglingComment(precedingNode, comment);
        return true;
    }

    return false;
}
// Handle trailing comments on expressions whose AST range includes the body. We need to relocate them to dangling
// comments so they can print them when appropriate.
// Only applies to line comments; inline block comments (--[[ ]]) keep their default placement.
function handleExpressionBeginComments(precedingNode: luaparse.Node,
    enclosingNode: luaparse.Node, comment: luaparse.Comment) {
    if (comment.raw.startsWith('--[[')) {
        return false;
    }

    if (!enclosingNode) {
        return false;
    }

    switch (enclosingNode.type) {
        case 'WhileStatement':
            // Comment after `while <cond> do` belongs to the statement line.
            if (precedingNode === enclosingNode.condition) {
                addDanglingStatementComment(enclosingNode, comment);
                return true;
            }
            break;

        case 'DoStatement':
        case 'RepeatStatement':
            // No child precedes the comment: it sits right after `do` / `repeat`.
            if (precedingNode == null) {
                addDanglingStatementComment(enclosingNode, comment);
                return true;
            }
            break;

        case 'FunctionDeclaration':
            // Comment after the closing paren of the parameter list (or after the
            // function name when there are no parameters).
            if (
                (enclosingNode.parameters.length &&
                    precedingNode === enclosingNode.parameters[enclosingNode.parameters.length - 1]) ||
                (precedingNode === enclosingNode.identifier)
            ) {
                addDanglingStatementComment(enclosingNode, comment);
                return true;
            }
            break;

        case 'ForNumericStatement':
            // Comment after the last loop-header expression (`end` or optional `step`).
            if (precedingNode === enclosingNode.end || precedingNode === enclosingNode.step) {
                addDanglingStatementComment(enclosingNode, comment);
                return true;
            }
            break;

        case 'ForGenericStatement':
            // Comment after the last iterator expression in `for ... in <iters> do`.
            if (precedingNode === enclosingNode.iterators[enclosingNode.iterators.length - 1]) {
                addDanglingStatementComment(enclosingNode, comment);
                return true;
            }
            break;

        case 'IfClause':
        case 'ElseifClause':
            // If the comment would be assigned to the condition, but exists after it, then attach it to the clause.
            if (precedingNode === enclosingNode.condition &&
                comment.loc.start.column > precedingNode.loc.start.column) {
                addDanglingStatementComment(enclosingNode, comment);
                return true;
            }
            break;

        case 'ElseClause':
            // If the comment has no preceding node but is enclosed by the ElseClause, then it must be a trailing
            // statement comment
            if (precedingNode == null) {
                addDanglingStatementComment(enclosingNode, comment);
                return true;
            }
            break;
    }

    return false;
}
// Handle dangling comments on clauses within IfStatements that contain empty bodies. The logical place to put them
// would otherwise be trailing comments of the clause itself, which is not correct.
function handleDanglingIfStatementsWithNoBodies(precedingNode: luaparse.Node,
enclosingNode: luaparse.Node, comment: luaparse.Comment) {
if (!precedingNode || !enclosingNode) {
return false;
}
if (enclosingNode.type !== 'IfStatement') {
return false;
}
switch (precedingNode.type) {
case 'IfClause':
case 'ElseifClause':
case 'ElseClause':
if (precedingNode.body.length === 0) {
addDanglingStatementComment(precedingNode, comment);
return true;
}
break;
}
return false;
} | the_stack |
import { Observable, of, concat, throwError } from 'rxjs';
import { filter, tap, mergeMap } from 'rxjs/operators';
import { Event } from '../../event/event.interface';
import { Marbles } from '../../+internal/testing';
import { NamedError } from '../../+internal/utils';
import { act } from './act.operator';
describe('#act operator', () => {
  test('emits output event from async (Observable) call function', () => {
    // given — four distinct input events
    const event1: Event = { type: 'TEST_EVENT_1', payload: 1 };
    const event2: Event = { type: 'TEST_EVENT_2', payload: 2 };
    const event3: Event = { type: 'TEST_EVENT_3', payload: 3 };
    const event4: Event = { type: 'TEST_EVENT_4', payload: 4 };

    // when — act() wraps an identity Observable call
    const effect$ = (event$: Observable<Event>) =>
      event$.pipe(
        act(event => of(event)),
      );

    // then — output marble diagram mirrors the input exactly
    Marbles.assertEffect(effect$, [
      ['-a-b-c-d---', { a: event1, b: event2, c: event3, d: event4 }],
      ['-a-b-c-d---', { a: event1, b: event2, c: event3, d: event4 }],
    ]);
  });
  test('emits multiple output events', () => {
    // given
    const event1: Event = { type: 'TEST_EVENT_1', payload: 1 };
    const event2: Event = { type: 'TEST_EVENT_2', payload: 2 };
    const event3: Event = { type: 'TEST_EVENT_3', payload: 3 };
    const event4: Event = { type: 'TEST_EVENT_4', payload: 4 };

    // when — the call function duplicates the event with payload === 2
    const effect$ = (event$: Observable<Event>) =>
      event$.pipe(
        act((event: any) => of(event).pipe(
          mergeMap(e => e.payload === 2
            ? concat(of(e), of(e))
            : of(e)),
        )),
      );

    // then — '(bb)' marks two synchronous emissions in the same frame
    Marbles.assertEffect(effect$, [
      ['-a-b----c-d---', { a: event1, b: event2, c: event3, d: event4 }],
      ['-a-(bb)-c-d---', { a: event1, b: event2, c: event3, d: event4 }],
    ]);
  });
  test('emits filtered output event', () => {
    // given
    const event1: Event = { type: 'TEST_EVENT_1', payload: 1 };
    const event2: Event = { type: 'TEST_EVENT_2', payload: 2 };
    const event3: Event = { type: 'TEST_EVENT_3', payload: 3 };
    const event4: Event = { type: 'TEST_EVENT_4', payload: 4 };

    // when — the call function filters out events with payload < 2
    const effect$ = (event$: Observable<Event>) =>
      event$.pipe(
        act((event: any) => of(event).pipe(
          filter(e => e.payload >= 2),
        )),
      );

    // then — event1 is dropped; the rest keep their positions
    Marbles.assertEffect(effect$, [
      ['-a-b-c-d---', { a: event1, b: event2, c: event3, d: event4 }],
      ['---b-c-d---', { b: event2, c: event3, d: event4 }],
    ]);
  });
test('catches errored event => maps to default => continues stream', () => {
// given
const error = { name: 'TestError', message: 'TestErrorMessage' };
const event1: Event = { type: 'TEST_EVENT_1', payload: 1, metadata: { replyTo: 'channel_1' } };
const event2: Event = { type: 'TEST_EVENT_2', payload: 2, metadata: { replyTo: 'channel_2' } };
const event3: Event = { type: 'TEST_EVENT_3', payload: 3, metadata: { replyTo: 'channel_3' } };
const event4: Event = { type: 'TEST_EVENT_4', payload: 4, metadata: { replyTo: 'channel_4' } };
const event_error: Event = { type: 'TEST_EVENT_2_UNHANDLED_ERROR', error, metadata: { replyTo: 'channel_2' } };
// when
const effect$ = (event$: Observable<Event>) =>
event$.pipe(
act((event: any) => of(event).pipe(
tap(e => { if (e.payload === 2) throw new NamedError(error.name, error.message); }),
)),
);
// then
Marbles.assertEffect(effect$, [
['-a-b-c-d---', { a: event1, b: event2, c: event3, d: event4 }],
['-a-b-c-d---', { a: event1, b: event_error, c: event3, d: event4 }],
]);
});
test('catches errored event => maps to custom event => continues stream', () => {
// given
const event1: Event = { type: 'TEST_EVENT_1', payload: 1 };
const event2: Event = { type: 'TEST_EVENT_2', payload: 2 };
const event3: Event = { type: 'TEST_EVENT_3', payload: 3 };
const event4: Event = { type: 'TEST_EVENT_4', payload: 4 };
const event_error: Event = { type: 'TEST_ERROR', error: 'something went wrong' };
// when
const effect$ = (event$: Observable<Event>) =>
event$.pipe(
act(
(event: any) => of(event).pipe(
tap(e => { if (e.payload === 2) throw new Error('something went wrong'); }),
),
(error: Error) => ({
type: 'TEST_ERROR',
error: error.message,
}),
),
);
// then
Marbles.assertEffect(effect$, [
['-a-b-c-d---', { a: event1, b: event2, c: event3, d: event4 }],
['-a-b-c-d---', { a: event1, b: event_error, c: event3, d: event4 }],
]);
});
test('catches errored event => maps to custom event (based on errored one) => continues stream', () => {
// given
const event1: Event = { type: 'TEST_EVENT_1', payload: 1 };
const event2: Event = { type: 'TEST_EVENT_2', payload: 2 };
const event3: Event = { type: 'TEST_EVENT_3', payload: 3 };
const event4: Event = { type: 'TEST_EVENT_4', payload: 4 };
const event_error: Event = { type: 'TEST_EVENT_2', error: 'something went wrong' };
// when
const effect$ = (event$: Observable<Event>) =>
event$.pipe(
act(
(event: any) => of(event).pipe(
tap(e => { if (e.payload === 2) throw new Error('something went wrong'); }),
),
(error: Error, event: Event) => ({
type: event.type,
error: error.message,
}),
),
);
// then
Marbles.assertEffect(effect$, [
['-a-b-c-d---', { a: event1, b: event2, c: event3, d: event4 }],
['-a-b-c-d---', { a: event1, b: event_error, c: event3, d: event4 }],
]);
});
test('catches errored event => maps to observable of new events => continues stream', () => {
// given
const event1: Event = { type: 'TEST_EVENT_1', payload: 1 };
const event2: Event = { type: 'TEST_EVENT_2', payload: 2 };
const event3: Event = { type: 'TEST_EVENT_3', payload: 3 };
const event4: Event = { type: 'TEST_EVENT_4', payload: 4 };
const event_error: Event = { type: 'TEST_ERROR' };
// when
const effect$ = (event$: Observable<Event>) =>
event$.pipe(
act(
(event: any) => of(event).pipe(
tap(e => { if (e.payload === 2) throw new Error('something went wrong'); }),
),
() => concat(
of(event_error),
of(event_error),
),
),
);
// then
Marbles.assertEffect(effect$, [
['-a-b----c-d---', { a: event1, b: event2, c: event3, d: event4 }],
['-a-(bb)-c-d---', { a: event1, b: { ...event_error, error: true }, c: event3, d: event4 }],
]);
});
test('catches errored event when thrown outside stream => maps to observable of new events => continues stream', () => {
// given
const event1: Event = { type: 'TEST_EVENT_1', payload: 1 };
const event2: Event = { type: 'TEST_EVENT_2', payload: 2 };
const event3: Event = { type: 'TEST_EVENT_3', payload: 3 };
const event4: Event = { type: 'TEST_EVENT_4', payload: 4 };
const event_error: Event = { type: 'TEST_ERROR' };
// when
const effect$ = (event$: Observable<Event>) =>
event$.pipe(
act(
(event: Event) => {
if (event.payload === 2) throw new Error('something went wrong');
else return of(event);
},
() => concat(
of(event_error),
of(event_error),
),
),
);
// then
Marbles.assertEffect(effect$, [
['-a-b----c-d---', { a: event1, b: event2, c: event3, d: event4 }],
['-a-(bb)-c-d---', { a: event1, b: { ...event_error, error: true }, c: event3, d: event4 }],
]);
});
test('passes through errored event', () => {
// given
const event1: Event = { type: 'TEST_EVENT_1', payload: 1 };
const event2: Event = { type: 'TEST_EVENT_2', error: 'some_error' };
const event3: Event = { type: 'TEST_EVENT_3', payload: 3 };
const event4: Event = { type: 'TEST_EVENT_4', payload: 4 };
const event_ok: Event = { type: 'TEST_EVENT_PROCESSED', payload: 0 };
const event_err: Event = { type: 'TEST_EVENT_2_UNHANDLED_ERROR', error: 'some_error' };
// when
const effect$ = (event$: Observable<Event>) =>
event$.pipe(
act(_ => of(event_ok)),
);
// then
Marbles.assertEffect(effect$, [
['-a-b-c-d---', { a: event1, b: event2, c: event3, d: event4 }],
['-a-b-c-d---', { a: event_ok, b: event_err, c: event_ok, d: event_ok }],
]);
});
test('chains "act" operators', () => {
// given
const event1: Event<number> = { type: 'TEST_EVENT_1', payload: 1 };
const event2: Event<number, string> = { type: 'TEST_EVENT_2', error: 'some_error' };
const event3: Event<number> = { type: 'TEST_EVENT_3', payload: 3 };
const event4: Event<number> = { type: 'TEST_EVENT_4', payload: 4 };
const event_ok: Event<boolean> = { type: 'TEST_EVENT_PROCESSED', payload: true };
const event_ok_out: Event<number> = { type: 'TEST_EVENT_PROCESSED', payload: 0 };
const event_err1: Event<string> = { type: 'TEST_EVENT_ERROR', payload: 'test_1' };
const event_err2: Event<string> = { type: 'TEST_EVENT_ERROR', payload: 'test_2' };
// when
const effect$ = (event$: Observable<Event>) =>
event$.pipe(
act(_ => of(event_ok), _ => event_err1),
act(_ => of(event_ok_out), _ => event_err2),
);
// then
Marbles.assertEffect(effect$, [
['-a-b-c-d---', { a: event1, b: event2, c: event3, d: event4 }],
['-a-b-c-d---', { a: event_ok_out, b: { ...event_err2, error: true }, c: event_ok_out, d: event_ok_out }],
]);
});
test('triggers subsequent "act" error handler if previous "act" operator did not handled the exception', () => {
// given
const event1: Event<number> = { type: 'TEST_EVENT_1', payload: 1 };
const event2: Event<number> = { type: 'TEST_EVENT_2', payload: 2 };
const event3: Event<number> = { type: 'TEST_EVENT_3', payload: 3 };
const event4: Event<number> = { type: 'TEST_EVENT_4', payload: 4 };
const event_ok: Event<boolean> = { type: 'TEST_EVENT_PROCESSED', payload: true };
const event_err: Event<string> = { type: 'TEST_EVENT_ERROR', payload: 'test_error' };
// when
const effect$ = (event$: Observable<Event>) =>
event$.pipe(
act(e => e.payload === 2 ? throwError(() => 'test_error') : of(e as Event<number>)),
act(_ => of(event_ok), (error, event) => {
expect(error).toEqual('test_error');
expect(event).toEqual({ type: 'TEST_EVENT_2_UNHANDLED_ERROR', error: 'test_error' });
return event_err;
}),
);
// then
Marbles.assertEffect(effect$, [
['-a-b-c-d---', { a: event1, b: event2, c: event3, d: event4 }],
['-a-b-c-d---', { a: event_ok, b: { ...event_err, error: true }, c: event_ok, d: event_ok }],
]);
});
}); | the_stack |
import {ChildProcess} from "child_process"
import spawn from "cross-spawn"
import express from "express"
import {existsSync, readFileSync, unlinkSync, writeFileSync} from "fs"
import {writeFile} from "fs-extra"
import getPort from "get-port"
import http from "http"
// `next` here is the symlink in `test/node_modules/next` which points to the root directory.
// This is done so that requiring from `next` works.
// The reason we don't import the relative path `../../dist/<etc>` is that it would lead to inconsistent module singletons
// import server from "next/dist/server/next"
import _pkg from "next/package.json"
import fetch from "node-fetch"
import path from "path"
import qs from "querystring"
import treeKill from "tree-kill"
// export const nextServer = server
export const pkg = _pkg
// polyfill Object.fromEntries for the test/integration/relay-analytics tests
// on node 10, this can be removed after we no longer support node 10
// (guarded so the native implementation is preferred when present)
if (!Object.fromEntries) {
  Object.fromEntries = require("core-js/features/object/from-entries")
}
// Spawn `node <scriptPath>` and resolve with the ChildProcess as soon as its
// stdout matches `successRegexp`. Rejects when stderr matches `failRegexp`
// (the child is killed first) or when spawning itself fails.
// `opts.onStdout` / `opts.onStderr` receive each chunk as a string; all
// output is also mirrored to this process's stdio for test logs.
export function initBlitzServerScript(
  scriptPath: string,
  successRegexp: RegExp,
  env: Record<any, any>,
  failRegexp: RegExp,
  opts?: {
    onStdout?: (stdout: string) => void
    onStderr?: (stderr: string) => void
  },
) {
  return new Promise((resolve, reject) => {
    const instance = spawn("node", ["--no-deprecation", scriptPath], {env: env as any})
    function handleStdout(data: Buffer) {
      const message = data.toString()
      if (successRegexp.test(message)) {
        // Server reported readiness — hand the child process to the caller.
        resolve(instance)
      }
      process.stdout.write(message)
      if (opts && opts.onStdout) {
        opts.onStdout(message.toString())
      }
    }
    function handleStderr(data: Buffer) {
      const message = data.toString()
      if (failRegexp && failRegexp.test(message)) {
        // A failure marker appeared: stop the child and bail out.
        instance.kill()
        return reject(new Error("received failRegexp"))
      }
      process.stderr.write(message)
      if (opts && opts.onStderr) {
        opts.onStderr(message.toString())
      }
    }
    instance.stdout.on("data", handleStdout)
    instance.stderr.on("data", handleStderr)
    instance.on("close", () => {
      // Detach listeners so a finished child stops feeding our handlers.
      instance.stdout.removeListener("data", handleStdout)
      instance.stderr.removeListener("data", handleStderr)
    })
    instance.on("error", (err) => {
      reject(err)
    })
  })
}
// Render a page directly through the app's renderToHTML API (no HTTP round
// trip); `query` is serialized into the URL's search string when present.
export function renderViaAPI(app: any, pathname: string, query: Record<any, any>) {
  const search = query ? `?${qs.stringify(query)}` : ""
  const url = pathname + search
  return app.renderToHTML({url}, {}, pathname, query)
}
// Fetch a page from the running app over HTTP and return its body as text.
export async function renderViaHTTP(
  appPort: number,
  pathname: string,
  query?: Record<any, any>,
  opts?: Record<any, any>,
) {
  const res = await fetchViaHTTP(appPort, pathname, query, opts)
  return res.text()
}
// Fetch `http://localhost:<appPort><pathname>` with an optional query.
// `query` may be an object (serialized with qs.stringify) or a pre-built
// "?..." string — the runtime always supported strings, so the declared
// type now matches (`Record<any, any> | string` instead of only Record).
// `opts` is forwarded to `fetch` as its init options.
export function fetchViaHTTP(
  appPort: number,
  pathname: string,
  query?: Record<any, any> | string,
  opts?: Record<any, any>,
) {
  const search = typeof query === "string" ? query : query ? `?${qs.stringify(query)}` : ""
  const url = `http://localhost:${appPort}${pathname}${search}`
  return fetch(url, opts)
}
// Ask the OS for an available TCP port (delegates to the `get-port` package).
export function findPort() {
  return getPort()
}
// Options for `runBlitzCommand` and its wrappers (blitzBuild/blitzExport/...).
interface RunBlitzCommandOptions {
  cwd?: string // working directory for the child (defaults to the blitz package dir)
  env?: Record<any, any> // extra environment variables merged over process.env
  spawnOptions?: any // forwarded verbatim to cross-spawn
  instance?: any // callback that receives the spawned ChildProcess
  stderr?: boolean // truthy: capture stderr instead of rejecting on non-zero exit
  stdout?: boolean // truthy: capture stdout into the resolved result
  ignoreFail?: boolean // truthy: resolve even when the exit code is non-zero
}
// Run `blitz <argv...>` to completion and resolve with
// {code, signal, stdout, stderr}. By default a non-zero exit code rejects;
// setting `stderr`, `stdout`, or `ignoreFail` makes it resolve instead so
// callers can inspect the captured output.
export function runBlitzCommand(argv: any[], options: RunBlitzCommandOptions = {}) {
  const blitzDir = path.dirname(require.resolve("blitz/package"))
  const blitzBin = path.join(blitzDir, "bin/blitz")
  const cwd = options.cwd || blitzDir
  // Let Next.js decide the environment
  const env = {
    ...process.env,
    ...options.env,
    NODE_ENV: "",
    __NEXT_TEST_MODE: "true",
  }
  return new Promise<any>((resolve, reject) => {
    console.log(`Running command "blitz ${argv.join(" ")}"`)
    const instance = spawn("node", ["--no-deprecation", blitzBin, ...argv], {
      ...options.spawnOptions,
      cwd,
      env,
      stdio: ["ignore", "pipe", "pipe"],
    })
    // Give the caller access to the raw ChildProcess if requested.
    if (typeof options.instance === "function") {
      options.instance(instance)
    }
    let stderrOutput = ""
    instance.stderr?.on("data", function (chunk) {
      stderrOutput += chunk
    })
    let stdoutOutput = ""
    if (options.stdout) {
      instance.stdout?.on("data", function (chunk) {
        stdoutOutput += chunk
      })
    }
    instance.on("close", (code, signal) => {
      // Fail loudly on non-zero exit unless the caller opted into capturing
      // output or ignoring failures.
      if (!options.stderr && !options.stdout && !options.ignoreFail && code !== 0) {
        console.log(stderrOutput)
        return reject(new Error(`command failed with code ${code}`))
      }
      resolve({
        code,
        signal,
        stdout: stdoutOutput,
        stderr: stderrOutput,
      })
    })
    instance.on("error", (err: any) => {
      // Attach captured output to the error for easier debugging upstream.
      console.log(stderrOutput)
      err.stdout = stdoutOutput
      err.stderr = stderrOutput
      reject(err)
    })
  })
}
// Options for `runBlitzLaunchCommand` / `launchApp` / `blitzStart`.
interface RunBlitzLaunchOptions {
  cwd?: string // working directory for the dev/start process
  env?: Record<any, any> // extra environment variables merged over process.env
  onStdout?: (stdout: string) => void // tap invoked with every stdout chunk
  onStderr?: (stderr: string) => void // tap invoked with every stderr chunk
  stderr?: boolean // pass false to stop mirroring child stderr to this process
  stdout?: boolean // pass false to stop mirroring child stdout to this process
  blitzStart?: boolean // run `blitz start` (production) instead of `blitz dev`
}
// Spawn `blitz dev` or `blitz start` (chosen by `opts.blitzStart`) with the
// given CLI args. Resolves with the ChildProcess once the boot-up marker is
// seen on stdout — or with the marker message itself when `stdOut` is
// truthy. Resolves with undefined if the child exits before any marker.
export function runBlitzLaunchCommand(
  argv: any[],
  stdOut: unknown,
  opts: RunBlitzLaunchOptions = {},
) {
  const blitzDir = path.dirname(require.resolve("blitz/package"))
  const blitzBin = path.join(blitzDir, "bin/blitz")
  const cwd = opts.cwd ?? path.dirname(require.resolve("blitz/package"))
  console.log(cwd)
  const env = {
    ...process.env,
    NODE_ENV: undefined,
    __NEXT_TEST_MODE: "true",
    ...opts.env,
  }
  const command = opts.blitzStart ? "start" : "dev"
  console.log(`Running command "blitz ${command}" `)
  return new Promise<void | string | ChildProcess>((resolve, reject) => {
    const instance = spawn("node", ["--no-deprecation", blitzBin, command, ...argv], {cwd, env})
    let didResolve = false
    function handleStdout(data: Buffer) {
      const message = data.toString()
      // stdout lines that signal the server finished booting, per mode.
      const bootupMarkers = {
        dev: /compiled successfully/i,
        start: /started server/i,
      }
      // NOTE(review): a truthy `stdOut` also selects the "start" marker here;
      // this mirrors upstream Next.js test utils but verify it is intended.
      if (bootupMarkers[opts.blitzStart || stdOut ? "start" : "dev"].test(message)) {
        if (!didResolve) {
          didResolve = true
          resolve(stdOut ? message : instance)
        }
      }
      if (typeof opts.onStdout === "function") {
        opts.onStdout(message)
      }
      if (opts.stdout !== false) {
        process.stdout.write(message)
      }
    }
    function handleStderr(data: Buffer) {
      const message = data.toString()
      if (typeof opts.onStderr === "function") {
        opts.onStderr(message)
      }
      if (opts.stderr !== false) {
        process.stderr.write(message)
      }
    }
    instance.stdout.on("data", handleStdout)
    instance.stderr.on("data", handleStderr)
    instance.on("close", () => {
      instance.stdout.removeListener("data", handleStdout)
      instance.stderr.removeListener("data", handleStderr)
      // Child ended before (or without) a boot-up marker: resolve empty.
      if (!didResolve) {
        didResolve = true
        resolve()
      }
    })
    instance.on("error", (err) => {
      reject(err)
    })
  })
}
// Launch the app in dev mode.
// Resolves with the dev server's ChildProcess once it reports a successful
// compile (see `runBlitzLaunchCommand`).
export function launchApp(dir: string, port: number, opts: RunBlitzLaunchOptions = {}) {
  return runBlitzLaunchCommand(["-p", port], undefined, {cwd: dir, ...opts})
}
// Run `blitz build` in `dir`; resolves with {code, signal, stdout, stderr}.
export function blitzBuild(dir: string, args = [], opts: RunBlitzCommandOptions = {}) {
  return runBlitzCommand(["build", ...args], {cwd: dir, ...opts})
}
// Run `blitz export --outdir <outdir>` in `dir`.
export function blitzExport(dir: string, {outdir}, opts: RunBlitzCommandOptions = {}) {
  return runBlitzCommand(["export", "--outdir", outdir], {cwd: dir, ...opts})
}
// Run `blitz export` in `dir` with the default output directory.
export function blitzExportDefault(dir: string, opts: RunBlitzCommandOptions = {}) {
  return runBlitzCommand(["export"], {cwd: dir, ...opts})
}
// Launch the production server (`blitz start`) in `dir` on `port`.
export function blitzStart(dir: string, port: number, opts: RunBlitzLaunchOptions = {}) {
  return runBlitzLaunchCommand(["-p", port], undefined, {
    cwd: dir,
    ...opts,
    blitzStart: true,
  })
}
// Compile with the local TypeScript compiler (tsc), passing `args` through.
// `cwd` is now optional to match the body's existing `cwd || <default>`
// fallback (it was previously a required parameter declared *after*
// defaulted ones, forcing callers to always supply it); it defaults to the
// @blitzjs/cli package directory. Rejects with the combined stdout/stderr
// output when tsc exits non-zero.
export function buildTS(args = [], cwd?: string, env = {}) {
  cwd = cwd || path.dirname(require.resolve("@blitzjs/cli/package"))
  env = {...process.env, NODE_ENV: undefined, ...env}
  return new Promise<void>((resolve, reject) => {
    const instance = spawn(
      "node",
      ["--no-deprecation", require.resolve("typescript/lib/tsc"), ...args],
      {cwd, env},
    )
    // Collect both streams into one transcript for the error message.
    let output = ""
    const handleData = (chunk: Buffer) => {
      output += chunk.toString()
    }
    instance.stdout.on("data", handleData)
    instance.stderr.on("data", handleData)
    instance.on("exit", (code) => {
      if (code) {
        return reject(new Error("exited with code: " + code + "\n" + output))
      }
      resolve()
    })
  })
}
// Kill a launched app
// Terminates the given ChildProcess and its whole process tree via treeKill.
// On Windows, "already dead" errors from taskkill are swallowed and treated
// as success; any other error rejects.
export async function killApp(instance) {
  await new Promise<void>((resolve, reject) => {
    treeKill(instance.pid, (err) => {
      if (err) {
        if (
          process.platform === "win32" &&
          typeof err.message === "string" &&
          (err.message.includes(`no running instance of the task`) ||
            err.message.includes(`not found`))
        ) {
          // Windows throws an error if the process is already dead
          //
          // Command failed: taskkill /pid 6924 /T /F
          // ERROR: The process with PID 6924 (child process of PID 6736) could not be terminated.
          // Reason: There is no running instance of the task.
          return resolve()
        }
        return reject(err)
      }
      resolve()
    })
  })
}
// Prepare an app instance, wrap its request handler in an http.Server, and
// start listening on an OS-assigned port. The app is stashed on the server
// object so `stopApp` can close it too. Returns the listening server.
export async function startApp(app: any) {
  await app.prepare()
  const handler = app.getRequestHandler()
  const server = http.createServer(handler)
  ;(server as any).__app = app
  await promiseCall(server, "listen")
  return server
}
// Close the app attached by `startApp` (if any), then close the server.
export async function stopApp(server: any) {
  if (server.__app) {
    await server.__app.close()
  }
  await promiseCall(server, "close")
}
// Invoke `obj[method](...args, callback)` and adapt the Node-style
// (err, result) callback into a Promise.
export function promiseCall(obj: any, method: any, ...args: any[]) {
  return new Promise((resolve, reject) => {
    const done = (err: any, res: any) => {
      if (err) {
        reject(err)
        return
      }
      resolve(res)
    }
    obj[method](...args, done)
  })
}
// Simple sleep helper: resolve after `millis` milliseconds.
export function waitFor(millis: number) {
  return new Promise((resolve) => {
    setTimeout(resolve, millis)
  })
}
// Serve `dir` as static files on an OS-assigned port; returns the server.
export async function startStaticServer(dir: string) {
  const handler = express()
  handler.use(express.static(dir))
  const server = http.createServer(handler)
  await promiseCall(server, "listen")
  return server
}
// Like `startStaticServer`, but resolves extension-less URLs to .html files.
export async function startCleanStaticServer(dir: string) {
  const handler = express()
  handler.use(express.static(dir, {extensions: ["html"]}))
  const server = http.createServer(handler)
  await promiseCall(server, "listen")
  return server
}
// check for content in 1 second intervals timing out after
// 30 seconds
// Polls `contentFn` until its result matches `regex`: exact equality for
// strings, `.test()` for RegExps. The parameter type is now
// `RegExp | string` to match the existing runtime string branch (it was
// declared as only `RegExp`). Returns true on match; on timeout throws, or
// returns false when `hardError` is false.
export async function check(contentFn: Function, regex: RegExp | string, hardError = true) {
  let content: any
  let lastErr: any
  for (let tries = 0; tries < 30; tries++) {
    try {
      content = await contentFn()
      if (typeof regex === "string") {
        // Plain strings are compared for exact equality.
        if (regex === content) {
          return true
        }
      } else if (regex.test(content)) {
        // found the content
        return true
      }
      await waitFor(1000)
    } catch (err) {
      // contentFn failed this round — remember the error and keep polling.
      await waitFor(1000)
      lastErr = err
    }
  }
  console.error("TIMED OUT CHECK: ", {regex, content, lastErr})
  if (hardError) {
    throw new Error("TIMED OUT: " + regex + "\n\n" + content)
  }
  return false
}
// Helper for temporarily mutating a file on disk during a test.
// The constructor snapshots the file's current content (or null when the
// file does not exist) so `restore()` can put it back afterwards.
export class File {
  path: string
  // Snapshot taken at construction; null when the file did not exist, in
  // which case the first `write` becomes the restore target.
  originalContent: string | null
  constructor(path: string) {
    this.path = path
    this.originalContent = existsSync(this.path) ? readFileSync(this.path, "utf8") : null
  }
  // Write `content` to the file, capturing it as the restore target only
  // when no original existed. The explicit null check (previously a falsy
  // check) keeps an empty-string original from being clobbered by writes.
  write(content: any) {
    if (this.originalContent === null) {
      this.originalContent = content
    }
    writeFileSync(this.path, content, "utf8")
  }
  // Replace `pattern` (string or RegExp) with `newValue`; throws when the
  // pattern is not present so a stale fixture fails loudly.
  replace(pattern: any, newValue: any) {
    const currentContent = readFileSync(this.path, "utf8")
    if (pattern instanceof RegExp) {
      if (!pattern.test(currentContent)) {
        throw new Error(
          `Failed to replace content.\n\nPattern: ${pattern.toString()}\n\nContent: ${currentContent}`,
        )
      }
    } else if (typeof pattern === "string") {
      if (!currentContent.includes(pattern)) {
        throw new Error(
          `Failed to replace content.\n\nPattern: ${pattern}\n\nContent: ${currentContent}`,
        )
      }
    } else {
      throw new Error(`Unknown replacement attempt type: ${pattern}`)
    }
    const newContent = currentContent.replace(pattern, newValue)
    this.write(newContent)
  }
  // Remove the file from disk.
  delete() {
    unlinkSync(this.path)
  }
  // Write the original (or first-written) content back.
  restore() {
    this.write(this.originalContent)
  }
}
// Execute a function inside the browser context via `executeScript` and
// return its result after a short settle pause. Rejects unless `input` is
// a function.
export async function evaluate(browser: any, input: any) {
  if (typeof input !== "function") {
    throw new Error(`You must pass a function to be evaluated in the browser.`)
  }
  const result = await browser.executeScript(input)
  await new Promise((resolve) => setTimeout(resolve, 30))
  return result
}
// Repeatedly invoke `fn` every `interval` ms until it stops throwing or
// `duration` ms have elapsed; rethrows the last error on timeout.
// `description` only decorates the log messages and is now optional — it
// was previously a required parameter declared after defaulted ones, even
// though the body already guarded every use with `description ? … : ""`.
export async function retry(fn: Function, duration = 3000, interval = 500, description?: string) {
  if (duration % interval !== 0) {
    throw new Error(
      `invalid duration ${duration} and interval ${interval} mix, duration must be evenly divisible by interval`,
    )
  }
  for (let i = duration; i >= 0; i -= interval) {
    try {
      return await fn()
    } catch (err) {
      if (i === 0) {
        // Budget exhausted — surface the last failure.
        console.error(`Failed to retry${description ? ` ${description}` : ""} within ${duration}ms`)
        throw err
      }
      console.warn(`Retrying${description ? ` ${description}` : ""} in ${interval}ms`)
      await waitFor(interval)
    }
  }
}
// Poll the page for the Next.js dev-overlay ("redbox") error dialog.
// Returns true as soon as an error/build-error label is found inside any
// <nextjs-portal> shadow root. When `expected` is true, keeps polling about
// once per second for up to ~30 attempts before returning false; when
// `expected` is false the loop body runs only once (note the 1s sleep still
// happens before the while-condition is evaluated).
export async function hasRedbox(browser: any, expected = true) {
  let attempts = 30
  do {
    const has = await evaluate(browser, () => {
      return Boolean(
        [].slice
          .call(document.querySelectorAll("nextjs-portal"))
          .find((p: any) =>
            p.shadowRoot.querySelector(
              "#nextjs__container_errors_label, #nextjs__container_build_error_label",
            ),
          ),
      )
    })
    if (has) {
      return true
    }
    if (--attempts < 0) {
      break
    }
    await new Promise((resolve) => setTimeout(resolve, 1000))
  } while (expected)
  return false
}
// Read the dev-overlay ("redbox") dialog header text, retrying for up to
// 3 seconds while the overlay mounts. Webpack placeholder names are
// normalized to "Unknown".
export async function getRedboxHeader(browser: any) {
  return retry(
    () =>
      evaluate(browser, () => {
        const portal = [].slice
          .call(document.querySelectorAll("nextjs-portal"))
          // Fixed: the attribute selector was missing its closing "]"
          // (cf. the correctly-bracketed selector used just below).
          .find((p: any) => p.shadowRoot.querySelector("[data-nextjs-dialog-header]"))
        const root = portal.shadowRoot
        return root
          .querySelector("[data-nextjs-dialog-header]")
          .innerText.replace(/__WEBPACK_DEFAULT_EXPORT__/, "Unknown")
      }),
    3000,
    500,
    "getRedboxHeader",
  )
}
// Extract the error source text (code frame or terminal output) from the
// dev overlay, retrying for up to 3 seconds while it mounts. Webpack
// placeholder names are normalized to "Unknown".
export async function getRedboxSource(browser: any) {
  const read = () =>
    evaluate(browser, () => {
      const portals = [].slice.call(document.querySelectorAll("nextjs-portal"))
      const portal = portals.find((p: any) =>
        p.shadowRoot.querySelector(
          "#nextjs__container_errors_label, #nextjs__container_build_error_label",
        ),
      )
      const text = portal.shadowRoot.querySelector(
        "[data-nextjs-codeframe], [data-nextjs-terminal]",
      ).innerText
      return text.replace(/__WEBPACK_DEFAULT_EXPORT__/, "Unknown")
    })
  return retry(read, 3000, 500, "getRedboxSource")
}
// Grab the visible text of the page body via the browser's eval helper.
export function getBrowserBodyText(browser: any) {
  const script = 'document.getElementsByTagName("body")[0].innerText'
  return browser.eval(script)
}
// Canonicalize a regex pattern string by round-tripping it through the
// RegExp constructor, then normalize any leading "^/" to "^\/".
export function normalizeRegEx(src: string) {
  const canonical = new RegExp(src).source
  return canonical.replace(/\^\//g, "^\\/")
}
// Read and parse a JSON file. Reads as UTF-8 text so no `as any` cast is
// needed (previously the Buffer return value was force-cast to satisfy
// JSON.parse's string parameter).
function readJson(path: string) {
  return JSON.parse(readFileSync(path, "utf8"))
}
// Read `.next/build-manifest.json` from a built app directory.
export function getBuildManifest(dir: string) {
  return readJson(path.join(dir, ".next/build-manifest.json"))
}
export function getPageFileFromBuildManifest(dir: string, page: string) {
const buildManifest = getBuildManifest(dir)
const pageFiles = buildManifest.pages[page]
if (!pageFiles) {
throw new Error(`No files for page ${page}`)
}
const pageFile = pageFiles.find(
(file: string) =>
file.endsWith(".js") && file.includes(`pages${page === "" ? "/index" : page}`),
)
if (!pageFile) {
throw new Error(`No page file for page ${page}`)
}
return pageFile
}
// Read the built client-side JS bundle for `page` from `.next`.
export function readBlitzBuildClientPageFile(appDir: string, page: string) {
  const pageFile = getPageFileFromBuildManifest(appDir, page)
  return readFileSync(path.join(appDir, ".next", pageFile), "utf8")
}
// Load the pages manifest, preferring the server build output and falling
// back to the serverless build location.
export function getPagesManifest(dir: string) {
  const serverFile = path.join(dir, ".next/server/pages-manifest.json")
  const target = existsSync(serverFile)
    ? serverFile
    : path.join(dir, ".next/serverless/pages-manifest.json")
  return readJson(target)
}
// Overwrite the pages manifest, writing to whichever location exists
// (server build preferred, serverless fallback).
export function updatePagesManifest(dir: string, content: any) {
  const serverFile = path.join(dir, ".next/server/pages-manifest.json")
  const target = existsSync(serverFile)
    ? serverFile
    : path.join(dir, ".next/serverless/pages-manifest.json")
  return writeFile(target, content)
}
export function getPageFileFromPagesManifest(dir: string, page: string) {
const pagesManifest = getPagesManifest(dir)
const pageFile = pagesManifest[page]
if (!pageFile) {
throw new Error(`No file for page ${page}`)
}
return pageFile
}
// Read the built server-side file for `page` from `.next/server`.
export function readBlitzBuildServerPageFile(appDir: string, page: string) {
  const pageFile = getPageFileFromPagesManifest(appDir, page)
  return readFileSync(path.join(appDir, ".next", "server", pageFile), "utf8")
}
import React, { PropsWithChildren } from 'react';
import { Pressable, Text, View } from 'react-native';
import TestRenderer from 'react-test-renderer';
import styled, { ThemeProvider } from '../';
// NOTE: These tests are like the ones for Web but a "light-version" of them
// This is mostly due to the similar logic
describe('native', () => {
it('should not throw an error when called with a valid element', () => {
expect(() => styled.View``).not.toThrowError();
const FunctionalComponent = () => <View />;
class ClassComponent extends React.Component {
render() {
return <View />;
}
}
const validComps = ['View', FunctionalComponent, ClassComponent];
validComps.forEach(comp => {
expect(() => {
const Comp = styled(comp)``;
TestRenderer.create(<Comp />);
}).not.toThrowError();
});
});
it('should generate inline styles', () => {
const Comp = styled.View``;
const wrapper = TestRenderer.create(<Comp />);
const view = wrapper.root.findByType(View);
expect(view.props.style).toEqual([{}]);
});
it('should fold successive styled() wrappings', () => {
const Comp = styled.Text`
color: red;
`;
const Comp2 = styled(Comp)`
text-align: left;
`;
const wrapper = TestRenderer.create(<Comp2 />);
const view = wrapper.root.findByType(Text);
expect(view.props.style).toEqual([{ color: 'red', textAlign: 'left' }]);
});
it('folds defaultProps', () => {
const Inner = styled.View``;
Inner.defaultProps = {
theme: {
fontSize: 12,
},
style: {
background: 'blue',
textAlign: 'center',
},
};
const Outer = styled(Inner)``;
Outer.defaultProps = {
theme: {
fontSize: 16,
},
style: {
background: 'silver',
},
};
expect(Outer.defaultProps).toMatchInlineSnapshot(`
Object {
"style": Object {
"background": "silver",
"textAlign": "center",
},
"theme": Object {
"fontSize": 16,
},
}
`);
});
it('should combine inline styles and the style prop', () => {
const Comp = styled.View`
padding-top: 10px;
`;
const style = { opacity: 0.9 };
const wrapper = TestRenderer.create(<Comp style={style} />);
const view = wrapper.root.findByType(View);
expect(view.props.style).toEqual([{ paddingTop: 10 }, style]);
});
it('should allow style function prop when available as builtin', () => {
const Comp = styled.Pressable`
padding-top: 10px;
`;
const style = () => ({ opacity: 0.9 });
const wrapper = TestRenderer.create(
<Comp style={style}>
<Text>test</Text>
</Comp>
);
const view = wrapper.root.findByType(Pressable);
expect(view.props.style()).toEqual([{ paddingTop: 10 }, style()]);
});
it('should allow style function prop when available as extended', () => {
const Comp = styled(Pressable)`
padding-top: 10px;
`;
const style = () => ({ opacity: 0.9 });
const wrapper = TestRenderer.create(
<Comp style={style}>
<Text>test</Text>
</Comp>
);
const view = wrapper.root.findByType(Pressable);
expect(view.props.style()).toEqual([{ paddingTop: 10 }, style()]);
});
it('should not console.warn if a comment is seen', () => {
const oldConsoleWarn = console.warn;
console.warn = jest.fn();
try {
// eslint-disable-next-line no-unused-expressions
styled.View`
/* this is a comment */
`;
expect(console.warn).not.toHaveBeenCalled();
} finally {
console.warn = oldConsoleWarn;
}
});
// https://github.com/styled-components/styled-components/issues/1266
it('should update when props change', () => {
const Comp = styled.View`
padding-top: 5px;
opacity: ${p => p.opacity || 0};
`;
const wrapper = TestRenderer.create(<Comp opacity={0.5} />);
expect(wrapper.root.findByType(View).props.style).toEqual([{ paddingTop: 5, opacity: 0.5 }]);
wrapper.update(<Comp opacity={0.9} />);
expect(wrapper.root.findByType(View).props.style).toEqual([{ paddingTop: 5, opacity: 0.9 }]);
});
it('should forward the "as" prop if "forwardedAs" is used', () => {
const Comp = ({ as: Component = View, ...props }) => <Component {...props} />;
const Comp2 = styled(Comp)`
background: red;
`;
const wrapper = TestRenderer.create(<Comp2 forwardedAs={Text} />);
expect(wrapper.root.findByType(Text)).not.toBeUndefined();
});
describe('attrs', () => {
beforeEach(() => jest.spyOn(console, 'warn').mockImplementation(() => {}));
it('works fine with an empty object', () => {
const Comp = styled.View.attrs(() => ({}))``;
const wrapper = TestRenderer.create(<Comp />);
const view = wrapper.root.findByType(View);
expect(view.props).toEqual({
style: [{}],
});
});
it('passes simple props on', () => {
const Comp = styled.View.attrs(() => ({
test: true,
}))``;
const wrapper = TestRenderer.create(<Comp />);
const view = wrapper.root.findByType(View);
expect(view.props).toEqual({
style: [{}],
test: true,
});
});
it('calls an attr-function with context', () => {
const Comp = styled.View.attrs(p => ({
copy: p.test,
}))``;
const test = 'Put that cookie down!';
const wrapper = TestRenderer.create(<Comp test={test} />);
const view = wrapper.root.findByType(View);
expect(view.props).toEqual({
style: [{}],
copy: test,
test,
});
});
it('merges multiple calls', () => {
const Comp = styled.View.attrs(() => ({
first: 'first',
test: '_',
})).attrs(() => ({
second: 'second',
test: 'test',
}))``;
const wrapper = TestRenderer.create(<Comp />);
const view = wrapper.root.findByType(View);
expect(view.props).toEqual({
style: [{}],
first: 'first',
second: 'second',
test: 'test',
});
});
it('merges multiple fn calls', () => {
const Comp = styled.View.attrs(() => ({
first: 'first',
test: '_',
})).attrs(() => ({
second: 'second',
test: 'test',
}))``;
const wrapper = TestRenderer.create(<Comp />);
const view = wrapper.root.findByType(View);
expect(view.props).toEqual({
style: [{}],
first: 'first',
second: 'second',
test: 'test',
});
});
it('merges attrs when inheriting SC', () => {
const Parent = styled.View.attrs(() => ({
first: 'first',
}))``;
const Child = styled(Parent).attrs(() => ({
second: 'second',
}))``;
const wrapper = TestRenderer.create(<Child />);
const view = wrapper.root.findByType(View);
expect(view.props).toMatchObject({
style: [{}],
first: 'first',
second: 'second',
});
});
it('should pass through children as a normal prop', () => {
const Comp = styled.Text.attrs(() => ({
children: 'Probably a bad idea',
}))``;
const wrapper = TestRenderer.create(<Comp />);
const text = wrapper.root.findByType(Text);
expect(text.props).toMatchObject({
children: 'Probably a bad idea',
style: [{}],
});
});
it('should pass through complex children as well', () => {
const child = <Text>Probably a bad idea</Text>;
const Comp = styled.Text.attrs(() => ({
children: child,
}))``;
const wrapper = TestRenderer.create(<Comp />);
const text = wrapper.root.findByType(Text);
expect(text.props).toMatchObject({
children: child,
style: [{}],
});
});
it('should override children', () => {
const child = <Text>Amazing</Text>;
const Comp = styled.Text.attrs(() => ({
children: child,
}))``;
const wrapper = TestRenderer.create(<Comp>Something else</Comp>);
const text = wrapper.root.findByType(Text);
expect(text.props).toMatchObject({
children: child,
style: [{}],
});
});
it('accepts a function', () => {
const child = <Text>Amazing</Text>;
const Comp = styled.Text.attrs(() => ({
children: child,
}))``;
const wrapper = TestRenderer.create(<Comp>Something else</Comp>);
const text = wrapper.root.findByType(Text);
expect(text.props).toMatchObject({
children: child,
style: [{}],
});
});
it('function form allows access to theme', () => {
const Comp = styled.Text.attrs(props => ({
'data-color': props.theme.color,
}))``;
const wrapper = TestRenderer.create(
<ThemeProvider theme={{ color: 'red' }}>
<Comp>Something else</Comp>
</ThemeProvider>
);
const text = wrapper.root.findByType(Text);
expect(text.props).toMatchObject({
children: 'Something else',
'data-color': 'red',
style: [{}],
});
});
it('theme prop works', () => {
const Comp = styled.Text`
color: ${({ theme }) => theme.myColor};
`;
const wrapper = TestRenderer.create(<Comp theme={{ myColor: 'red' }}>Something else</Comp>);
const text = wrapper.root.findByType(Text);
expect(text.props.style).toMatchObject([{ color: 'red' }]);
});
it('theme in defaultProps works', () => {
const Comp = styled.Text`
color: ${({ theme }) => theme.myColor};
`;
Comp.defaultProps = { theme: { myColor: 'red' } };
const wrapper = TestRenderer.create(<Comp>Something else</Comp>);
const text = wrapper.root.findByType(Text);
expect(text.props.style).toMatchObject([{ color: 'red' }]);
});
});
describe('expanded API', () => {
  it('should attach a displayName', () => {
    const Dummy = (props: PropsWithChildren<{}>) => <View {...props} />;
    Dummy.displayName = 'Dummy';
    // Wrapped components get a derived displayName by default...
    const Wrapped = styled(Dummy)``;
    expect(Wrapped.displayName).toBe('Styled(Dummy)');
    // ...unless one is set explicitly through withConfig.
    const Configured = styled.View.withConfig({ displayName: 'Test' })``;
    expect(Configured.displayName).toBe('Test');
  });
  it('should allow multiple calls to be chained', () => {
    // When withConfig is chained, the last configuration wins.
    const Chained = styled.View.withConfig({ displayName: 'Test1' }).withConfig({
      displayName: 'Test2',
    })``;
    expect(Chained.displayName).toBe('Test2');
  });
it('withComponent should work', () => {
  const Dummy = (props: PropsWithChildren<{}>) => <View {...props} />;
  // Swapping the underlying primitive (View -> Text / custom component)
  // must keep the generated styled wrapper working.
  const Comp = styled.View.withConfig({
    displayName: 'Comp',
  })``.withComponent(Text);
  const Comp2 = styled.View.withConfig({
    displayName: 'Comp2',
  })``.withComponent(Dummy);
  expect(TestRenderer.create(<Comp />).toJSON()).toMatchInlineSnapshot(`
    <Text
      style={
        Array [
          Object {},
        ]
      }
    />
  `);
  expect(TestRenderer.create(<Comp2 />).toJSON()).toMatchInlineSnapshot(`
    <View
      style={
        Array [
          Object {},
        ]
      }
    />
  `);
});
it('"as" prop should change the rendered element without affecting the styling', () => {
// @ts-expect-error foo is expected later in the test
const OtherText = (props: PropsWithChildren<{}>) => <Text {...props} foo />;
const Comp = styled.Text`
color: red;
`;
const wrapper = TestRenderer.create(<Comp as={OtherText} />);
const view = wrapper.root.findByType(Text);
expect(view.props).toHaveProperty('foo');
expect(view.props.style).toEqual([{ color: 'red' }]);
});
it('should omit transient props', () => {
  // $-prefixed props may drive styling but must not be forwarded to the
  // underlying native element.
  const Comp = styled.Text`
    color: ${p => p.$color};
  `;
  expect(TestRenderer.create(<Comp $color="red" />).toJSON()).toMatchInlineSnapshot(`
    <Text
      style={
        Array [
          Object {
            "color": "red",
          },
        ]
      }
    />
  `);
});
it('allows for custom prop filtering for elements', () => {
  const Filtered = styled('View').withConfig({
    shouldForwardProp: prop => !['filterThis'].includes(prop),
  })`
    color: red;
  `;
  const renderer = TestRenderer.create(<Filtered filterThis="abc" passThru="def" />);
  // @ts-expect-error bs error
  const { props } = renderer.root.findByType('View');
  // Only the non-filtered prop should reach the underlying element.
  expect(props.style).toEqual([{ color: 'red' }]);
  expect(props.passThru).toBe('def');
  expect(props.filterThis).toBeUndefined();
});
it('allows custom prop filtering for components', () => {
  const InnerComp = (props: PropsWithChildren<{}>) => <View {...props} />;
  const Filtered = styled(InnerComp).withConfig({
    shouldForwardProp: prop => !['filterThis'].includes(prop),
  })`
    color: red;
  `;
  const renderer = TestRenderer.create(<Filtered filterThis="abc" passThru="def" />);
  // @ts-expect-error bs error
  const { props } = renderer.root.findByType('View');
  // Same filtering semantics when wrapping a component instead of an element.
  expect(props.style).toEqual([{ color: 'red' }]);
  expect(props.passThru).toBe('def');
  expect(props.filterThis).toBeUndefined();
});
it('composes shouldForwardProp on composed styled components', () => {
  // The inner filter only lets `passThru` through...
  const StyledView = styled.View.withConfig({
    shouldForwardProp: prop => prop === 'passThru',
  })`
    color: red;
  `;
  // ...and an outer allow-all filter must not override it.
  const ComposedView = styled(StyledView).withConfig({
    shouldForwardProp: () => true,
  })``;
  const renderer = TestRenderer.create(<ComposedView filterThis passThru />);
  const { props } = renderer.root.findByType(View);
  expect(props.passThru).toBeDefined();
  expect(props.filterThis).toBeUndefined();
});
it('shouldForwardProp argument signature', () => {
  const stub = jest.fn(() => true);
  const StyledView = styled.View.withConfig({
    shouldForwardProp: stub,
  })`
    color: red;
  `;
  TestRenderer.create(<StyledView something />);
  // The predicate is called with (propName, defaultValidator, elementToRender).
  expect(stub.mock.calls).toMatchInlineSnapshot(`
    Array [
      Array [
        "something",
        [Function],
        [Function],
      ],
    ]
  `);
  // element being created
  // @ts-expect-error bad types
  expect(stub.mock.calls[0][2]).toEqual(View);
});
it('should filter out props when using "as" to a custom component', () => {
  const AsComp = (props: PropsWithChildren<{}>) => <View {...props} />;
  const Filtered = styled.View.withConfig({
    shouldForwardProp: prop => !['filterThis'].includes(prop),
  })`
    color: red;
  `;
  const renderer = TestRenderer.create(<Filtered as={AsComp} filterThis="abc" passThru="def" />);
  const { props } = renderer.root.findByType(AsComp);
  // shouldForwardProp still applies after "as" substitution.
  expect(props.style).toEqual([{ color: 'red' }]);
  expect(props.passThru).toBe('def');
  expect(props.filterThis).toBeUndefined();
});
it('can set computed styles based on props that are being filtered out', () => {
  const AsComp = (props: PropsWithChildren<{}>) => <View {...props} />;
  // The filtered prop is still visible to style interpolations.
  const Filtered = styled.View.withConfig({
    shouldForwardProp: prop => !['filterThis'].includes(prop),
  })`
    color: ${props => (props.filterThis === 'abc' ? 'red' : undefined)};
  `;
  const renderer = TestRenderer.create(<Filtered as={AsComp} filterThis="abc" passThru="def" />);
  const { props } = renderer.root.findByType(AsComp);
  expect(props.style).toEqual([{ color: 'red' }]);
  expect(props.passThru).toBe('def');
  expect(props.filterThis).toBeUndefined();
});
it('should filter our props when using "as" to a different element', () => {
  const Filtered = styled.View.withConfig({
    shouldForwardProp: prop => !['filterThis'].includes(prop),
  })`
    color: red;
  `;
  const renderer = TestRenderer.create(<Filtered as="a" filterThis="abc" passThru="def" />);
  const { props } = renderer.root.findByType('a');
  expect(props.style).toEqual([{ color: 'red' }]);
  expect(props.passThru).toBe('def');
  expect(props.filterThis).toBeUndefined();
});
it('should prefer transient $as over as', () => {
  const OtherText = (props: PropsWithChildren<{}>) => <Text {...props} />;
  const Styled = styled.Text`
    color: red;
  `;
  // The transient $as="View" must win over as={OtherText}.
  const renderer = TestRenderer.create(<Styled $as="View" as={OtherText} />);
  // @ts-expect-error bs error
  const host = renderer.root.findByType('View');
  expect(host.props.style).toEqual([{ color: 'red' }]);
  // Nothing should have rendered through the `as` target.
  expect(() => renderer.root.findByType(Text)).toThrowError();
});
});
});
/**
 * @packageDocumentation
* @module particle
*/
import { ccclass, tooltip, displayOrder, type, formerlySerializedAs, serializable, visible, range } from 'cc.decorator';
import { Mat4, Quat, Vec2, Vec3, clamp, pingPong, random, randomRange, repeat, toDegree, toRadian } from '../../core/math';
import CurveRange from '../animator/curve-range';
import { ArcMode, EmitLocation, ShapeType } from '../enum';
import { fixedAngleUnitVector2, particleEmitZAxis, randomPointBetweenCircleAtFixedAngle, randomPointBetweenSphere,
randomPointInCube, randomSign, randomSortArray, randomUnitVector } from '../particle-general-function';
import { ParticleSystem } from '../particle-system';
// Scratch vector reused across emit calls to avoid per-particle allocations.
const _intermediVec = new Vec3(0, 0, 0);
// Scratch array reused by boxEmit when shuffling face/edge coordinates.
const _intermediArr: number[] = [];
// Half-extents of the canonical unit box emitter (scaled via the module's matrix).
const _unitBoxExtent = new Vec3(0.5, 0.5, 0.5);
@ccclass('cc.ShapeModule')
export default class ShapeModule {
    /**
     * @en Emitter position, local to the particle system.
     * @zh 粒子发射器位置。
     */
    @displayOrder(13)
    @tooltip('i18n:shapeModule.position')
    get position () {
        return this._position;
    }
    set position (val) {
        this._position = val;
        this.constructMat(); // keep the cached emitter transform in sync
    }
    /**
     * @en Emitter rotation (Euler angles, degrees).
     * @zh 粒子发射器旋转角度。
     */
    @displayOrder(14)
    @tooltip('i18n:shapeModule.rotation')
    get rotation () {
        return this._rotation;
    }
    set rotation (val) {
        this._rotation = val;
        this.constructMat(); // keep the cached emitter transform in sync
    }
    /**
     * @en Emitter scale.
     * @zh 粒子发射器缩放比例。
     */
    @displayOrder(15)
    @tooltip('i18n:shapeModule.scale')
    get scale () {
        return this._scale;
    }
    set scale (val) {
        this._scale = val;
        this.constructMat(); // keep the cached emitter transform in sync
    }
    /**
     * @en The emitter emits within a fan-shaped arc. Exposed in degrees,
     * stored internally in radians.
     * @zh 粒子发射器在一个扇形范围内发射。
     */
    @displayOrder(6)
    @tooltip('i18n:shapeModule.arc')
    get arc () {
        return toDegree(this._arc);
    }
    set arc (val) {
        this._arc = toRadian(val);
    }
    /**
     * @en Angle between the cone's axis and its generatrix; determines how
     * wide the cone emitter opens. Rounded to two decimals for display.
     * @zh 圆锥的轴与母线的夹角<bg>。
     * 决定圆锥发射器的开合程度。
     */
    @displayOrder(5)
    @tooltip('i18n:shapeModule.angle')
    get angle () {
        return Math.round(toDegree(this._angle) * 100) / 100;
    }
    set angle (val) {
        this._angle = toRadian(val);
    }
    @serializable
    private _enable = false;
    /**
     * @en Whether this module is enabled.
     * @zh 是否启用。
     */
    @displayOrder(0)
    public get enable () {
        return this._enable;
    }
    public set enable (val) {
        this._enable = val;
    }
    /**
     * @en Emitter shape type [[ShapeType]].
     * @zh 粒子发射器类型 [[ShapeType]]。
     */
    @type(ShapeType)
    @formerlySerializedAs('shapeType')
    @displayOrder(1)
    public _shapeType = ShapeType.Cone;
    @type(ShapeType)
    @tooltip('i18n:shapeModule.shapeType')
    public get shapeType () {
        return this._shapeType;
    }
    public set shapeType (val) {
        this._shapeType = val;
        // Normalize emitFrom to a location the new shape actually supports.
        switch (this._shapeType) {
            case ShapeType.Box:
                if (this.emitFrom === EmitLocation.Base) {
                    this.emitFrom = EmitLocation.Volume;
                }
                break;
            case ShapeType.Cone:
                if (this.emitFrom === EmitLocation.Edge) {
                    this.emitFrom = EmitLocation.Base;
                }
                break;
            case ShapeType.Sphere:
            case ShapeType.Hemisphere:
                if (this.emitFrom === EmitLocation.Base || this.emitFrom === EmitLocation.Edge) {
                    this.emitFrom = EmitLocation.Volume;
                }
                break;
            default:
                break;
        }
    }
    /**
     * @en Which part of the emitter particles spawn from [[EmitLocation]].
     * @zh 粒子从发射器哪个部位发射 [[EmitLocation]]。
     */
    @type(EmitLocation)
    @serializable
    @displayOrder(2)
    @tooltip('i18n:shapeModule.emitFrom')
    public emitFrom = EmitLocation.Volume;
    /**
     * @en Align each particle's movement to its initial direction.
     * @zh 根据粒子的初始方向决定粒子的移动方向。
     */
    @serializable
    @displayOrder(16)
    @tooltip('i18n:shapeModule.alignToDirection')
    public alignToDirection = false;
    /**
     * @en Amount of randomization applied to the spawn direction.
     * @zh 粒子生成方向随机设定。
     */
    @serializable
    @displayOrder(17)
    @tooltip('i18n:shapeModule.randomDirectionAmount')
    public randomDirectionAmount = 0;
    /**
     * @en Interpolation between the emit direction and the direction from the
     * node center to the spawn position.
     * @zh 表示当前发射方向与当前位置到结点中心连线方向的插值。
     */
    @serializable
    @displayOrder(18)
    @tooltip('i18n:shapeModule.sphericalDirectionAmount')
    public sphericalDirectionAmount = 0;
    /**
     * @en Random offset applied to the spawn position (a non-zero value may
     * place particles outside the emitter bounds).
     * @zh 粒子生成位置随机设定(设定此值为非 0 会使粒子生成位置超出生成器大小范围)。
     */
    @serializable
    @displayOrder(19)
    @tooltip('i18n:shapeModule.randomPositionAmount')
    public randomPositionAmount = 0;
    /**
     * @en Emitter radius.
     * @zh 粒子发射器半径。
     */
    @serializable
    @displayOrder(3)
    @tooltip('i18n:shapeModule.radius')
    public radius = 1;
    /**
     * @en Where across the radius particles spawn (ignored by Box emitters):
     * - 0 emits from the surface;
     * - 1 emits from the center;
     * - values in between emit between the center and the surface.
     * @zh 粒子发射器发射位置(对 Box 类型的发射器无效):<bg>
     * - 0 表示从表面发射;
     * - 1 表示从中心发射;
     * - 0 ~ 1 之间表示在中心到表面之间发射。
     */
    @serializable
    @displayOrder(4)
    @tooltip('i18n:shapeModule.radiusThickness')
    public radiusThickness = 1;
    /**
     * @en How particles are emitted along the arc [[ArcMode]].
     * @zh 粒子在扇形范围内的发射方式 [[ArcMode]]。
     */
    @type(ArcMode)
    @serializable
    @displayOrder(7)
    @tooltip('i18n:shapeModule.arcMode')
    public arcMode = ArcMode.Random;
    /**
     * @en Discrete spawn interval around the arc.
     * @zh 控制可能产生粒子的弧周围的离散间隔。
     */
    @visible(function noArc (this: ShapeModule) { return this.arcMode !== ArcMode.Random; }) // Bug fix: Hide this input when arcMode is random
    @serializable
    @displayOrder(9)
    @tooltip('i18n:shapeModule.arcSpread')
    public arcSpread = 0;
    /**
     * @en Speed at which the emit angle travels around the circle.
     * @zh 粒子沿圆周发射的速度。
     */
    @type(CurveRange)
    @visible(function noArc (this: ShapeModule) { return this.arcMode !== ArcMode.Random; }) // Bug fix: Hide this input when arcMode is random
    @range([0, 1])
    @serializable
    @displayOrder(10)
    @tooltip('i18n:shapeModule.arcSpeed')
    public arcSpeed = new CurveRange();
    /**
     * @en Axial distance from the cone's top section to its base; determines
     * the cone emitter's height.
     * @zh 圆锥顶部截面距离底部的轴长<bg>。
     * 决定圆锥发射器的高度。
     */
    @serializable
    @displayOrder(11)
    @tooltip('i18n:shapeModule.length')
    public length = 5;
    /**
     * @en Emission position for Box-type emitters.
     * @zh 粒子发射器发射位置(针对 Box 类型的粒子发射器)。
     */
    @serializable
    @displayOrder(12)
    @tooltip('i18n:shapeModule.boxThickness')
    public boxThickness = new Vec3(0, 0, 0);
    @serializable
    private _position = new Vec3(0, 0, 0);
    @serializable
    private _rotation = new Vec3(0, 0, 0);
    @serializable
    private _scale = new Vec3(1, 1, 1);
    @serializable
    private _arc = toRadian(360);
    @serializable
    private _angle = toRadian(25);
    // Cached emitter transform (rebuilt by constructMat whenever
    // position/rotation/scale change).
    private mat: Mat4;
    private quat: Quat;
    private particleSystem: any;
    // Particle-system time at the previous emit; used to advance the arc angle.
    private lastTime: number;
    // Accumulated arc angle for Loop/PingPong arc modes.
    private totalAngle: number;
    constructor () {
        this.mat = new Mat4();
        this.quat = new Quat();
        this.particleSystem = null;
        this.lastTime = 0;
        this.totalAngle = 0;
    }
    public onInit (ps: ParticleSystem) {
        this.particleSystem = ps;
        this.constructMat();
        this.lastTime = this.particleSystem._time;
    }
    /**
     * Compute the spawn position and initial velocity direction for one
     * particle (written into `p.position` / `p.velocity` in emitter space),
     * then transform both into the particle system's local space.
     */
    public emit (p) {
        switch (this.shapeType) {
            case ShapeType.Box:
                boxEmit(this.emitFrom, this.boxThickness, p.position, p.velocity);
                break;
            case ShapeType.Circle:
                circleEmit(this.radius, this.radiusThickness, this.generateArcAngle(), p.position, p.velocity);
                break;
            case ShapeType.Cone:
                coneEmit(this.emitFrom, this.radius, this.radiusThickness, this.generateArcAngle(), this._angle, this.length, p.position, p.velocity);
                break;
            case ShapeType.Sphere:
                sphereEmit(this.emitFrom, this.radius, this.radiusThickness, p.position, p.velocity);
                break;
            case ShapeType.Hemisphere:
                hemisphereEmit(this.emitFrom, this.radius, this.radiusThickness, p.position, p.velocity);
                break;
            default:
                console.warn(`${this.shapeType} shapeType is not supported by ShapeModule.`);
        }
        if (this.randomPositionAmount > 0) {
            p.position.x += randomRange(-this.randomPositionAmount, this.randomPositionAmount);
            p.position.y += randomRange(-this.randomPositionAmount, this.randomPositionAmount);
            p.position.z += randomRange(-this.randomPositionAmount, this.randomPositionAmount);
        }
        // Velocity only rotates; position gets the full RTS transform.
        Vec3.transformQuat(p.velocity, p.velocity, this.quat);
        Vec3.transformMat4(p.position, p.position, this.mat);
        if (this.sphericalDirectionAmount > 0) {
            // Blend toward the direction from the node center to the spawn point.
            const sphericalVel = Vec3.normalize(_intermediVec, p.position);
            Vec3.lerp(p.velocity, p.velocity, sphericalVel, this.sphericalDirectionAmount);
        }
        this.lastTime = this.particleSystem._time;
    }
    // Rebuild the cached rotation quaternion and RTS matrix.
    private constructMat () {
        Quat.fromEuler(this.quat, this._rotation.x, this._rotation.y, this._rotation.z);
        Mat4.fromRTS(this.mat, this.quat, this._position, this._scale);
    }
    // Pick the polar angle (radians) for the next arc-based spawn.
    private generateArcAngle () {
        if (this.arcMode === ArcMode.Random) {
            return randomRange(0, this._arc);
        }
        // Advance by arcSpeed revolutions per second over the elapsed time.
        let angle = this.totalAngle + 2 * Math.PI * this.arcSpeed.evaluate(this.particleSystem._time, 1)! * (this.particleSystem._time - this.lastTime);
        this.totalAngle = angle;
        if (this.arcSpread !== 0) {
            // Quantize to discrete steps of (arc * arcSpread).
            angle = Math.floor(angle / (this._arc * this.arcSpread)) * this._arc * this.arcSpread;
        }
        switch (this.arcMode) {
            case ArcMode.Loop:
                return repeat(angle, this._arc);
            case ArcMode.PingPong:
                return pingPong(angle, this._arc);
            default:
                return repeat(angle, this._arc);
        }
    }
}
/**
 * Sample a spawn position and outward direction for a sphere emitter.
 * `pos` and `dir` are written in place; `dir` always points radially
 * away from the emitter center.
 */
function sphereEmit (emitFrom, radius, radiusThickness, pos, dir) {
    if (emitFrom === EmitLocation.Volume) {
        // Inside the shell delimited by the thickness setting.
        randomPointBetweenSphere(pos, radius * (1 - radiusThickness), radius);
        Vec3.normalize(dir, pos);
    } else if (emitFrom === EmitLocation.Shell) {
        // Exactly on the sphere surface.
        randomUnitVector(pos);
        Vec3.multiplyScalar(pos, pos, radius);
        Vec3.normalize(dir, pos);
    } else {
        console.warn(`${emitFrom} is not supported for sphere emitter.`);
    }
}
/**
 * Sample a spawn position and outward direction for a hemisphere emitter.
 * Samples are mirrored onto the -Z half-space, matching the original code.
 */
function hemisphereEmit (emitFrom, radius, radiusThickness, pos, dir) {
    // Keep the sample on the emitter's half of the sphere.
    const mirrorToNegativeZ = () => {
        if (pos.z > 0) {
            pos.z *= -1;
        }
    };
    if (emitFrom === EmitLocation.Volume) {
        randomPointBetweenSphere(pos, radius * (1 - radiusThickness), radius);
        mirrorToNegativeZ();
        Vec3.normalize(dir, pos);
    } else if (emitFrom === EmitLocation.Shell) {
        randomUnitVector(pos);
        Vec3.multiplyScalar(pos, pos, radius);
        mirrorToNegativeZ();
        Vec3.normalize(dir, pos);
    } else {
        console.warn(`${emitFrom} is not supported for hemisphere emitter.`);
    }
}
/**
 * Sample a spawn position/direction for a cone emitter.
 * `theta` is the polar angle on the base circle, `angle` the half-angle
 * between the cone axis and its side, `length` the cone height.
 * The cone opens toward -Z (see particleEmitZAxis usage in boxEmit).
 */
function coneEmit (emitFrom, radius, radiusThickness, theta, angle, length, pos, dir) {
    switch (emitFrom) {
        case EmitLocation.Base:
            randomPointBetweenCircleAtFixedAngle(pos, radius * (1 - radiusThickness), radius, theta);
            Vec2.multiplyScalar(dir, pos, Math.sin(angle));
            // NOTE(review): Base/Volume scale dir.z by `radius` while Shell does
            // not; this matches the upstream implementation — confirm intended.
            dir.z = -Math.cos(angle) * radius;
            Vec3.normalize(dir, dir);
            pos.z = 0;
            break;
        case EmitLocation.Shell:
            fixedAngleUnitVector2(pos, theta);
            Vec2.multiplyScalar(dir, pos, Math.sin(angle));
            dir.z = -Math.cos(angle);
            Vec3.normalize(dir, dir);
            Vec2.multiplyScalar(pos, pos, radius);
            pos.z = 0;
            break;
        case EmitLocation.Volume:
            randomPointBetweenCircleAtFixedAngle(pos, radius * (1 - radiusThickness), radius, theta);
            Vec2.multiplyScalar(dir, pos, Math.sin(angle));
            dir.z = -Math.cos(angle) * radius;
            Vec3.normalize(dir, dir);
            pos.z = 0;
            // Push the point along dir so spawns fill the cone's height.
            Vec3.add(pos, pos, Vec3.multiplyScalar(_intermediVec, dir, length * random() / -dir.z));
            break;
        default:
            console.warn(`${emitFrom} is not supported for cone emitter.`);
    }
}
function boxEmit (emitFrom, boxThickness, pos, dir) {
switch (emitFrom) {
case EmitLocation.Volume:
randomPointInCube(pos, _unitBoxExtent);
// randomPointBetweenCube(pos, vec3.multiply(_intermediVec, _unitBoxExtent, boxThickness), _unitBoxExtent);
break;
case EmitLocation.Shell:
_intermediArr.splice(0, _intermediArr.length);
_intermediArr.push(randomRange(-0.5, 0.5));
_intermediArr.push(randomRange(-0.5, 0.5));
_intermediArr.push(randomSign() * 0.5);
randomSortArray(_intermediArr);
applyBoxThickness(_intermediArr, boxThickness);
Vec3.set(pos, _intermediArr[0], _intermediArr[1], _intermediArr[2]);
break;
case EmitLocation.Edge:
_intermediArr.splice(0, _intermediArr.length);
_intermediArr.push(randomRange(-0.5, 0.5));
_intermediArr.push(randomSign() * 0.5);
_intermediArr.push(randomSign() * 0.5);
randomSortArray(_intermediArr);
applyBoxThickness(_intermediArr, boxThickness);
Vec3.set(pos, _intermediArr[0], _intermediArr[1], _intermediArr[2]);
break;
default:
console.warn(`${emitFrom} is not supported for box emitter.`);
}
Vec3.copy(dir, particleEmitZAxis);
}
/**
 * Sample a spawn point on/inside a circle at polar angle `theta`;
 * the emit direction points radially outward from the center.
 */
function circleEmit (radius, radiusThickness, theta, pos, dir) {
    const innerRadius = radius * (1 - radiusThickness);
    randomPointBetweenCircleAtFixedAngle(pos, innerRadius, radius, theta);
    Vec3.normalize(dir, pos);
}
function applyBoxThickness (pos, thickness) {
if (thickness.x > 0) {
pos[0] += 0.5 * randomRange(-thickness.x, thickness.x);
pos[0] = clamp(pos[0], -0.5, 0.5);
}
if (thickness.y > 0) {
pos[1] += 0.5 * randomRange(-thickness.y, thickness.y);
pos[1] = clamp(pos[1], -0.5, 0.5);
}
if (thickness.z > 0) {
pos[2] += 0.5 * randomRange(-thickness.z, thickness.z);
pos[2] = clamp(pos[2], -0.5, 0.5);
}
} | the_stack |
import * as pulumi from "@pulumi/pulumi";
import * as utilities from "../utilities";
/**
* Persistent disks can be attached to a compute instance using the `attachedDisk`
* section within the compute instance configuration.
* However there may be situations where managing the attached disks via the compute
* instance config isn't preferable or possible, such as attaching dynamic
* numbers of disks using the `count` variable.
*
* To get more information about attaching disks, see:
*
* * [API documentation](https://cloud.google.com/compute/docs/reference/rest/v1/instances/attachDisk)
* * How-to Guides
* * [Adding a persistent disk](https://cloud.google.com/compute/docs/disks/add-persistent-disk)
*
* **Note:** When using `gcp.compute.AttachedDisk` you **must** use `lifecycle.ignore_changes = ["attachedDisk"]` on the `gcp.compute.Instance` resource that has the disks attached. Otherwise the two resources will fight for control of the attached disk block.
*
* ## Example Usage
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const defaultInstance = new gcp.compute.Instance("defaultInstance", {
* machineType: "e2-medium",
* zone: "us-west1-a",
* bootDisk: {
* initializeParams: {
* image: "debian-cloud/debian-9",
* },
* },
* networkInterfaces: [{
* network: "default",
* }],
* });
* const defaultAttachedDisk = new gcp.compute.AttachedDisk("defaultAttachedDisk", {
* disk: google_compute_disk["default"].id,
* instance: defaultInstance.id,
* });
* ```
*
* ## Import
*
 * Attached Disk can be imported in the following ways
*
* ```sh
* $ pulumi import gcp:compute/attachedDisk:AttachedDisk default projects/{{project}}/zones/{{zone}}/instances/{{instance.name}}/{{disk.name}}
* ```
*
* ```sh
* $ pulumi import gcp:compute/attachedDisk:AttachedDisk default {{project}}/{{zone}}/{{instance.name}}/{{disk.name}}
* ```
*/
export class AttachedDisk extends pulumi.CustomResource {
    /**
     * Get an existing AttachedDisk resource's state with the given name, ID, and optional extra
     * properties used to qualify the lookup.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    public static get(name: string, id: pulumi.Input<pulumi.ID>, state?: AttachedDiskState, opts?: pulumi.CustomResourceOptions): AttachedDisk {
        // Passing an id through opts makes the constructor take the lookup path.
        return new AttachedDisk(name, <any>state, { ...opts, id: id });
    }
    /** @internal */
    public static readonly __pulumiType = 'gcp:compute/attachedDisk:AttachedDisk';
    /**
     * Returns true if the given object is an instance of AttachedDisk. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    public static isInstance(obj: any): obj is AttachedDisk {
        if (obj === undefined || obj === null) {
            return false;
        }
        // Compare by type token rather than instanceof (cross-SDK safe).
        return obj['__pulumiType'] === AttachedDisk.__pulumiType;
    }
    /**
     * Specifies a unique device name of your choice that is
     * reflected into the /dev/disk/by-id/google-* tree of a Linux operating
     * system running within the instance. This name can be used to
     * reference the device for mounting, resizing, and so on, from within
     * the instance.
     */
    public readonly deviceName!: pulumi.Output<string>;
    /**
     * `name` or `selfLink` of the disk that will be attached.
     */
    public readonly disk!: pulumi.Output<string>;
    /**
     * `name` or `selfLink` of the compute instance that the disk will be attached to.
     * If the `selfLink` is provided then `zone` and `project` are extracted from the
     * self link. If only the name is used then `zone` and `project` must be defined
     * as properties on the resource or provider.
     */
    public readonly instance!: pulumi.Output<string>;
    /**
     * The mode in which to attach this disk, either READ_WRITE or
     * READ_ONLY. If not specified, the default is to attach the disk in
     * READ_WRITE mode.
     */
    public readonly mode!: pulumi.Output<string | undefined>;
    /**
     * The project that the referenced compute instance is a part of. If `instance` is referenced by its
     * `selfLink` the project defined in the link will take precedence.
     */
    public readonly project!: pulumi.Output<string>;
    /**
     * The zone that the referenced compute instance is located within. If `instance` is referenced by its
     * `selfLink` the zone defined in the link will take precedence.
     */
    public readonly zone!: pulumi.Output<string>;
    /**
     * Create a AttachedDisk resource with the given unique name, arguments, and options.
     *
     * @param name The _unique_ name of the resource.
     * @param args The arguments to use to populate this resource's properties.
     * @param opts A bag of options that control this resource's behavior.
     */
    constructor(name: string, args: AttachedDiskArgs, opts?: pulumi.CustomResourceOptions)
    constructor(name: string, argsOrState?: AttachedDiskArgs | AttachedDiskState, opts?: pulumi.CustomResourceOptions) {
        let inputs: pulumi.Inputs = {};
        opts = opts || {};
        if (opts.id) {
            // Lookup path (AttachedDisk.get): hydrate inputs from existing state.
            const state = argsOrState as AttachedDiskState | undefined;
            inputs["deviceName"] = state ? state.deviceName : undefined;
            inputs["disk"] = state ? state.disk : undefined;
            inputs["instance"] = state ? state.instance : undefined;
            inputs["mode"] = state ? state.mode : undefined;
            inputs["project"] = state ? state.project : undefined;
            inputs["zone"] = state ? state.zone : undefined;
        } else {
            // Creation path: validate required args before registering.
            const args = argsOrState as AttachedDiskArgs | undefined;
            if ((!args || args.disk === undefined) && !opts.urn) {
                throw new Error("Missing required property 'disk'");
            }
            if ((!args || args.instance === undefined) && !opts.urn) {
                throw new Error("Missing required property 'instance'");
            }
            inputs["deviceName"] = args ? args.deviceName : undefined;
            inputs["disk"] = args ? args.disk : undefined;
            inputs["instance"] = args ? args.instance : undefined;
            inputs["mode"] = args ? args.mode : undefined;
            inputs["project"] = args ? args.project : undefined;
            inputs["zone"] = args ? args.zone : undefined;
        }
        if (!opts.version) {
            // Default to the SDK's pinned provider version.
            opts = pulumi.mergeOptions(opts, { version: utilities.getVersion()});
        }
        super(AttachedDisk.__pulumiType, name, inputs, opts);
    }
}
/**
 * Input properties used for looking up and filtering AttachedDisk resources.
 * All fields are optional because a state lookup may be partial.
 */
export interface AttachedDiskState {
    /**
     * Specifies a unique device name of your choice that is
     * reflected into the /dev/disk/by-id/google-* tree of a Linux operating
     * system running within the instance. This name can be used to
     * reference the device for mounting, resizing, and so on, from within
     * the instance.
     */
    deviceName?: pulumi.Input<string>;
    /**
     * `name` or `selfLink` of the disk that will be attached.
     */
    disk?: pulumi.Input<string>;
    /**
     * `name` or `selfLink` of the compute instance that the disk will be attached to.
     * If the `selfLink` is provided then `zone` and `project` are extracted from the
     * self link. If only the name is used then `zone` and `project` must be defined
     * as properties on the resource or provider.
     */
    instance?: pulumi.Input<string>;
    /**
     * The mode in which to attach this disk, either READ_WRITE or
     * READ_ONLY. If not specified, the default is to attach the disk in
     * READ_WRITE mode.
     */
    mode?: pulumi.Input<string>;
    /**
     * The project that the referenced compute instance is a part of. If `instance` is referenced by its
     * `selfLink` the project defined in the link will take precedence.
     */
    project?: pulumi.Input<string>;
    /**
     * The zone that the referenced compute instance is located within. If `instance` is referenced by its
     * `selfLink` the zone defined in the link will take precedence.
     */
    zone?: pulumi.Input<string>;
}
/**
 * The set of arguments for constructing a AttachedDisk resource.
 * `disk` and `instance` are required; the constructor enforces this.
 */
export interface AttachedDiskArgs {
    /**
     * Specifies a unique device name of your choice that is
     * reflected into the /dev/disk/by-id/google-* tree of a Linux operating
     * system running within the instance. This name can be used to
     * reference the device for mounting, resizing, and so on, from within
     * the instance.
     */
    deviceName?: pulumi.Input<string>;
    /**
     * `name` or `selfLink` of the disk that will be attached.
     */
    disk: pulumi.Input<string>;
    /**
     * `name` or `selfLink` of the compute instance that the disk will be attached to.
     * If the `selfLink` is provided then `zone` and `project` are extracted from the
     * self link. If only the name is used then `zone` and `project` must be defined
     * as properties on the resource or provider.
     */
    instance: pulumi.Input<string>;
    /**
     * The mode in which to attach this disk, either READ_WRITE or
     * READ_ONLY. If not specified, the default is to attach the disk in
     * READ_WRITE mode.
     */
    mode?: pulumi.Input<string>;
    /**
     * The project that the referenced compute instance is a part of. If `instance` is referenced by its
     * `selfLink` the project defined in the link will take precedence.
     */
    project?: pulumi.Input<string>;
    /**
     * The zone that the referenced compute instance is located within. If `instance` is referenced by its
     * `selfLink` the zone defined in the link will take precedence.
     */
    zone?: pulumi.Input<string>;
}
import '@/style/masonry.less'
import { CONSTANTS } from '@/util/common'
import PageTitle from '@/components/PageTitle'
import applicationService from '@/models/application/service'
import { getTime } from '@/util/timestamp'
import DataSet from '@antv/data-set/lib/data-set'
import '@antv/data-set/lib/transform/fold'
import '@antv/data-set/lib/transform/percent'
import { Button, Col, Dropdown, Icon, Layout, Menu, Row, Tooltip as AntTooltip } from 'antd'
import _ from 'lodash'
import React from 'react'
import { RouteComponentProps } from 'react-router'
import { Axis, Bar, Chart, Coord, Legend, Line, Pie, Point, Tooltip } from 'viser-react'
import AppTbs from '@/components/appTbs'
// One slice of the CPU-usage pie chart (series label + raw value).
interface ICUPRate {
  item: string
  count: number
}
// One bar-chart sample of heap / non-heap usage (percent values).
interface IMemoryRate {
  heap: number
  nonheap: number
}
// One GC sample; counts/times are kept as strings as delivered by the
// metrics endpoint. `timeText` is the hh:mm:ss sample label.
interface IGarbageGC {
  marksweepCount: string
  marksweepTime: string
  scavengeCount: string
  scavengeTime: string
  timeText: string
}
// One heap-memory sample; values are megabytes formatted as strings
// (see formatBytes).
interface IMetric {
  timeText: string
  committed: string
  used: string
  init: string
  max: string
}
// Component state: rolling sample windows for each chart.
interface IJvmState {
  metrics: IMetric[]
  more: boolean // NOTE(review): not read in the visible code — confirm usage
  timer?: NodeJS.Timeout // NOTE(review): the polling handle is kept on the instance, not state — confirm this field is needed
  garbageGc: IGarbageGC[]
  statusGc: boolean // true = show GC counts, false = show GC times
  memoryRate: IMemoryRate[]
  cupRate: ICUPRate[]
  timeText: string // label of the currently selected time window
  frequency: number // number of 5-second samples kept (window length)
}
class Jvm extends React.Component<RouteComponentProps<{ id: string }>, IJvmState> {
public state: IJvmState = {
  metrics: [], // rolling heap-memory samples
  garbageGc: [], // rolling GC samples
  memoryRate: [], // latest heap/non-heap usage percentages
  cupRate: [], // latest CPU usage pie data
  more: true,
  statusGc: true, // default GC view: counts
  timeText: '1分钟',
  frequency: 12, // 12 samples * 5 s poll = 1-minute window
}
// Polling handle created in componentDidMount (setInterval, 5 s period).
private timer?: NodeJS.Timeout
// GC series folded into the chart; toggled by countGC() / timeGC().
private fieldsGc: string[] = ['marksweepCount', 'scavengeCount']
// Dropdown for picking the time window; the numeric argument is how many
// 5-second samples to keep (12 ≈ 1 minute, 720 ≈ 1 hour).
// NOTE(review): changTime is defined outside this view — verify it returns
// a click handler that updates timeText/frequency.
public menu = () => (
  <Menu>
    <Menu.Item onClick={this.changTime('1分钟', 12)}>1分钟</Menu.Item>
    <Menu.Item onClick={this.changTime('5分钟', 60)}>5分钟</Menu.Item>
    <Menu.Item onClick={this.changTime('10分钟', 120)}>10分钟</Menu.Item>
    <Menu.Item onClick={this.changTime('30分钟', 360)}>30分钟</Menu.Item>
    <Menu.Item onClick={this.changTime('1小时', 720)}>1小时</Menu.Item>
  </Menu>
)
// Stop polling when the page unmounts.
public componentWillUnmount = () => {
  if (this.timer) {
    // The handle was created with setInterval (componentDidMount), so the
    // correct cancellation API is clearInterval, not clearTimeout.
    clearInterval(this.timer)
    this.timer = undefined
    // Neutralize any in-flight fetch callback so it cannot call setState
    // on an unmounted component.
    this.updateMetrics = () => null
  }
}
// Fetch metrics immediately, then poll the endpoint every 5 seconds.
public componentDidMount() {
  this.updateMetrics(this.props.match.params.id)
  this.timer = setInterval(() => {
    this.updateMetrics(this.props.match.params.id)
  }, 5000)
}
public render() {
  const { Content } = Layout
  const { metrics } = this.state
  // Memory-usage line chart: fold the four MB series into (city, temperature)
  // pairs for viser (field name -> 'city', value -> 'temperature').
  const dv = new DataSet.View().source(metrics)
  dv.transform({
    type: 'fold',
    fields: ['committed', 'used', 'init', 'max'],
    key: 'city',
    value: 'temperature',
  })
  const data = dv.rows
  const scale = [
    {
      dataKey: 'timeText',
      min: 0,
      max: 1,
      tickCount: 4,
    },
    {
      dataKey: 'temperature',
      type: 'linear',
    },
  ]
  // Garbage collection (GC) dataset: fold whichever series pair is active
  // (counts or times, chosen via this.fieldsGc).
  const garbageGcDv = new DataSet.View().source(this.state.garbageGc)
  garbageGcDv.transform({
    type: 'fold',
    fields: this.fieldsGc,
    key: 'city',
    value: 'temperature',
  })
  const garbageGcDvData = garbageGcDv.rows
  const garbageGcScale = [
    {
      // NOTE(review): empty dataKey looks unintentional — presumably meant
      // to be 'timeText' like the memory chart; confirm.
      dataKey: '',
      min: 0,
      max: 1,
      tickCount: 4,
    },
    {
      dataKey: 'temperature',
      type: 'linear',
    },
  ]
  // Memory usage rate (heap / non-heap percentages) for the bar chart.
  const memoryRateDv = new DataSet.View().source(this.state.memoryRate)
  memoryRateDv.transform({
    type: 'fold',
    fields: ['heap', 'nonheap'],
    key: 'name',
    value: 'memoryUsage',
  })
  const memoryRateData = memoryRateDv.rows
  // CPU usage rate pie: convert raw counts into percentages.
  const cpuscale = [
    {
      dataKey: 'percent',
      min: 0,
      formatter: '.0%',
    },
  ]
  const cpudv = new DataSet.View().source(this.state.cupRate)
  cpudv.transform({
    type: 'percent',
    field: 'count',
    dimension: 'item',
    as: 'percent',
  })
  const cpudata = cpudv.rows
  return (
    <React.Fragment>
      <Row>
        <Col span={24}>
          <AppTbs MenuData={this.props} />
        </Col>
      </Row>
      <Layout className="page page-jvm">
        <Row>
          <Col span={24}>
            <PageTitle
              name="JVM"
              info="实时展示应用实例的内存使用情况,GC次数,以及CPU和内存使用率"
              rightPanelExtra={
                <AntTooltip
                  title="heap dump 日志中会包含敏感信息
                  同时会占用实例的 CPU 硬盘, 需谨慎使用该功能">
                  <a href={applicationService.heapDownloadUrl(this.props.match.params.id)}>
                    <Button className="btn-download-dump" icon="download">
                      堆 Dump
                    </Button>
                  </a>
                </AntTooltip>
              }
            />
          </Col>
        </Row>
        <Content>
          <div className="antvMain">
            <Row type="flex" align="middle" className="selectNode">
              <Col push={2} span={12}>
                <Icon type="bars" /> 时间范围
              </Col>
              <Col push={2} span={9}>
                <Dropdown overlay={this.menu()} placement="bottomLeft">
                  <Button size="small">{this.state.timeText}</Button>
                </Dropdown>
              </Col>
            </Row>
            <div className="sectionBj">
              <div style={{ width: '55%', float: 'left' }}>
                <span className="title" style={{ paddingLeft: '25px' }}>
                  内存使用(单位: MB)
                </span>
                <Chart forceFit={true} height={400} data={data} scale={scale}>
                  <Tooltip />
                  <Axis />
                  <Legend />
                  <Line
                    position="timeText*temperature"
                    color={['city', CONSTANTS.CHART_COLORS]}
                  />
                  <Point
                    position="timeText*temperature"
                    color={['city', CONSTANTS.CHART_COLORS]}
                    size={4}
                    style={{ stroke: '#fff', lineWidth: 1 }}
                    shape="circle"
                  />
                </Chart>
              </div>
              <div style={{ width: '40%', float: 'right' }}>
                <span className="title">内存使用率(单位: %)</span>
                <Chart forceFit={true} height={400} data={memoryRateData}>
                  <Tooltip />
                  <Axis />
                  <Legend />
                  <Bar position="name*memoryUsage" color={['name', CONSTANTS.CHART_COLORS]} />
                </Chart>
              </div>
              <div style={{ clear: 'both' }} />
            </div>
            <div className="sectionBj">
              <div style={{ width: '55%', float: 'left' }}>
                <span className="title" style={{ paddingLeft: '25px' }}>
                  cpu使用率
                </span>
                <Chart forceFit={true} height={400} data={cpudata} scale={cpuscale}>
                  <Tooltip showTitle={false} />
                  <Coord type="theta" />
                  <Axis />
                  <Legend dataKey="item" />
                  <Pie
                    position="percent"
                    color={['item', [CONSTANTS.CHART_COLOR.RED, CONSTANTS.CHART_COLOR.BLUE]]}
                    style={{ stroke: '#fff', lineWidth: 1 }}
                    label={[
                      'percent',
                      {
                        formatter: (val: any, item: any) => {
                          return item.point.item + ': ' + val
                        },
                      },
                    ]}
                  />
                </Chart>
              </div>
              <div style={{ width: '40%', float: 'right' }}>
                <span className="title">
                  垃圾回收(GC)
                  <i
                    className={!this.state.statusGc ? 'btnI active' : 'btnI'}
                    onClick={this.timeGC}>
                    时间 ms
                  </i>
                  <i
                    className={this.state.statusGc ? 'btnI active' : 'btnI'}
                    onClick={this.countGC}>
                    次数
                  </i>
                </span>
                <Chart forceFit={true} height={400} data={garbageGcDvData} scale={garbageGcScale}>
                  <Tooltip />
                  <Axis />
                  <Legend />
                  <Line
                    position="timeText*temperature"
                    color={['city', CONSTANTS.CHART_COLORS]}
                  />
                  <Point
                    position="timeText*temperature"
                    color={['city', CONSTANTS.CHART_COLORS]}
                    size={4}
                    style={{ stroke: '#fff', lineWidth: 1 }}
                    shape="circle"
                  />
                </Chart>
              </div>
              <div style={{ clear: 'both' }} />
            </div>
          </div>
        </Content>
      </Layout>
    </React.Fragment>
  )
}
// Convert a byte count (delivered as a string) into megabytes, fixed to two
// decimals, returned as a string for the chart datasets.
private formatBytes = (byte: string) => {
  const megabytes = Number(byte) / (1024 * 1024)
  return megabytes.toFixed(2)
}
/** Switches the GC chart to display collection counts (marksweep/scavenge). */
private countGC = () => {
const countFields = ['marksweepCount', 'scavengeCount']
this.fieldsGc = countFields
this.setState({ statusGc: true })
}
/** Switches the GC chart to display collection times in milliseconds. */
private timeGC = () => {
const timeFields = ['marksweepTime', 'scavengeTime']
this.fieldsGc = timeFields
this.setState({ statusGc: false })
}
/**
 * Fetches the latest JVM metrics for the given application and appends one
 * sample to each rolling series (heap memory, GC, memory rate, CPU rate).
 * At most `this.state.frequency` samples are retained per series.
 * @param id application id to query metrics for
 */
private updateMetrics = (id: string) => {
applicationService.fetchApplicationMetrics(id).then(data => {
// Normalize every reported metric value to a number up front.
data.metrics = _.fromPairs(
Object.keys(data.metrics).map((k: string) => {
return [k, Number(_.get(data.metrics, k))]
})
) as MetricsData
const timeText: string = getTime('hms')
/* tslint:disable:no-string-literal */
// Heap memory usage (内存使用). Trim via non-mutating slice copies:
// splicing this.state arrays in place bypasses React's state contract.
let metrics = this.state.metrics
if (metrics.length >= this.state.frequency) {
metrics = metrics.slice(metrics.length - this.state.frequency + 1)
}
const newMetric: IMetric = {
timeText,
committed: this.formatBytes(data.metrics.heapCommitted),
used: this.formatBytes(data.metrics.jvmMemoryUsedHeap),
init: this.formatBytes(data.metrics.heapInit),
max: this.formatBytes(data.metrics.heapMax),
}
// Garbage collection (垃圾回收 GC): same rolling-window trimming.
let garbageGc = this.state.garbageGc
if (garbageGc.length >= this.state.frequency) {
garbageGc = garbageGc.slice(garbageGc.length - this.state.frequency + 1)
}
const newObjectGarbageGc: IGarbageGC = {
timeText,
marksweepCount: data.metrics.gcPsMarksweepCount,
marksweepTime: data.metrics.gcPsMarksweepTime,
scavengeCount: data.metrics.gcPsScavengeCount,
scavengeTime: data.metrics.gcPsScavengeTime,
}
// Memory usage rate (内存使用率), as whole percentages of committed memory.
const heap = Number(data.metrics.jvmMemoryUsedHeap) / Number(data.metrics.heapCommitted)
const nonheap =
Number(data.metrics.jvmMemoryUsedNonHeap) / Number(data.metrics.nonheapCommitted)
const memoryRateData: IMemoryRate[] = [
{
heap: Number((heap * 100).toFixed(0)),
nonheap: Number((nonheap * 100).toFixed(0)),
},
]
// CPU usage rate (cpu使用率): split total processors into used / unused.
const cupRateData: ICUPRate[] = [
{
item: '未使用cpu',
count: Number(data.metrics.processors) - Number(data.metrics.systemloadAverage),
},
{ item: '已使用cpu', count: Number(data.metrics.systemloadAverage) },
]
this.setState({
metrics: metrics.concat([newMetric]),
more: true,
memoryRate: memoryRateData,
garbageGc: garbageGc.concat([newObjectGarbageGc]),
cupRate: cupRateData,
})
/* tslint:enable:no-string-literal */
})
}
/**
 * Returns a click handler that updates the sampling-window label and the
 * number of samples retained per series (`frequency`).
 */
private changTime = (timeText: string, frequency: number) => {
return () => {
this.setState({ timeText, frequency })
}
}
}
export default Jvm | the_stack |
import { isEmptyObject } from '@mpkit/util';
import { MpVirtualListComponentData, MpVirtualListComponentSpec } from '../../types/virtual-list';
// Virtual-list mixin for a mini-program component. The complete item list is
// kept in the non-reactive instance field `$vlAllList`; only a window of it
// (`$vlShowList`) is rendered, bracketed by two placeholder blocks whose
// heights stand in for the off-screen items. The window is recomputed
// (debounced by `$vlUpdateDelay`) whenever the list or scroll position changes.
// NOTE(review): most bookkeeping lives in ad-hoc instance fields created at
// runtime ($vlScrollTop, $vlItemHeightMap, …) rather than in `data`.
const Mixin: MpVirtualListComponentSpec = {
data: {
$vlScrollTop: 0,
$vlContainerSelector: '.vl-scroller',
$vlTotalCount: 0,
$vlShowList: [],
$vlStartPlaceholderHeight: 0,
$vlEndPlaceholderHeight: 0,
$vlPageSize: 10,
$vlUpdateDelay: 50
},
methods: {
// Re-measure the scroll container, then nudge $vlScrollTop (toggling between
// 0 and 1) so the view refreshes before recomputing the visible window.
$vlReload() {
this.$vlComputeContainerHeight(() => {
this.setData(
{
$vlScrollTop: this.data.$vlScrollTop && this.data.$vlScrollTop < 5 ? 0 : 1
},
() => {
this.$vlComputeShowList();
}
);
});
},
// Reset scroll/list bookkeeping and render the first page of items.
$vlInit() {
if (!this.$vlScrollTop) {
this.$vlScrollTop = this.$vlOldScrollTop = 0;
}
if (!this.$vlAllList) {
this.$vlAllList = [];
}
delete this.$vlContainerHeightComputeing;
delete this.$vlContainerHeightComputeQueue;
this.setData({
$vlShowList: [],
$vlStartPlaceholderHeight: 0,
$vlEndPlaceholderHeight: 0
});
// The host component may supply the page size dynamically.
const pageSize = this.$vlGetPageSize ? this.$vlGetPageSize() : this.data.$vlPageSize;
this.$vlSetShowList(0, pageSize);
this.$vlReload();
},
// Scroll handler: record old/new offsets and schedule a window recompute.
// Ignored while a $vlClear() is in flight.
$vlOnScroll(e) {
if (this.$vlClearing) {
return;
}
const { scrollTop } = e.detail;
this.$vlOldScrollTop = this.$vlScrollTop;
this.$vlScrollTop = scrollTop;
this.$vlComputeShowList();
},
// Insert an item, or merge it into the existing item with the same id. If
// the item is currently visible, patch it into $vlShowList in place first.
$vlAddItem(item) {
if (!this.$vlAllList) {
this.$vlAllList = [];
}
const readyItem = this.$vlAllList.find((it) => it.id === item.id);
if (readyItem) {
Object.assign(readyItem, item);
} else {
this.$vlAllList.push(item);
}
const readyShowIndex = this.data.$vlShowList.findIndex((it) => it.id === item.id);
if (readyShowIndex !== -1) {
// Hold the lock while the targeted setData runs so concurrent window
// recomputes are deferred (see $vlUnLock).
this.$vlLock();
this.setData(
{
[`$vlShowList[${readyShowIndex}]`]: readyItem
},
() => {
this.$vlUnLock();
}
);
return this.$vlListChange();
}
this.$vlListChange();
},
// Push the new total count to the view and recompute the window.
$vlListChange() {
this.setData({
$vlTotalCount: this.$vlAllList.length
});
this.$vlComputeShowList();
},
// Tear down all cached state, timers and the rendered list. Resolves
// shortly after the reset setData lands so callers can safely re-init.
$vlClear(): Promise<void> {
return new Promise((resolve) => {
this.$vlClearing = true;
if (this.$vlSetDataTimer) {
clearTimeout(this.$vlSetDataTimer);
delete this.$vlSetDataTimer;
}
if (this.$vlComputeShowListTimer) {
clearTimeout(this.$vlComputeShowListTimer);
delete this.$vlComputeShowListTimer;
}
delete this.$vlItemClientRectQueryMap;
this.$vlScrollTop = 0;
this.$vlOldScrollTop = 0;
delete this.$vlAllList;
delete this.$vlItemHeightMap;
delete this.$vlContainerHeightComputeing;
delete this.$vlContainerHeightComputeQueue;
this.$vlStartIndex = 0;
this.$vlEndIndex = 0;
this.setData(
{
$vlScrollTop: 0,
$vlTotalCount: 0,
$vlShowList: [],
$vlStartPlaceholderHeight: 0,
$vlEndPlaceholderHeight: 0
},
() => {
this.$vlScrollTop = 0;
this.$vlOldScrollTop = 0;
delete this.$vlClearing;
setTimeout(resolve, 50);
}
);
});
},
// Measure one item's rendered height, deduplicating concurrent queries per
// item id via a promise cache. Returns a promise of the measured height.
$vlComputeItemHeight(id: string) {
if (!this.$vlItemHeightComputeMap) {
this.$vlItemHeightComputeMap = {};
}
if (!this.$vlItemHeightComputeMap[id]) {
this.$vlItemHeightComputeMap[id] = this.$getBoundingClientRect(`.vl-item-${id}`).then((res) => {
if (this?.$vlItemHeightComputeMap && this.$vlItemHeightComputeMap[id]) {
this.$vlSetItemHeight(id, res.height);
delete this.$vlItemHeightComputeMap[id];
}
return res.height;
});
}
return this.$vlItemHeightComputeMap[id];
},
// Programmatic scroll that holds the recompute lock until setData finishes.
$vlLockScrollTo(top: number) {
this.$vlLock();
this.setData(
{
$vlScrollTop: top
},
() => {
this.$vlUnLock();
}
);
},
// Measure the scroll container's height. Concurrent calls are queued; when
// a measurement completes, queued callbacks are flushed with the result and
// the most recent one triggers a fresh measurement.
$vlComputeContainerHeight(callback) {
if (this.$vlContainerHeightComputeing) {
this.$vlContainerHeightComputeQueue.push(callback);
return;
}
this.$vlContainerHeightComputeing = true;
this.$vlContainerHeightComputeQueue = [];
this.$getBoundingClientRect(this.data.$vlContainerSelector).then((res) => {
this.$vlContainerHeight = res.height;
this?.$vlOnContainerHeightComputed && this.$vlOnContainerHeightComputed();
callback?.(res.height);
// $vlClear() may have deleted the flag while we were measuring.
if (!this.$vlContainerHeightComputeing) {
return;
}
delete this.$vlContainerHeightComputeing;
if (this?.$vlContainerHeightComputeQueue && this.$vlContainerHeightComputeQueue.length) {
const last = this.$vlContainerHeightComputeQueue.pop();
this.$vlContainerHeightComputeQueue.forEach((item) => {
item?.(res.height);
});
this.$vlComputeContainerHeight(last);
}
});
},
// Core of the virtual list: walk the full list accumulating item heights to
// find the first/last items intersecting the viewport, then derive a new
// [start, end) window and hand it to $vlSetShowList. Debounced by
// $vlUpdateDelay; deferred while the lock is held.
$vlComputeShowList() {
if (this.$vlClearing || !this.$vlAllList || !this.$vlAllList.length) {
return;
}
if (this.$vlIsLock) {
this.$vlHasListUpdate = true;
return;
}
if (this.$vlComputeShowListTimer) {
clearTimeout(this.$vlComputeShowListTimer);
}
// eslint-disable-next-line complexity
this.$vlComputeShowListTimer = setTimeout(() => {
const oldStart = this.$vlStartIndex;
const oldEnd = this.$vlEndIndex;
const vlGetItemHeight = this.$vlGetItemHeight ? this.$vlGetItemHeight : (): any => {};
let firstIntersectIndex = -1;
let visableHeight = 0;
let lastIntersectIndex = -1;
let top = 0;
if (!this.$vlItemHeightMap) {
this.$vlItemHeightMap = {};
}
// Height resolution order: static height > host-provided getter > cached
// measurement > precut (estimated) height.
const staticHeight =
this.data.$vlItemStaticHeight && typeof this.data.$vlItemStaticHeight === 'number'
? this.data.$vlItemStaticHeight
: null;
for (let index = 0, len = this.$vlAllList.length; index < len; index++) {
const item = this.$vlAllList[index];
(item as any).$vlIndex = index;
if (!staticHeight) {
this.$vlItemHeightMap[item.id] =
vlGetItemHeight.call(this, index) || this.$vlItemHeightMap[item.id] || 0;
} else {
this.$vlItemHeightMap[item.id] = staticHeight;
}
if (typeof this.$vlItemHeightMap[item.id] === 'number') {
top += this.$vlItemHeightMap[item.id];
} else if (typeof this.data.$vlItemPrecutHeight === 'number') {
top += this.data.$vlItemPrecutHeight;
}
if (top >= this.$vlScrollTop) {
if (firstIntersectIndex === -1) {
firstIntersectIndex = index;
}
if (firstIntersectIndex !== -1 && lastIntersectIndex === -1) {
visableHeight += this.$vlItemHeightMap[item.id];
}
if (visableHeight >= this.$vlContainerHeight && lastIntersectIndex === -1) {
lastIntersectIndex = index;
}
if (lastIntersectIndex !== -1 && firstIntersectIndex !== -1) {
break;
}
}
}
let newStart = oldStart;
let newEnd = oldEnd;
const pageSize = this.$vlGetPageSize ? this.$vlGetPageSize() : this.data.$vlPageSize;
// Skip the update unless the rendered list is underfilled or the scroll
// position actually moved since the last computation.
let needUpdate = this.data.$vlShowList.length < pageSize;
needUpdate =
needUpdate ||
!this.$vlPrevScrollInfo ||
(this.$vlPrevScrollInfo[0] !== this.$vlOldScrollTop &&
this.$vlPrevScrollInfo[1] !== this.$vlScrollTop);
if (!needUpdate) {
return;
}
this.$vlPrevScrollInfo = [this.$vlOldScrollTop, this.$vlScrollTop];
if (this.data.$vlShowList.length < pageSize) {
newEnd = newStart + pageSize;
newEnd = newEnd <= this.$vlAllList.length ? newEnd : this.$vlAllList.length;
} else if (lastIntersectIndex !== -1 && firstIntersectIndex !== -1) {
// Grow the visible slice symmetrically around the intersecting range
// until it holds roughly 20 items (or covers the whole list).
let count = lastIntersectIndex - firstIntersectIndex;
newStart = firstIntersectIndex;
newEnd = lastIntersectIndex;
while (count < 20) {
newStart = newStart - 1 < 0 ? 0 : newStart - 1;
newEnd = newEnd + 1 > this.$vlAllList.length ? this.$vlAllList.length : newEnd + 1;
if (newEnd - newStart === count && newStart === 0 && newEnd === this.$vlAllList.length) {
break;
}
count = newEnd - newStart;
if (count > 20) {
if (newStart) {
newStart--;
} else {
newEnd--;
}
break;
}
}
if (newStart < 0) {
newStart = 0;
}
} else if (!oldStart && !oldEnd) {
newEnd = pageSize;
}
this.$vlSetShowList(newStart, newEnd);
}, this.data.$vlUpdateDelay);
},
// While locked, window recomputes are deferred and replayed on unlock.
$vlLock() {
this.$vlIsLock = true;
},
$vlUnLock() {
delete this.$vlIsLock;
if (this.$vlHasListUpdate) {
delete this.$vlHasListUpdate;
this.$vlComputeShowList();
}
},
// Apply a [startIndex, endIndex) window: clamp the indexes, derive the
// start/end placeholder heights from per-item heights, push the visible
// slice to the view, and schedule height measurements for new items.
// eslint-disable-next-line complexity
$vlSetShowList(startIndex, endIndex) {
const pageSize = this.$vlGetPageSize ? this.$vlGetPageSize() : this.data.$vlPageSize;
let vlStartIndex;
if (startIndex < 0) {
vlStartIndex = 0;
} else if (startIndex < this.$vlAllList.length) {
vlStartIndex = startIndex;
} else if (this.$vlAllList.length) {
vlStartIndex = this.$vlAllList.length - 1;
} else {
vlStartIndex = 0;
}
let vlEndIndex = endIndex < 0 || endIndex > this.$vlAllList.length ? this.$vlAllList.length : endIndex;
// A list shorter than one page is always shown in full.
if (vlEndIndex - vlStartIndex < pageSize && this.$vlAllList.length < pageSize) {
vlStartIndex = 0;
vlEndIndex = this.$vlAllList.length;
}
const needUpdate =
vlStartIndex !== this.$vlStartIndex ||
vlEndIndex !== this.$vlEndIndex ||
!this.data.$vlShowList ||
!this.data.$vlShowList.length;
if (needUpdate) {
this.$vlStartIndex = vlStartIndex;
this.$vlEndIndex = vlEndIndex;
this.$vlLock();
let startHeight = 0;
let endHeight = 0;
const list = [];
const staticHeight =
this.data.$vlItemStaticHeight && typeof this.data.$vlItemStaticHeight === 'number'
? this.data.$vlItemStaticHeight
: null;
const precutHeight =
this.data.$vlItemPrecutHeight && typeof this.data.$vlItemPrecutHeight === 'number'
? this.data.$vlItemPrecutHeight
: null;
this.$vlAllList.forEach((item, index) => {
let itemHeight;
if (this.$vlItemHeightMap[item.id]) {
itemHeight = this.$vlItemHeightMap[item.id];
} else if (staticHeight) {
itemHeight = staticHeight;
} else if (typeof this.$vlGetItemHeight === 'function') {
itemHeight = this.$vlGetItemHeight(index) || 0;
} else if (precutHeight) {
itemHeight = precutHeight;
} else {
itemHeight = 0;
}
if (index < this.$vlStartIndex) {
startHeight += itemHeight;
} else if (index >= this.$vlStartIndex && index < this.$vlEndIndex) {
list.push(item);
} else {
endHeight += itemHeight;
}
});
const renderData: Partial<MpVirtualListComponentData> = {};
const renderCallbacks = [];
if (this.data.$vlStartPlaceholderHeight !== startHeight) {
renderData.$vlStartPlaceholderHeight = startHeight;
}
if (this.data.$vlEndPlaceholderHeight !== endHeight) {
renderData.$vlEndPlaceholderHeight = endHeight;
}
if (!this.$vlItemClientRectQueryMap) {
this.$vlItemClientRectQueryMap = {};
}
const mergeList = [];
list.forEach((item) => {
// Schedule a one-time height measurement per newly shown item.
if (!this.$vlItemClientRectQueryMap[item.id]) {
this.$vlItemClientRectQueryMap[item.id] = () => {
return this.$vlComputeItemHeight(item.id).catch(() => Promise.resolve());
};
renderCallbacks.push(this.$vlItemClientRectQueryMap[item.id]);
}
mergeList.push(item);
});
renderData.$vlShowList = mergeList;
// Unlock only after the view update and all pending measurements settle.
if (!isEmptyObject(renderData)) {
this.setData(renderData, () => {
Promise.all(renderCallbacks.map((item) => item())).then(() => {
setTimeout(() => {
this.$vlUnLock();
});
});
});
} else if (renderCallbacks.length) {
Promise.all(renderCallbacks.map((item) => item())).then(() => {
setTimeout(() => {
this.$vlUnLock();
});
});
} else {
setTimeout(() => {
this.$vlUnLock();
});
}
mergeList.forEach((item) => {
this.$vlRestoreItemState(item.id);
});
}
// log(
// "log",
// `${this.is}: vlIndex=${oldStart},${oldEnd} | ${this.$vlStartIndex},${this.$vlEndIndex}, pageSize=${pageSize}, allLength=${this.$vlAllList.length}, showLength=${this.data.$vlShowList.length}, start=${this.data.$vlStartPlaceholderHeight}, end=${this.data.$vlEndPlaceholderHeight}`
// );
},
// Cache a measured height for an item and recompute the window with it.
$vlSetItemHeight(itemId: string, height: number) {
if (!this.$vlItemHeightMap) {
this.$vlItemHeightMap = {};
}
this.$vlItemHeightMap[itemId] = height;
this.$vlComputeShowList();
},
// Remember per-item UI state so it can be restored when the item scrolls
// back into view. `replace` overwrites instead of merging.
$vlSaveItemState(itemId: string, state: any, replace = false) {
if (!this.$vlItemState) {
this.$vlItemState = {};
}
if (replace || !this.$vlItemState[itemId]) {
this.$vlItemState[itemId] = state;
} else {
this.$vlItemState[itemId] = {
...this.$vlItemState[itemId],
...state
};
}
},
// Hand saved state back to the host component's restore policy, if any.
$vlRestoreItemState(itemId: string) {
if (this?.$vlItemState && this.$vlItemState[itemId] && this.$vlItemRestorePolicy) {
this.$vlItemRestorePolicy(itemId, this.$vlItemState[itemId]);
// delete this.$vlItemState[itemId];
}
}
}
};
export default Mixin;
import { SFC, b, emit, machine, router, memo, mount, linkTo } from '../../src';
import { machineRegistry } from '../../src/machineRegistry';
// End-to-end exercise of the baahu framework against a real DOM: routing
// (router/linkTo), state machines (parent + leaf, singleton + keyed
// instances), memoized SFCs, and targeted vs. broadcast event emission.
describe('basic apps', () => {
let $root = document.body;
test('simple video app', () => {
/**
*
* goals:
*
* - check if child is same for leaf nodes when they don't render!
* - both memoInstance and isLeaf are tested here! (count renders for memoized fn)
* - test cond
*
* - include a parent machine and leaf machines
* - include both reusable machines and singleton machines (like home)
*
*/
const MyRouter = router({
'/': () => <Home />,
'/videos': () => <VideoList initialCategory={'sports'} />,
});
const App: SFC = () => (
<div>
<h1>baahu video app</h1>
<MyRouter />
<MyFooter path={location.pathname} />
</div>
);
// Render counter: lets the test assert when memoization bailed out.
const footerRenders: string[] = [];
const MyFooter = memo<{ path: string }>(({ path }) => {
footerRenders.push('render');
return <h3>{path}</h3>;
});
// using arrays to define machines and states
// so we can iterate through them for tests
const Home = machine<{}, HomeState, HomeEvent, {}>({
id: 'Home',
initial: 'loading',
context: () => ({}),
when: {
loading: {
on: {
LOADED_CATEGORIES: {
to: 'loaded',
},
},
},
loaded: {},
},
render: state => {
switch (state) {
case 'loading':
return (
<div>
<button
onClick={() => emit({ type: 'LOADED_CATEGORIES' }, 'Home')}
>
load
</button>
</div>
);
case 'loaded':
return (
<div>
<button onClick={() => linkTo('/videos')}>
go to video list
</button>
</div>
);
}
},
});
// videolist is parent + static/singleton
// video is leaf + dynamic/many instances
const VideoList = machine<
VideoListProps,
VideoListState,
VideoListEvent,
VideoListContext
>({
id: 'VideoList',
initial: 'loading',
context: props => ({
category: props.initialCategory,
}),
when: {
loading: {
on: {
LOADED_VIDEOS: {
to: 'loaded',
},
},
},
loaded: {
on: {
CHANGE_CATEGORY: {
to: 'loading',
do: (ctx, e) => (ctx.category = e.category),
// Guard: transition only fires when the category actually changes.
if: (ctx, e) => ctx.category !== e.category,
},
},
},
},
render: (state, { category }) => {
switch (state) {
case 'loading':
return (
<div>
<p>loading videos</p>
</div>
);
case 'loaded':
return (
<div>
<h2>category: {category}</h2>
<Video category={category} listPosition={1} />
<Video category={category} listPosition={2} />
<Video category={category} listPosition={3} />
<br />
<div>
<h3>change category</h3>
{videoListCategory.map(category => (
<button
onClick={() =>
emit({ type: 'CHANGE_CATEGORY', category })
}
>
{category}
</button>
))}
</div>
</div>
);
}
},
});
// Keyed machine: one instance per (category, listPosition) pair.
const Video = machine<VideoProps, VideoState, VideoEvent, {}>({
id: props => `video-${props.category}-${props.listPosition}`,
context: () => ({}),
initial: 'buffering',
when: {
buffering: {
on: {
CAN_PLAY: {
to: 'playing',
},
},
},
playing: {
on: {
NEEDS_TO_BUFFER: {
to: 'buffering',
},
PAUSE: {
to: 'paused',
},
},
},
paused: {
on: {
PLAY: {
to: 'playing',
},
},
},
},
render: state => {
switch (state) {
case 'buffering':
return <p>buffering</p>;
case 'playing':
return <p>playing</p>;
case 'paused':
return <p>paused</p>;
}
},
});
$root = mount(App, $root);
/**
* SCRIPTING USER INTERACTION
*/
expect(footerRenders.length).toBe(1);
expect($root.firstChild?.nodeName).toBe('H1');
expect($root.childNodes[1]?.firstChild?.nodeName).toBe('BUTTON');
expect($root.childNodes[1]?.firstChild?.firstChild?.nodeValue).toBe('load');
expect(machineRegistry.size).toBe(1);
// don't respond to this
emit({ type: 'LOADED_VIDEOS' }, 'Home');
expect(footerRenders.length).toBe(1);
expect($root.childNodes[1]?.firstChild?.nodeName).toBe('BUTTON');
expect($root.childNodes[1]?.firstChild?.firstChild?.nodeValue).toBe('load');
emit({ type: 'LOADED_CATEGORIES' }, 'Home');
expect(footerRenders.length).toBe(1);
expect($root.childNodes[1]?.firstChild?.nodeName).toBe('BUTTON');
expect($root.childNodes[1]?.firstChild?.firstChild?.nodeValue).toBe(
'go to video list'
);
linkTo('/videos');
// memo doesn't bail out of route changes
expect(footerRenders.length).toBe(2);
expect($root.childNodes[1].childNodes.length).toBe(1);
expect($root.childNodes[1]?.firstChild?.nodeName).toBe('P');
expect($root.childNodes[1]?.firstChild?.firstChild?.nodeValue).toBe(
'loading videos'
);
expect(machineRegistry.size).toBe(1);
emit({ type: 'LOADED_VIDEOS' });
expect(footerRenders.length).toBe(2);
expect($root.childNodes[1].childNodes.length).toBe(6);
expect($root.childNodes[1]?.firstChild?.nodeName).toBe('H2');
expect($root.childNodes[1]?.firstChild?.firstChild?.nodeValue).toBe(
`category: `
);
expect($root.childNodes[1]?.firstChild?.childNodes[1]?.nodeValue).toBe(
`sports`
);
// first video node (1)
expect($root.childNodes[1]?.childNodes[1]?.nodeName).toBe(`P`);
expect($root.childNodes[1]?.childNodes[1]?.firstChild?.nodeValue).toBe(
`buffering`
);
// second video node
expect($root.childNodes[1]?.childNodes[2]?.nodeName).toBe(`P`);
expect($root.childNodes[1]?.childNodes[2]?.firstChild?.nodeValue).toBe(
`buffering`
);
expect(machineRegistry.size).toBe(4);
// shouldn't change anything
emit({ type: 'CHANGE_CATEGORY', category: 'sports' }, 'VideoList');
// first video node (2)
expect($root.childNodes[1]?.childNodes[1]?.nodeName).toBe(`P`);
expect($root.childNodes[1]?.childNodes[1]?.firstChild?.nodeValue).toBe(
`buffering`
);
// second video node
expect($root.childNodes[1]?.childNodes[2]?.nodeName).toBe(`P`);
expect($root.childNodes[1]?.childNodes[2]?.firstChild?.nodeValue).toBe(
`buffering`
);
// Capture the VNodes before a leaf-only transition so identity can be compared.
const firstLeafMachVNodeBefore = machineRegistry.get('video-sports-1')?.v;
const firstLeafChildBefore = firstLeafMachVNodeBefore?.c;
const secondLeafMachVNodeBefore = machineRegistry.get('video-sports-2')?.v;
const secondLeafChildBefore = secondLeafMachVNodeBefore?.c;
expect(machineRegistry.size).toBe(4);
// let beforeChildren = secondLeafNodeBefore?.c.c as VNode[];
// console.log(beforeChildren[0]);
// console.log('above is first time');
/**
*
* should change the second video node only (DO LEAF TEST HERE)
*
*/
emit({ type: 'CAN_PLAY' }, `video-sports-2`);
expect($root.childNodes[1]?.childNodes[1]?.nodeName).toBe(`P`);
expect($root.childNodes[1]?.childNodes[1]?.firstChild?.nodeValue).toBe(
`buffering`
);
// second video node
expect($root.childNodes[1]?.childNodes[2]?.nodeName).toBe(`P`);
const problematicNode = $root.childNodes[1]?.childNodes[2]?.firstChild;
expect(problematicNode?.nodeValue).toBe(`playing`);
expect(machineRegistry.size).toBe(4);
const firstLeafMachVNodeAfter = machineRegistry.get('video-sports-1')?.v;
const firstLeafChildAfter = firstLeafMachVNodeAfter?.c;
const secondLeafMachVNodeAfter = machineRegistry.get('video-sports-2')?.v;
const secondLeafChildAfter = secondLeafMachVNodeAfter?.c;
/** The machine nodes themselves should be the same, but the child should change for the
* second leaf (as it went thru a transition), and should be the same for the first leaf (no transition)
* */
expect(firstLeafMachVNodeBefore === firstLeafMachVNodeAfter).toBe(true);
expect(firstLeafChildBefore === firstLeafChildAfter).toBe(true);
expect(secondLeafMachVNodeBefore === secondLeafMachVNodeBefore).toBe(true);
expect(secondLeafChildBefore === secondLeafChildAfter).toBe(false);
/**
*
* this should change the videolist state
*
* */
emit({ type: 'CHANGE_CATEGORY', category: 'tech' });
expect($root.childNodes[1].childNodes.length).toBe(1);
expect($root.childNodes[1]?.firstChild?.nodeName).toBe('P');
expect($root.childNodes[1]?.firstChild?.firstChild?.nodeValue).toBe(
'loading videos'
);
expect(machineRegistry.size).toBe(1);
emit({ type: 'LOADED_VIDEOS' });
expect(footerRenders.length).toBe(2);
expect($root.childNodes[1].childNodes.length).toBe(6);
expect($root.childNodes[1]?.firstChild?.nodeName).toBe('H2');
expect($root.childNodes[1]?.firstChild?.firstChild?.nodeValue).toBe(
`category: `
);
expect($root.childNodes[1]?.firstChild?.childNodes[1]?.nodeValue).toBe(
`tech`
);
// first video node (3)
expect($root.childNodes[1]?.childNodes[1]?.nodeName).toBe(`P`);
expect($root.childNodes[1]?.childNodes[1]?.firstChild?.nodeValue).toBe(
`buffering`
);
// second video node. should come back to buffering (new instance)
expect($root.childNodes[1]?.childNodes[2]?.nodeName).toBe(`P`);
expect(machineRegistry.get('video-tech-2')?.st).toBe('buffering');
expect(
problematicNode !== $root.childNodes[1]?.childNodes[2]?.firstChild
).toBe(true);
expect($root.childNodes[1]?.childNodes[2]?.firstChild?.nodeValue).toBe(
`buffering`
);
expect(machineRegistry.size).toBe(4);
/**
*
* go back home
*
*/
linkTo('/');
expect(footerRenders.length).toBe(3);
expect($root.firstChild?.nodeName).toBe('H1');
expect($root.childNodes[1]?.firstChild?.nodeName).toBe('BUTTON');
expect($root.childNodes[1]?.firstChild?.firstChild?.nodeValue).toBe('load');
/**
*
* go back to /videos. videolist should be loading
*
*/
linkTo('/videos');
expect(footerRenders.length).toBe(4);
expect($root.childNodes[1]?.firstChild?.nodeName).toBe('P');
expect($root.childNodes[1]?.firstChild?.firstChild?.nodeValue).toBe(
'loading videos'
);
});
});
const homeState = ['loading', 'loaded'] as const;
type HomeState = typeof homeState[number];
type HomeEvent = { type: 'LOADED_CATEGORIES' };
const videoListState = ['loading', 'loaded'] as const;
type VideoListProps = {
initialCategory: VideoListCategory;
};
type VideoListState = typeof videoListState[number];
const videoListCategory = ['comedy', 'action', 'sports', 'tech'] as const;
type VideoListCategory = typeof videoListCategory[number];
type VideoListContext = {
category: VideoListCategory;
};
type VideoListEvent =
| { type: 'LOADED_VIDEOS' }
| { type: 'CHANGE_CATEGORY'; category: VideoListCategory };
type VideoProps = {
listPosition: number;
category: VideoListCategory;
};
const videoState = ['buffering', 'playing', 'paused'] as const;
type VideoState = typeof videoState[number];
type VideoEvent =
| { type: 'CAN_PLAY' }
| { type: 'NEEDS_TO_BUFFER' }
| { type: 'PLAY' }
| { type: 'PAUSE' }; | the_stack |
import {assert} from '@esm-bundle/chai';
import {fixture} from '@open-wc/testing';
import {timeOut} from '@polymer/polymer/lib/utils/async.js';
import {Debouncer} from '@polymer/polymer/lib/utils/debounce.js';
import {spy, SinonSpy} from 'sinon';
import {GoogleChart} from '../google-chart.js';
import {dataTable, load, DataTableLike} from '../loader.js';
import {ready} from './helpers.js';
suite('<google-chart>', function() {
var chart: GoogleChart;
var waitCheckAndDoneDebouncer: Debouncer|null;
setup(async function() {
chart = await fixture('<google-chart></google-chart>') as GoogleChart;
waitCheckAndDoneDebouncer = null;
});
var waitCheckAndDone = function(check: () => unknown, done: () => void) {
setTimeout(function() {
if (check()) {
waitCheckAndDoneDebouncer = Debouncer.debounce(
waitCheckAndDoneDebouncer, timeOut.after(100), done);
} else {
waitCheckAndDone(check, done);
}
}, 50);
};
suite('Default Functionality', function() {
setup(function() {
chart.data = [ ['Data', 'Value'], ['Something', 1] ];
});
test('fires google-chart-ready event for initial load', function(done) {
chart.addEventListener('google-chart-ready', function() {
assert.isTrue(chart.drawn);
done();
});
});
test('fires google-chart-ready event for redraw call', function(done) {
var drawCount = 0;
chart.addEventListener('google-chart-ready', function() {
assert.isTrue(chart.drawn);
++drawCount;
if (drawCount == 4) { done(); }
else chart.redraw();
});
});
test('default type is column', function() {
assert.equal(chart.type, 'column');
});
test('can change type', function(done) {
chart.type = 'line';
waitCheckAndDone(function() {
// A circle indicates the chart type change was drawn:
return chart.shadowRoot!.querySelector('circle');
}, done);
});
test('default selection is null', function() {
assert.equal(chart.selection, null);
});
test('can change selection', function(done) {
chart.selection = [ {row: 0} ];
waitCheckAndDone(function() {
// A white stroked rectangle signals the selection was drawn:
return chart.shadowRoot!.querySelector('rect[stroke="#ffffff"]');
}, done);
});
test('updates selection', function(done) {
chart.data = [
['Data', 'Value'],
['Something 1', 1],
['Something 2', 2],
['Something 3', 3],
];
chart.addEventListener('google-chart-select', () => {
assert.sameDeepMembers(chart.selection!, [ {row: 2, column: 1} ]);
done();
}, {once: true});
chart.selection = [ {row: 0, column: 1} ];
chart.addEventListener('google-chart-ready', () => {
// Look for something that can be clicked. Find rectangles for legend
// and each bar.
const chartDiv = chart.shadowRoot!.getElementById('chartdiv')!;
const rects = chartDiv.querySelectorAll('rect[fill="#3366cc"]');
// Click on the last bar ('Something 3').
rects[3].dispatchEvent(new MouseEvent('click', {bubbles: true}));
}, {once: true});
});
test('default options are null', function() {
assert.equal(chart.options, null);
});
test('can change options', function(done) {
var expectedTitle = 'New Title';
var initialDraw = true;
chart.addEventListener('google-chart-ready', function() {
if (initialDraw) {
initialDraw = false;
chart.options = {'title': expectedTitle};
} else {
assert.equal(chart.shadowRoot!.querySelector('text')!.innerHTML, expectedTitle);
done();
}
});
});
test('can change deep options', function(done) {
chart.options = {'title': 'Old Title'};
var spyRedraw: SinonSpy;
var expectedTitle = 'New Title';
var initialDraw = true;
chart.addEventListener('google-chart-ready', function() {
if (initialDraw) {
spyRedraw = spy(chart['chartWrapper']!, 'draw');
initialDraw = false;
const options = chart.options as google.visualization.ColumnChartOptions;
options.title = 'Debounced Title';
chart.options = options;
options.title = expectedTitle;
chart.options = options;
assert.isFalse(spyRedraw.called);
} else {
assert.equal(chart.shadowRoot!.querySelector('text')!.innerHTML, expectedTitle);
assert.isTrue(spyRedraw.calledOnce);
spyRedraw.restore();
done();
}
});
});
test('creates png chart uri', function (done) {
chart.addEventListener('google-chart-ready', function(event) {
var uri = chart.imageURI!;
assert.isString(uri);
assert.match(uri, /^data:image\/png;base64/, 'png regexp matches');
done();
});
});
test('can render multiple instances', function (done) {
var secondChart = document.createElement('google-chart');
secondChart.data = [ ['Data', 'Value'], ['Something', 1] ];
// Ensure second chart is rendered. Clean up test.
secondChart.addEventListener('google-chart-ready', function() {
document.body.removeChild(secondChart);
done();
});
document.body.appendChild(secondChart);
});
});
suite('Class', () => {
test('can be created', async () => {
const chart = new GoogleChart();
chart.data = [ ['Data', 'Value'], ['Something', 1] ];
document.body.appendChild(chart);
await ready(chart);
document.body.removeChild(chart);
});
});
suite('Redrawing', () => {
let chart: GoogleChart;
let dt: google.visualization.DataTable;
setup(async () => {
chart = await fixture('<google-chart></google-chart>') as GoogleChart;
dt = await dataTable(undefined);
dt.addColumn('number', 'x');
dt.addColumn('number', 'y');
dt.addRow([1, 1]);
});
async function countBars(chart: GoogleChart) {
await ready(chart);
return Array.from(chart.shadowRoot!.querySelectorAll('rect[fill="#3366cc"]')).length;
}
test('redraws after DataTable change', async () => {
chart.data = dt;
const barsBefore = await countBars(chart);
dt.addRow([2, 2]);
chart.redraw();
const barsAfter = await countBars(chart);
assert.isAbove(barsAfter, barsBefore);
});
test('redraws after DataView change', async () => {
const view = new google.visualization.DataView(dt);
chart.view = view;
const barsBefore = await countBars(chart);
dt.addRow([2, 2]);
chart.redraw();
const barsAfter = await countBars(chart);
assert.isAbove(barsAfter, barsBefore);
});
});
suite('Events', function() {
setup(function() {
chart.data = [ ['Data', 'Value'], ['Something', 1] ];
});
test('can be added', function(done) {
chart.events = ['onmouseover'];
chart.addEventListener('google-chart-ready', function() {
google.visualization.events.trigger(
chart['chartWrapper']!.getChart(), 'onmouseover', {'row': 1, 'column': 5});
});
chart.addEventListener('google-chart-onmouseover', function(e) {
const {detail} = (e as CustomEvent);
assert.equal(detail.data.row, 1);
assert.equal(detail.data.column, 5);
done();
});
});
});
// Exercises every supported way of feeding data into the chart element:
// [rows]/[cols] pairs, 2D arrays, DataTable object literals, DataTable
// instances, DataSource queries and JSON URLs.
suite('Data Source Types', function() {
  test('[rows] and [cols]', function (done) {
    chart.cols = [
      {'label': 'Data', 'type': 'string'},
      {'label': 'Value', 'type': 'number'}
    ];
    chart.rows = [
      ['Something', 1]
    ];
    chart.addEventListener('google-chart-ready', function() {
      done();
    });
  });
  // Documents a known limitation: the 'Date(y, m, d)' string representation
  // is NOT parsed when passed through [rows]; the chart reports a type error.
  test('[rows] and [cols] with date string repr is broken', function(done) {
    chart.cols = [ { 'type': 'date' } ];
    chart.rows = [ ['Date(1789, 3, 30)'] ];
    waitCheckAndDone(function() {
      // Poll until the error text shows up in the chart container.
      const chartDiv = chart.shadowRoot!.getElementById('chartdiv')!;
      return chartDiv.innerHTML ==
        'Error: Type mismatch. Value Date(1789, 3, 30) ' +
        'does not match type date in column index 0';
    }, done);
  });
  // Helper: assigns `data` and invokes `done` on the next successful render.
  var setDataAndWaitForRender = function(data: DataTableLike|string, done: () => void) {
    chart.data = data;
    chart.addEventListener('google-chart-ready', function() {
      done();
    });
  };
  test('[data] is 2D Array', function(done) {
    setDataAndWaitForRender([ ['Data', 'Value'], ['Something', 1] ], done);
  });
  test('[data] is DataTable Object format', function(done) {
    setDataAndWaitForRender({
      'cols': [
        {'label': 'Data', 'type': 'string'},
        {'label': 'Value', 'type': 'number'}
      ],
      'rows': [
        {'c': ['Someting', 1]} as any
      ]
    }, done);
  });
  test('[data] is DataTable', function(done) {
    // Attach the listener first so the render triggered by the async
    // assignment below cannot be missed.
    chart.addEventListener('google-chart-ready', function() {
      done();
    });
    dataTable([
      ['Data', 'Value'],
      ['Something', 1]
    ]).then(function(dataTable) {
      chart.data = dataTable;
    });
  });
  test('[data] is DataTable from DataSource Query', function(done) {
    chart.addEventListener('google-chart-ready', function() {
      done();
    });
    load().then(() => {
      const q = new google.visualization.Query('test/query.json');
      q.send((res) => {
        chart.data = res.getDataTable();
      });
    });
  });
  test('[data] is JSON URL for 2D Array', function(done) {
    setDataAndWaitForRender('test/test-data-array.json', done);
  });
  test('[data] is JSON URL for DataTable Object format', function(done) {
    setDataAndWaitForRender('test/test-data-object.json', done);
  });
  test('[view] is DataView', function(done) {
    chart.addEventListener('google-chart-ready', function() {
      done();
    });
    dataTable([
      ['Data', 'Value'],
      ['Something', 1]
    ])
    .then((dataTable) => {
      chart.view = new google.visualization.DataView(dataTable);
    });
  });
  // Re-assigning a URL after a render must trigger a second fetch + render.
  test('multiple calls to JSON URL', function(done) {
    setDataAndWaitForRender('test/test-data-array.json', function() {
      setDataAndWaitForRender('test/test-data-object.json', done);
    });
  });
});
suite('Timeline chart workaround', () => {
  // `setSelection` on a timeline chart fires a `select` event; ensure that
  // programmatic selection updates do not feed back into an endless loop.
  test('set selection does not loop', async () => {
    chart.type = 'timeline';
    chart.cols = [
      {type: 'string', label: 'President'},
      {type: 'date', label: 'Start'},
      {type: 'date', label: 'End'},
    ];
    const terms: Array<[string, Date, Date]> = [
      ['Washington', new Date(1789, 3, 30), new Date(1797, 2, 4)],
      ['Adams', new Date(1797, 2, 4), new Date(1801, 2, 4)],
      ['Jefferson', new Date(1801, 2, 4), new Date(1809, 2, 4)],
    ];
    chart.rows = terms;
    await ready(chart);
    // First set, then replace the selection; neither assignment may recurse.
    chart.selection = [{row: 0, column: null}];
    chart.selection = [{row: 1, column: null}];
    assert.isDefined(chart.selection);
  });
});
}); | the_stack |
import { Component, OnInit } from '@angular/core';
import { AuthFacade } from '../../../+state/auth.facade';
import { GarlandToolsService } from '../../../core/api/garland-tools.service';
import { TranslateService } from '@ngx-translate/core';
import { BellNodesService } from '../../../core/data/bell-nodes.service';
import { LocalizedDataService } from '../../../core/data/localized-data.service';
import { AlarmsFacade } from '../../../core/alarms/+state/alarms.facade';
import { LazyDataService } from '../../../core/data/lazy-data.service';
import { first, map, shareReplay, startWith, tap } from 'rxjs/operators';
import { BehaviorSubject, combineLatest, Observable, of, ReplaySubject } from 'rxjs';
import { fishingLog } from '../../../core/data/sources/fishing-log';
import { spearFishingNodes } from '../../../core/data/sources/spear-fishing-nodes';
import { spearFishingLog } from '../../../core/data/sources/spear-fishing-log';
import { fishParameter } from '../../../core/data/sources/fish-parameter';
import { TrackerComponent } from '../tracker-component';
import { fishEyes } from '../../../core/data/sources/fish-eyes';
import { fshLogOrder } from '../fsh-log-order';
import { fshSpearLogOrder } from '../fsh-spear-log-order';
@Component({
selector: 'app-fishing-log-tracker',
templateUrl: './fishing-log-tracker.component.html',
styleUrls: ['./fishing-log-tracker.component.less']
})
export class FishingLogTrackerComponent extends TrackerComponent implements OnInit {
private completion$ = this.authFacade.user$.pipe(
map(user => user.gatheringLogProgression),
startWith([])
);
public type$: BehaviorSubject<number> = new BehaviorSubject<number>(0);
public display$: Observable<any[]>;
public tabsDisplay$: Observable<any>;
public pageDisplay$: Observable<any[]>;
public spotId$: ReplaySubject<number> = new ReplaySubject<number>();
public fshDataCache: any[] = [];
public loading = true;
public fishEyesFormat = 1;
public hideCompleted = false;
constructor(private authFacade: AuthFacade, private gt: GarlandToolsService, private translate: TranslateService,
private bell: BellNodesService, private l12n: LocalizedDataService, protected alarmsFacade: AlarmsFacade,
private lazyData: LazyDataService) {
super(alarmsFacade);
}
public getFshIcon(index: number): string {
return [
'./assets/icons/angling.png',
'./assets/icons/spearfishing.png'
][index];
}
public toggleFishEyesFormat(): void {
this.fishEyesFormat = this.fishEyesFormat === 1 ? 60 : 1;
}
public markAsDone(itemId: number, done: boolean): void {
this.authFacade.user$.pipe(first()).subscribe(user => {
if (done) {
user.gatheringLogProgression.push(itemId);
} else {
user.gatheringLogProgression = user.gatheringLogProgression.filter(entry => entry !== itemId);
}
this.authFacade.updateUser(user);
});
}
public getFshData(fish: any): any[] {
if (this.fshDataCache[fish.itemId] === undefined) {
this.fshDataCache[fish.itemId] = this._getFshData(fish);
}
return this.fshDataCache[fish.itemId];
}
private _getFshData(fish: any): any[] {
const spots = this.gt.getFishingSpots(fish.itemId);
if (fish.id >= 20000) {
const logEntries = spearFishingLog.filter(entry => entry.itemId === fish.id);
const spot = spearFishingNodes.find(node => node.itemId === fish.itemId);
return logEntries
.map(entry => {
const result: any = {
id: fish.id,
zoneid: entry.zoneId,
mapId: entry.mapId,
x: entry.coords.x,
y: entry.coords.y,
level: entry.level,
type: 4,
itemId: fish.itemId,
icon: fish.icon,
timed: fish.spawn !== undefined
};
if (spot !== undefined) {
result.gig = spot.gig;
}
if (spot.spawn !== undefined) {
result.spawnTimes = [spot.spawn];
result.uptime = spot.duration;
// Just in case it despawns the day after.
result.uptime = result.uptime < 0 ? result.uptime + 24 : result.uptime;
// As uptimes are always in minutes, gotta convert to minutes here too.
result.uptime *= 60;
}
if (spot.predator) {
result.predators = spot.predator.map(predator => {
const itemId = +Object.keys(this.lazyData.items).find(key => this.lazyData.items[key].en === predator.name);
return {
id: itemId,
icon: this.lazyData.icons[itemId],
amount: predator.predatorAmount
};
});
}
return result;
})
.slice(0, 1);
} else {
if (spots.length > 0) {
return spots
.map(spot => {
const mapId = this.l12n.getMapId(spot.zone);
const zoneId = this.l12n.getAreaIdByENName(spot.title);
if (mapId !== undefined) {
const result: any = {
id: fish.id,
zoneid: zoneId,
mapId: mapId,
x: spot.coords[0],
y: spot.coords[1],
level: spot.lvl,
type: 4,
itemId: spot.id,
icon: spot.icon,
timed: spot.during !== undefined,
fishEyes: spot.fishEyes || fishEyes[fish.itemId] !== undefined,
snagging: spot.snagging
};
if (spot.during !== undefined) {
result.spawnTimes = [spot.during.start];
result.uptime = spot.during.end - spot.during.start;
// Just in case it despawns the day after.
result.uptime = result.uptime < 0 ? result.uptime + 24 : result.uptime;
// As uptimes are always in minutes, gotta convert to minutes here too.
result.uptime *= 60;
}
if (spot.predator) {
result.predators = spot.predator.map(predator => {
return {
id: predator.id,
icon: predator.icon,
amount: predator.predatorAmount
};
});
}
if (spot.hookset) {
result.hookset = spot.hookset.split(' ')[0].toLowerCase();
}
result.baits = spot.bait.map(bait => {
const baitData = this.gt.getBait(bait);
return {
icon: baitData.icon,
id: baitData.id
};
});
if (spot.weather) {
result.weathers = spot.weather.map(w => this.l12n.getWeatherId(w));
}
if (spot.transition) {
result.weathersFrom = spot.transition.map(w => this.l12n.getWeatherId(w));
}
return result;
}
return undefined;
})
.filter(res => res !== undefined)
.slice(0, 1);
}
}
return null;
}
ngOnInit(): void {
const completeDisplay$ = of([fishingLog, spearFishingLog]).pipe(
map((logs) => {
return logs.map((log: any[], type) => {
const display = { tabs: [], total: 0, done: 0 };
log.forEach(entry => {
let row = display.tabs.find(e => e.mapId === entry.mapId);
if (row === undefined) {
display.tabs.push({
mapId: entry.mapId,
placeId: entry.placeId,
done: 0,
total: 0,
spots: []
});
row = display.tabs[display.tabs.length - 1];
}
const spotId = entry.spot ? entry.spot.id : entry.id;
let spot = row.spots.find(s => s.id === spotId);
if (spot === undefined) {
const coords = entry.spot ? entry.spot.coords : entry.coords;
row.spots.push({
id: spotId,
placeId: entry.zoneId,
mapId: entry.mapId,
done: 0,
total: 0,
coords: coords,
fishes: []
});
spot = row.spots[row.spots.length - 1];
}
if (type === 0) {
const parameter = fishParameter[entry.id];
const fish: any = {
id: spot.id,
itemId: entry.itemId,
level: entry.level,
icon: entry.icon,
data: this.getFshData({ ...entry, id: entry.spot.id })
};
if (parameter) {
fish.timed = parameter.timed;
fish.weathered = parameter.weathered;
}
spot.fishes.push(fish);
} else {
const node = spearFishingNodes.find(n => n.id === entry.itemId);
const data = this.getFshData(node);
spot.fishes.push({
id: spot.id,
itemId: node.itemId,
level: node.level,
icon: node.icon,
data: data,
timed: data[0] && data[0].timed
});
}
});
return display;
});
}),
tap(() => this.loading = false),
shareReplay(1)
);
this.display$ = combineLatest([completeDisplay$, this.completion$]).pipe(
map(([completeDisplay, completion]: [any, number[]]) => {
return completeDisplay.map(display => {
const uniqueDisplayDone = [];
const uniqueDisplayTotal = [];
display.tabs.forEach(area => {
const uniqueMapDone = [];
const uniqueMapTotal = [];
area.spots.forEach(spot => {
const uniqueSpotDone = [];
const uniqueSpotTotal = [];
spot.fishes.forEach(fish => {
fish.done = completion.indexOf(fish.itemId) > -1;
if (uniqueMapTotal.indexOf(fish.itemId) === -1) {
uniqueMapTotal.push(fish.itemId);
}
if (uniqueDisplayTotal.indexOf(fish.itemId) === -1) {
uniqueDisplayTotal.push(fish.itemId);
}
if (uniqueSpotTotal.indexOf(fish.itemId) === -1) {
uniqueSpotTotal.push(fish.itemId);
}
if (fish.done) {
if (uniqueDisplayDone.indexOf(fish.itemId) === -1) {
uniqueDisplayDone.push(fish.itemId);
}
if (uniqueMapDone.indexOf(fish.itemId) === -1) {
uniqueMapDone.push(fish.itemId);
}
if (uniqueSpotDone.indexOf(fish.itemId) === -1) {
uniqueSpotDone.push(fish.itemId);
}
}
});
spot.total = uniqueSpotTotal.length;
spot.done = uniqueSpotDone.length;
});
area.spots = area.spots
.sort((a, b) => {
if (a.id > 20000) {
return fshSpearLogOrder.indexOf(this.l12n.getPlace(a.placeId).en) - fshSpearLogOrder.indexOf(this.l12n.getPlace(b.placeId).en);
}
return fshLogOrder.indexOf(this.l12n.getPlace(a.placeId).en) - fshLogOrder.indexOf(this.l12n.getPlace(b.placeId).en);
});
area.total = uniqueMapTotal.length;
area.done = uniqueMapDone.length;
});
display.tabs = display.tabs
.sort((a, b) => {
if (a.id > 20000) {
return fshSpearLogOrder.indexOf(this.l12n.getPlace(a.placeId).en) - fshSpearLogOrder.indexOf(this.l12n.getPlace(b.placeId).en);
}
return fshLogOrder.indexOf(this.l12n.getPlace(a.placeId).en) - fshLogOrder.indexOf(this.l12n.getPlace(b.placeId).en);
});
display.total = uniqueDisplayTotal.length;
display.done = uniqueDisplayDone.length;
return display;
});
}),
shareReplay(1)
);
this.tabsDisplay$ = combineLatest(this.display$, this.type$).pipe(
map(([display, type]) => display[type])
);
this.pageDisplay$ = combineLatest(this.tabsDisplay$, this.spotId$).pipe(
map(([display, spotId]) => {
const area = display.tabs.find(a => a.spots.some(spot => spot.id === spotId));
if (area !== undefined) {
return area.spots.find(spot => spot.id === spotId);
}
return null;
})
);
}
} | the_stack |
import { AddressType, Utxo } from '../../Wallet'
import { generateTestTransaction } from './test_transaction'
import { addressDiscoveryWithinBounds, generateMnemonic } from '../../Utils'
import { InMemoryKeyManager, RustCardano } from '../../lib'
import { ChainSettings } from '../../Cardano'
/*
  This seed generates the following "chain state":

  Account 1 = mnemonic1:
  - Has some UTXOs among the first BIP44 range (first 20 addresses) of external addresses
  - Account 1 can be considered a genesis account, as it moved funds to the other accounts, which started without UTXOs
  - Its mnemonic is not exposed: without transaction outputs it is of no use

  Account 2 = mnemonic2:
  - Receipt addresses only (no change), within the first and second BIP44 ranges (first 40 addresses)

  Account 3 = mnemonic3:
  - No UTXOs
*/
// Generates the deterministic "chain state" described above: two test
// transactions moving funds from a throwaway genesis account to receipt
// addresses of a second account, plus the matching UTXO set.
export async function generateSeed () {
  // Three fresh accounts; mnemonic1 belongs to the genesis account and is
  // deliberately not included in the returned seed.
  const mnemonic1 = generateMnemonic()
  const mnemonic2 = generateMnemonic()
  const mnemonic3 = generateMnemonic()
  const account1 = await InMemoryKeyManager(RustCardano, { password: '', mnemonic: mnemonic1 }).publicParentKey()
  const account2 = await InMemoryKeyManager(RustCardano, { password: '', mnemonic: mnemonic2 }).publicParentKey()
  // First 40 external addresses of account 2 (spans two BIP44 gap-limit ranges).
  const account2Addresses = addressDiscoveryWithinBounds(RustCardano, {
    account: account2,
    lowerBound: 0,
    upperBound: 39,
    type: AddressType.external
  }, ChainSettings.mainnet)
  // tx1 pays into account 2's first BIP44 range (indexes 0, 5, 10).
  const tx1 = generateTestTransaction({
    account: account1,
    testInputs: [
      { type: AddressType.external, value: '1000000' },
      { type: AddressType.external, value: '2000000' },
      { type: AddressType.external, value: '3000000' },
      { type: AddressType.external, value: '2000000' },
      { type: AddressType.external, value: '1000000' }
    ],
    lowerBoundOfAddresses: 0,
    testOutputs: [
      { address: account2Addresses[0].address, value: '400000' },
      { address: account2Addresses[5].address, value: '200000' },
      { address: account2Addresses[10].address, value: '200000' }
    ]
  })
  // tx2 pays into indexes 15, 20, 25, reaching into the second range.
  const tx2 = generateTestTransaction({
    account: account1,
    testInputs: [
      { type: AddressType.external, value: '1000000' },
      { type: AddressType.external, value: '2000000' },
      { type: AddressType.external, value: '3000000' },
      { type: AddressType.external, value: '2000000' },
      { type: AddressType.external, value: '1000000' }
    ],
    lowerBoundOfAddresses: 4,
    testOutputs: [
      { address: account2Addresses[15].address, value: '400000' },
      { address: account2Addresses[20].address, value: '200000' },
      { address: account2Addresses[25].address, value: '200000' }
    ]
  })
  const account1Utxos: Utxo[] = tx1.inputs.concat(tx2.inputs).map(input => {
    // NOTE(review): every genesis UTXO is tagged with tx1's id and index 0,
    // including those backing tx2's inputs — confirm this is intended for
    // the fixture (the committed seed below matches this behaviour).
    return { address: input.value.address, id: tx1.transaction.id(), index: 0, value: '10000000' }
  })
  const account2Utxos: Utxo[] = [
    { id: tx1.transaction.id(), index: 0, address: account2Addresses[0].address, value: '200000' },
    { id: tx1.transaction.id(), index: 1, address: account2Addresses[5].address, value: '200000' },
    { id: tx1.transaction.id(), index: 2, address: account2Addresses[10].address, value: '200000' },
    { id: tx2.transaction.id(), index: 3, address: account2Addresses[15].address, value: '200000' },
    { id: tx2.transaction.id(), index: 4, address: account2Addresses[20].address, value: '200000' },
    { id: tx2.transaction.id(), index: 5, address: account2Addresses[25].address, value: '200000' }
  ]
  return {
    accountMnemonics: {
      // Shifted on purpose: the genesis mnemonic (mnemonic1) is never
      // exposed, so "account1"/"account2" here map to mnemonic2/mnemonic3.
      account1: mnemonic2,
      account2: mnemonic3
    },
    transactions: [
      { id: tx1.transaction.id(), inputs: tx1.inputs, outputs: tx1.transaction.toJson().outputs.map((txOut: any) => ({ address: txOut.address, value: String(txOut.value) })) },
      { id: tx2.transaction.id(), inputs: tx2.inputs, outputs: tx2.transaction.toJson().outputs.map((txOut: any) => ({ address: txOut.address, value: String(txOut.value) })) }
    ],
    utxos: account1Utxos.concat(account2Utxos)
  }
}
// Uncomment and run this file with `npx ts-node src/test/utils/seed.ts` to regenerate the seed
// generateSeed().then(r => {
// console.log(JSON.stringify(r, null, 4))
// })
// Frozen output of generateSeed(), committed as a deterministic fixture.
// Regenerate with the runner comment above if the wallet format changes.
export const seed = {
  'accountMnemonics': {
    'account1': 'tattoo jar maid bus found common else seminar gym choose flip habit',
    'account2': 'topple easy myth stay glad prison friend display feature antenna marble luxury'
  },
  // Two transfers from the (unexposed) genesis account into account 1's
  // receipt addresses.
  'transactions': [
    {
      'id': 'a3f78a5855a6f0ab16b98f3c060530d844038b49c95bd259ded57d148397a2f0',
      'inputs': [
        {
          'pointer': {
            'id': '54dc5ff4cd5606ad2914cd0f121951e584d48047591514a4384835823d9eb514',
            'index': 0
          },
          'value': {
            'address': 'Ae2tdPwUPEZLVDTRwkaiXxym6fGpzarZB6JriCYA2jnqP7QvqTLPcYhveai',
            'value': '1000000'
          },
          'addressing': {
            'change': 0,
            'index': 0,
            'accountIndex': 0
          }
        },
        {
          'pointer': {
            'id': 'fb869bce80f2baa1b10cb360d677b7dbaba56a6af3607b855b73848de24915b1',
            'index': 1
          },
          'value': {
            'address': 'Ae2tdPwUPEZLWgc7bd75gdNfr24NSFR4X2mEghXLuji6brXGTCFo7vUCTma',
            'value': '2000000'
          },
          'addressing': {
            'change': 0,
            'index': 1,
            'accountIndex': 0
          }
        },
        {
          'pointer': {
            'id': '647cc6c52e192bbaf1ac71226df34f724606f8fe5b81f6f2aa5ad6868665a7fd',
            'index': 2
          },
          'value': {
            'address': 'Ae2tdPwUPEZ3ruXWDNQh7squLAUfDNoK6SMToVHLK26f3sgMNJC1HLgT626',
            'value': '3000000'
          },
          'addressing': {
            'change': 0,
            'index': 2,
            'accountIndex': 0
          }
        },
        {
          'pointer': {
            'id': '5efe355e26734b0e1ee23f61b69bd5e5fc145cf4853d8c5ec8c13d37bf3c8655',
            'index': 3
          },
          'value': {
            'address': 'Ae2tdPwUPEZ3q7rUnMdpDhjWH5xkXPdbGW5hhoWRuPM9rhAX9G5XVHLZds1',
            'value': '2000000'
          },
          'addressing': {
            'change': 0,
            'index': 3,
            'accountIndex': 0
          }
        },
        {
          'pointer': {
            'id': '39954ea98e3d2af94e609020159778031b52c3334428be803b8703e2b555f67d',
            'index': 4
          },
          'value': {
            'address': 'Ae2tdPwUPEZHXuQpkx3nQXfrcr7dYipVGBWvZPG75tozqY2QL589R8C1zvW',
            'value': '1000000'
          },
          'addressing': {
            'change': 0,
            'index': 4,
            'accountIndex': 0
          }
        }
      ],
      'outputs': [
        {
          'address': 'Ae2tdPwUPEZGBjLGCz5zodwQSQm21QrXMZFsFR3iFJyFVAYLK6XUUfZRNVu',
          'value': '198036'
        },
        {
          'address': 'Ae2tdPwUPEZJSYtd7jXPqduaKBdxTLKnXY8eTDJrWegvB7rAoS5EFDya6Eh',
          'value': '200000'
        },
        {
          'address': 'Ae2tdPwUPEZ1HJib4J5jjte5wfcs3zx31vr1AJSnmJoFrdvdtRYB2T9Jdc6',
          'value': '200000'
        }
      ]
    },
    {
      'id': '15d8f7e8c46aea31e1d38ca1144ce5e4a60926d811968980d47ff9b9e835e01a',
      'inputs': [
        {
          'pointer': {
            'id': '16314dce8abf6973e114f24ec75a4376d918a3305127971ad8af989d63d01fc6',
            'index': 0
          },
          'value': {
            'address': 'Ae2tdPwUPEZHXuQpkx3nQXfrcr7dYipVGBWvZPG75tozqY2QL589R8C1zvW',
            'value': '1000000'
          },
          'addressing': {
            'change': 0,
            'index': 4,
            'accountIndex': 0
          }
        },
        {
          'pointer': {
            'id': '7d26a202cb50a0df98bc1749d8ddcb47fb545192f42294a6a12131883577878f',
            'index': 1
          },
          'value': {
            'address': 'Ae2tdPwUPEZJnWY4TjFiL9meehpAEtnyY39orQrPkpeJGvrPfXAoKcvqFUQ',
            'value': '2000000'
          },
          'addressing': {
            'change': 0,
            'index': 5,
            'accountIndex': 0
          }
        },
        {
          'pointer': {
            'id': '851f2931b4640e91cb03a4fda994c5383dee1a5350ebc87ad25c14fbdd5b6c34',
            'index': 2
          },
          'value': {
            'address': 'Ae2tdPwUPEZ6guYM1SN2uhPZnWVKTz91epqeTdvSfJ4ThxfBe59p4xhgVae',
            'value': '3000000'
          },
          'addressing': {
            'change': 0,
            'index': 6,
            'accountIndex': 0
          }
        },
        {
          'pointer': {
            'id': '3a5c577a20161d0280506df9bd77feaae9d9d1e0b9eb4ee5fe593ab6e5af40a6',
            'index': 3
          },
          'value': {
            'address': 'Ae2tdPwUPEZ6JDK3Cisn8YYENZM8xxsDPQAe1SG7nYg2wRiUdb1GuT9zaYH',
            'value': '2000000'
          },
          'addressing': {
            'change': 0,
            'index': 7,
            'accountIndex': 0
          }
        },
        {
          'pointer': {
            'id': '5f5257fe52d5b4a673002af2d43d89fd875d6d3f4a3b78182a0aee07eb657085',
            'index': 4
          },
          'value': {
            'address': 'Ae2tdPwUPEYzTfq1F7xf2SGaiJyPPvZFnVqPzjsUWpjrcjzspU8J2WXqzEE',
            'value': '1000000'
          },
          'addressing': {
            'change': 0,
            'index': 8,
            'accountIndex': 0
          }
        }
      ],
      'outputs': [
        {
          'address': 'Ae2tdPwUPEYztzVbFx9o4sjiXEJ9X1Ro1RVhVABLzcn9ptW7iVfYzoWrS1j',
          'value': '198036'
        },
        {
          'address': 'Ae2tdPwUPEZNHdsZxrSgsuunfXKETPjf37Z68VLDAvC2F5KjqZSr3MPHt6L',
          'value': '200000'
        },
        {
          'address': 'Ae2tdPwUPEZ65KZW6P6ZGEDUJaBWiNiDge1fx48e66Fw7gv1CQ2cmWoL55M',
          'value': '200000'
        }
      ]
    }
  ],
  // Genesis-account inputs (all tagged with tx1's id / index 0), followed by
  // the receipt-address UTXOs of the exposed account.
  'utxos': [
    {
      'address': 'Ae2tdPwUPEZLVDTRwkaiXxym6fGpzarZB6JriCYA2jnqP7QvqTLPcYhveai',
      'id': 'a3f78a5855a6f0ab16b98f3c060530d844038b49c95bd259ded57d148397a2f0',
      'index': 0,
      'value': '10000000'
    },
    {
      'address': 'Ae2tdPwUPEZLWgc7bd75gdNfr24NSFR4X2mEghXLuji6brXGTCFo7vUCTma',
      'id': 'a3f78a5855a6f0ab16b98f3c060530d844038b49c95bd259ded57d148397a2f0',
      'index': 0,
      'value': '10000000'
    },
    {
      'address': 'Ae2tdPwUPEZ3ruXWDNQh7squLAUfDNoK6SMToVHLK26f3sgMNJC1HLgT626',
      'id': 'a3f78a5855a6f0ab16b98f3c060530d844038b49c95bd259ded57d148397a2f0',
      'index': 0,
      'value': '10000000'
    },
    {
      'address': 'Ae2tdPwUPEZ3q7rUnMdpDhjWH5xkXPdbGW5hhoWRuPM9rhAX9G5XVHLZds1',
      'id': 'a3f78a5855a6f0ab16b98f3c060530d844038b49c95bd259ded57d148397a2f0',
      'index': 0,
      'value': '10000000'
    },
    {
      'address': 'Ae2tdPwUPEZHXuQpkx3nQXfrcr7dYipVGBWvZPG75tozqY2QL589R8C1zvW',
      'id': 'a3f78a5855a6f0ab16b98f3c060530d844038b49c95bd259ded57d148397a2f0',
      'index': 0,
      'value': '10000000'
    },
    {
      'address': 'Ae2tdPwUPEZHXuQpkx3nQXfrcr7dYipVGBWvZPG75tozqY2QL589R8C1zvW',
      'id': 'a3f78a5855a6f0ab16b98f3c060530d844038b49c95bd259ded57d148397a2f0',
      'index': 0,
      'value': '10000000'
    },
    {
      'address': 'Ae2tdPwUPEZJnWY4TjFiL9meehpAEtnyY39orQrPkpeJGvrPfXAoKcvqFUQ',
      'id': 'a3f78a5855a6f0ab16b98f3c060530d844038b49c95bd259ded57d148397a2f0',
      'index': 0,
      'value': '10000000'
    },
    {
      'address': 'Ae2tdPwUPEZ6guYM1SN2uhPZnWVKTz91epqeTdvSfJ4ThxfBe59p4xhgVae',
      'id': 'a3f78a5855a6f0ab16b98f3c060530d844038b49c95bd259ded57d148397a2f0',
      'index': 0,
      'value': '10000000'
    },
    {
      'address': 'Ae2tdPwUPEZ6JDK3Cisn8YYENZM8xxsDPQAe1SG7nYg2wRiUdb1GuT9zaYH',
      'id': 'a3f78a5855a6f0ab16b98f3c060530d844038b49c95bd259ded57d148397a2f0',
      'index': 0,
      'value': '10000000'
    },
    {
      'address': 'Ae2tdPwUPEYzTfq1F7xf2SGaiJyPPvZFnVqPzjsUWpjrcjzspU8J2WXqzEE',
      'id': 'a3f78a5855a6f0ab16b98f3c060530d844038b49c95bd259ded57d148397a2f0',
      'index': 0,
      'value': '10000000'
    },
    {
      'id': 'a3f78a5855a6f0ab16b98f3c060530d844038b49c95bd259ded57d148397a2f0',
      'index': 0,
      'address': 'Ae2tdPwUPEZGBjLGCz5zodwQSQm21QrXMZFsFR3iFJyFVAYLK6XUUfZRNVu',
      'value': '200000'
    },
    {
      'id': 'a3f78a5855a6f0ab16b98f3c060530d844038b49c95bd259ded57d148397a2f0',
      'index': 1,
      'address': 'Ae2tdPwUPEZJSYtd7jXPqduaKBdxTLKnXY8eTDJrWegvB7rAoS5EFDya6Eh',
      'value': '200000'
    },
    {
      'id': 'a3f78a5855a6f0ab16b98f3c060530d844038b49c95bd259ded57d148397a2f0',
      'index': 2,
      'address': 'Ae2tdPwUPEZ1HJib4J5jjte5wfcs3zx31vr1AJSnmJoFrdvdtRYB2T9Jdc6',
      'value': '200000'
    },
    {
      'id': '15d8f7e8c46aea31e1d38ca1144ce5e4a60926d811968980d47ff9b9e835e01a',
      'index': 3,
      'address': 'Ae2tdPwUPEYztzVbFx9o4sjiXEJ9X1Ro1RVhVABLzcn9ptW7iVfYzoWrS1j',
      'value': '200000'
    },
    {
      'id': '15d8f7e8c46aea31e1d38ca1144ce5e4a60926d811968980d47ff9b9e835e01a',
      'index': 4,
      'address': 'Ae2tdPwUPEZNHdsZxrSgsuunfXKETPjf37Z68VLDAvC2F5KjqZSr3MPHt6L',
      'value': '200000'
    },
    {
      'id': '15d8f7e8c46aea31e1d38ca1144ce5e4a60926d811968980d47ff9b9e835e01a',
      'index': 5,
      'address': 'Ae2tdPwUPEZ65KZW6P6ZGEDUJaBWiNiDge1fx48e66Fw7gv1CQ2cmWoL55M',
      'value': '200000'
    }
  ]
}
import * as AdaptiveCards from "adaptivecards";
import { Helpers } from './common/helpers';
import { GetActivitiesMessage } from './common/messages/get-activities-message';
import { GetRemoteDevicesMessage } from './common/messages/get-remote-devices-message';
import { LoginMessage } from './common/messages/login-message';
import { LogoutMessage } from './common/messages/logout-message';
import { Message } from './common/messages/message';
import { OpenOnRemoteDeviceMessage } from './common/messages/open-on-remote-device-message';
// Minimum seconds a page must stay loaded before an activity is recorded.
let minimumTimeOnPage: number = 8;
let accessToken: string;
let theme: string = 'dark';

// Restore the cached access token (if any) from local storage.
// Fix: a missing key yields `undefined`, not `null`, so the original
// `!== null` check always passed; `!= null` covers both.
chrome.storage.local.get('access_token', (data) => {
  if (data.access_token != null) {
    accessToken = data.access_token;
    UpdateLoginState();
  }
});

// Restore synced preferences, falling back to local storage where sync is
// unavailable.
(chrome.storage.sync || chrome.storage.local).get(['min_sec_loaded', 'theme'], (data) => {
  // Min time on page
  if (data.min_sec_loaded) {
    minimumTimeOnPage = data.min_sec_loaded;
    updateMinimumTimeOnPage();
  }

  // Apply the persisted theme, or the default when none is stored.
  const selectedTheme = data.theme || theme;
  theme = selectedTheme;
  document.body.classList.add(selectedTheme);
  (document.getElementById('setting-selected-theme') as HTMLSelectElement).value = selectedTheme;
});
// Reflects the in-memory minimum-time preference into the settings input.
function updateMinimumTimeOnPage() {
  const timeoutInput = document.getElementById('setting-timeout') as HTMLInputElement;
  timeoutInput.value = String(minimumTimeOnPage);
}
// Persists the minimum-time preference and updates the in-memory copy.
function setMinimumTimeOnPage(value: number) {
  minimumTimeOnPage = value;
  const storage = chrome.storage.sync || chrome.storage.local;
  storage.set({ min_sec_loaded: value }, null);
}
// Show the pending or completed login section depending on whether an
// access token is currently held.
function UpdateLoginState() {
  const loggedIn = Boolean(accessToken);
  document.getElementById('section-login-pending').style.display = loggedIn ? 'none' : 'block';
  document.getElementById('section-login-completed').style.display = loggedIn ? 'block' : 'none';
}
// Wires up the whole popup once the DOM is ready: localization, pane
// navigation, the activities and remote-devices views, login/logout and
// the settings controls.
document.addEventListener('DOMContentLoaded', () => {
  // Translate the page
  Helpers.LocalizeHtmlPage();

  // Set some initial views and states
  UpdateLoginState();
  updateMinimumTimeOnPage();

  // When the user clicks on the activities button
  attachClickEvent('view-activities', () => {
    // Update UI
    document.getElementById('section-main').style.display = 'none';
    document.getElementById('section-activities').style.display = 'block';

    // We are loading
    document.getElementById('activity-status').innerText = Helpers.GetLocalizedString("LoadingText");
    document.getElementById('activity-status').style.display = 'block';

    // Send a message to get activities
    Message.sendMessageWithResult<any>(new GetActivitiesMessage(), (data) => {
      // If the message was not successful, say why
      if (!data.success) {
        document.getElementById('activity-status').innerText = data.reason;
        return;
      }

      // If the array is empty
      if (data.payload.length === 0) {
        document.getElementById('activity-status').innerText = Helpers.GetLocalizedString("ActivitiesNoRecent");
        return;
      }

      // We have loaded; clear the status and any previously rendered cards.
      document.getElementById('activity-status').innerText = '';
      document.getElementById('activity-status').style.display = 'none';
      document.getElementById('activities-holder').innerHTML = '';

      // Get the div that holds all the activities
      const activitiesHolder = document.getElementById('activities-holder');

      // Loop through all the activities
      data.payload.forEach((activity) => {
        // Create an AdaptiveCard instance
        const adaptiveCard = new AdaptiveCards.AdaptiveCard();

        // Host Config defines the style and behavior of a card
        adaptiveCard.hostConfig = new AdaptiveCards.HostConfig({
          containerStyles: {
            default: {
              backgroundColor: "#FF37474F",
              foregroundColors: {
                default: {
                  default: "#FFEEEEEE",
                  subtle: "#FFEEEEEE"
                }
              }
            }
          },
          fontFamily: "Segoe UI, Helvetica Neue, sans-serif"
        });

        // Set the version so the card will render
        activity.visualElements.content.version = "1.0";

        // Parse the card payload
        adaptiveCard.parse(activity.visualElements.content);

        // Render the card to an HTML element:
        const renderedCard = adaptiveCard.render();

        // Insert the card
        // NOTE(review): activationUrl is interpolated into HTML without
        // escaping — confirm the activity feed is a trusted source.
        activitiesHolder.insertAdjacentHTML('beforeend', '<a class="activity-card" target="_blank" href="' + activity.activationUrl + '">' + renderedCard.outerHTML + '</a>');
      });
    });
  });

  // When the user clicks on the devices button
  attachClickEvent('view-devices', () => {
    // Update UI
    document.getElementById('section-main').style.display = 'none';
    document.getElementById('section-remote').style.display = 'block';

    // We are loading
    document.getElementById('remote-status').innerText = Helpers.GetLocalizedString("LoadingText");
    document.getElementById('remote-status').style.display = 'block';

    // Send a message to get the devices
    Message.sendMessageWithResult<any>(new GetRemoteDevicesMessage(), (data) => {
      console.log(data);
      // If the message was not successful, say why
      if (!data.success) {
        document.getElementById('remote-status').innerText = data.reason;
        return;
      }

      // If the array is empty
      if (data.payload.length === 0) {
        document.getElementById('remote-status').innerText = Helpers.GetLocalizedString("ProjectRomeNoDevices");
        return;
      }

      // We have loaded; clear the status and any previously rendered devices.
      document.getElementById('remote-status').innerText = '';
      document.getElementById('remote-status').style.display = 'none';
      document.getElementById('devices-holder').innerHTML = '';

      // Get the div that holds all the devices
      const devicesHolder = document.getElementById('devices-holder');

      // Loop through all the devices
      data.payload.forEach((device) => {
        // Get the description, falling back to "<Platform> Device".
        const description = device.Model == null ? device.Platform + ' Device' : device.Model;

        // Update UI
        // NOTE(review): device fields are interpolated into HTML without
        // escaping — confirm device names are trusted.
        devicesHolder.insertAdjacentHTML('beforeend', '<a id="remote-device-' + device.id + '" class="inline-button"><div class="status-icon ' + device.Status + '"></div><p>' + device.Name + '<p class="description">' + description + '</p></p></a>');

        // Attach click event to item: open the current tab's URL remotely.
        attachClickEvent('remote-device-' + device.id, () => {
          // Query for the active tab in this window
          chrome.tabs.query({active: true, currentWindow: true}, (tabs) => {
            // Send the message and then close the popup
            Message.sendMessage(new OpenOnRemoteDeviceMessage(device.id, tabs[0].url));
            closeMenu();
          });
        });
      });
    });
  });

  // When the user clicks the settings button
  attachClickEvent('settings', () => {
    document.getElementById('section-main').style.display = 'none';
    document.getElementById('section-settings').style.display = 'block';
  });

  // When the user closes the settings pane
  attachClickEvent('exit-settings', () => {
    document.getElementById('section-main').style.display = 'block';
    document.getElementById('section-settings').style.display = 'none';
  });

  // When the user clicks on the about button
  attachClickEvent('about', () => {
    document.getElementById('section-main').style.display = 'none';
    document.getElementById('section-about').style.display = 'block';
  });

  // When the user closes the about pane
  attachClickEvent('exit-about', () => {
    document.getElementById('section-main').style.display = 'block';
    document.getElementById('section-about').style.display = 'none';
  });

  // When the user closes the devices pane
  attachClickEvent('exit-remote', () => {
    document.getElementById('section-main').style.display = 'block';
    document.getElementById('section-remote').style.display = 'none';
  });

  // When the user closes the activities pane
  attachClickEvent('exit-activities', () => {
    document.getElementById('section-main').style.display = 'block';
    document.getElementById('section-activities').style.display = 'none';
  });

  // login flow
  attachClickEvent('login', () => {
    // Send the message and close
    Message.sendMessage(new LoginMessage());
    closeMenu();
  });

  // log out flow
  attachClickEvent('logout', () => {
    // Send the message and close
    Message.sendMessage(new LogoutMessage('User initiated logout'));
    closeMenu();
  });

  // When the user changes the timeout field
  document.getElementById('setting-timeout').addEventListener('change', () => {
    setMinimumTimeOnPage(Number((document.getElementById('setting-timeout') as HTMLInputElement).value));
  });

  // When the user changes the theme combo box
  document.getElementById('setting-selected-theme').addEventListener('change', () => {
    // Get the new theme
    const newTheme = (document.getElementById('setting-selected-theme') as HTMLSelectElement).value;

    // Save the theme
    (chrome.storage.sync || chrome.storage.local).set({
      theme : newTheme
    }, null);

    // Remove old theme
    document.body.classList.remove(theme);

    // Update variable
    theme = newTheme;

    // Add new theme
    document.body.classList.add(theme);
  });
});
// Convenience wrapper: binds a click handler to the element with the given id.
function attachClickEvent(elementName: string, callback: (ev: MouseEvent) => any) {
  const element = document.getElementById(elementName);
  element.addEventListener('click', callback);
}
// Closes the popup. On desktop this simply closes the window; on Android
// (where the popup is a full tab) it re-activates the current tab instead.
function closeMenu() {
  // Chrome exposes only the callback-based `chrome` API; Firefox also
  // provides the promise-based `browser` namespace. The `typeof` guard is
  // required so referencing an undeclared `browser` does not throw.
  if (typeof browser === 'undefined' || !browser) {
    chrome.runtime.getPlatformInfo((platInfo) => {
      if (platInfo.os === 'android') {
        chrome.tabs.update({active: true});
      } else {
        window.close();
      }
    });
  } else {
    browser.runtime.getPlatformInfo().then((platInfo) => {
      if (platInfo.os === 'android') {
        // TODO, USE BELOW
        // browser.tabs.update({active: true});
        chrome.tabs.update({active: true});
      } else {
        window.close();
      }
    });
  }
}
import Page = require('../../../../base/Page');
import Response = require('../../../../http/response');
import V1 = require('../../V1');
import { SerializableClass } from '../../../../interfaces';
// Compliance status values an Evaluation resource can report.
type TrustProductsEvaluationsStatus = 'compliant'|'noncompliant';

/**
 * Initialize the TrustProductsEvaluationsList
 *
 * @param version - Version of the resource
 * @param trustProductSid - The unique string that identifies the resource
 * @returns A list instance scoped to the given trust product
 */
declare function TrustProductsEvaluationsList(version: V1, trustProductSid: string): TrustProductsEvaluationsListInstance;
/**
 * List object for the Evaluations of a single Trust Product: supports
 * create/each/list/page operations, and is also callable as a function
 * with an Evaluation sid to obtain a context for that specific resource.
 */
interface TrustProductsEvaluationsListInstance {
/**
 * @param sid - sid of instance
 */
(sid: string): TrustProductsEvaluationsContext;
/**
 * create a TrustProductsEvaluationsInstance
 *
 * @param opts - Options for request
 * @param callback - Callback to handle processed record
 */
create(opts: TrustProductsEvaluationsListInstanceCreateOptions, callback?: (error: Error | null, item: TrustProductsEvaluationsInstance) => any): Promise<TrustProductsEvaluationsInstance>;
/**
 * Streams TrustProductsEvaluationsInstance records from the API.
 *
 * This operation lazily loads records as efficiently as possible until the limit
 * is reached.
 *
 * The results are passed into the callback function, so this operation is memory
 * efficient.
 *
 * If a function is passed as the first argument, it will be used as the callback
 * function.
 *
 * @param callback - Function to process each record
 */
each(callback?: (item: TrustProductsEvaluationsInstance, done: (err?: Error) => void) => void): void;
/**
 * Streams TrustProductsEvaluationsInstance records from the API.
 *
 * This operation lazily loads records as efficiently as possible until the limit
 * is reached.
 *
 * The results are passed into the callback function, so this operation is memory
 * efficient.
 *
 * If a function is passed as the first argument, it will be used as the callback
 * function.
 *
 * @param opts - Options for request
 * @param callback - Function to process each record
 */
each(opts?: TrustProductsEvaluationsListInstanceEachOptions, callback?: (item: TrustProductsEvaluationsInstance, done: (err?: Error) => void) => void): void;
/**
 * Constructs a trust_products_evaluations
 *
 * @param sid - The unique string that identifies the Evaluation resource
 */
get(sid: string): TrustProductsEvaluationsContext;
/**
 * Retrieve a single target page of TrustProductsEvaluationsInstance records from
 * the API.
 *
 * The request is executed immediately.
 *
 * If a function is passed as the first argument, it will be used as the callback
 * function.
 *
 * @param callback - Callback to handle list of records
 */
getPage(callback?: (error: Error | null, items: TrustProductsEvaluationsPage) => any): Promise<TrustProductsEvaluationsPage>;
/**
 * Retrieve a single target page of TrustProductsEvaluationsInstance records from
 * the API.
 *
 * The request is executed immediately.
 *
 * If a function is passed as the first argument, it will be used as the callback
 * function.
 *
 * @param targetUrl - API-generated URL for the requested results page
 * @param callback - Callback to handle list of records
 */
getPage(targetUrl?: string, callback?: (error: Error | null, items: TrustProductsEvaluationsPage) => any): Promise<TrustProductsEvaluationsPage>;
/**
 * Lists TrustProductsEvaluationsInstance records from the API as a list.
 *
 * If a function is passed as the first argument, it will be used as the callback
 * function.
 *
 * @param callback - Callback to handle list of records
 */
list(callback?: (error: Error | null, items: TrustProductsEvaluationsInstance[]) => any): Promise<TrustProductsEvaluationsInstance[]>;
/**
 * Lists TrustProductsEvaluationsInstance records from the API as a list.
 *
 * If a function is passed as the first argument, it will be used as the callback
 * function.
 *
 * @param opts - Options for request
 * @param callback - Callback to handle list of records
 */
list(opts?: TrustProductsEvaluationsListInstanceOptions, callback?: (error: Error | null, items: TrustProductsEvaluationsInstance[]) => any): Promise<TrustProductsEvaluationsInstance[]>;
/**
 * Retrieve a single page of TrustProductsEvaluationsInstance records from the API.
 *
 * The request is executed immediately.
 *
 * If a function is passed as the first argument, it will be used as the callback
 * function.
 *
 * @param callback - Callback to handle list of records
 */
page(callback?: (error: Error | null, items: TrustProductsEvaluationsPage) => any): Promise<TrustProductsEvaluationsPage>;
/**
 * Retrieve a single page of TrustProductsEvaluationsInstance records from the API.
 *
 * The request is executed immediately.
 *
 * If a function is passed as the first argument, it will be used as the callback
 * function.
 *
 * @param opts - Options for request
 * @param callback - Callback to handle list of records
 */
page(opts?: TrustProductsEvaluationsListInstancePageOptions, callback?: (error: Error | null, items: TrustProductsEvaluationsPage) => any): Promise<TrustProductsEvaluationsPage>;
/**
 * Provide a user-friendly representation
 */
toJSON(): any;
}
/**
 * Options to pass to create
 *
 * @property policySid - The unique string of a policy (required; the only
 *                       field accepted by create)
 */
interface TrustProductsEvaluationsListInstanceCreateOptions {
policySid: string;
}
/**
 * Options to pass to each
 *
 * @property callback -
 *                         Function to process each record. If this and a positional
 *                         callback are passed, this one will be used
 * @property done - Function to be called upon completion of streaming
 * @property limit -
 *                         Upper limit for the number of records to return.
 *                         each() guarantees never to return more than limit.
 *                         Default is no limit
 * @property pageSize -
 *                         Number of records to fetch per request,
 *                         when not set will use the default value of 50 records.
 *                         If no pageSize is defined but a limit is defined,
 *                         each() will attempt to read the limit with the most efficient
 *                         page size, i.e. min(limit, 1000)
 */
interface TrustProductsEvaluationsListInstanceEachOptions {
callback?: (item: TrustProductsEvaluationsInstance, done: (err?: Error) => void) => void;
done?: Function; // invoked once streaming completes
limit?: number;
pageSize?: number;
}
/**
 * Options to pass to list
 *
 * @property limit -
 *                         Upper limit for the number of records to return.
 *                         list() guarantees never to return more than limit.
 *                         Default is no limit
 * @property pageSize -
 *                         Number of records to fetch per request,
 *                         when not set will use the default value of 50 records.
 *                         If no pageSize is defined but a limit is defined,
 *                         list() will attempt to read the limit with the most
 *                         efficient page size, i.e. min(limit, 1000)
 */
interface TrustProductsEvaluationsListInstanceOptions {
limit?: number;
pageSize?: number;
}
/**
 * Options to pass to page
 *
 * @property pageNumber - Page Number, this value is simply for client state
 * @property pageSize - Number of records to return, defaults to 50
 * @property pageToken - PageToken provided by the API
 */
interface TrustProductsEvaluationsListInstancePageOptions {
pageNumber?: number;
pageSize?: number;
pageToken?: string; // opaque token from a previous page response
}
// Raw wire payload: a single Evaluation resource merged with Twilio's
// standard paging fields.
interface TrustProductsEvaluationsPayload extends TrustProductsEvaluationsResource, Page.TwilioResponsePayload {
}
// Shape of one Evaluation resource as returned by the REST API (snake_case
// keys; TrustProductsEvaluationsInstance re-exposes them in camelCase).
interface TrustProductsEvaluationsResource {
account_sid: string;
date_created: Date;
policy_sid: string;
results: object[];
sid: string;
status: TrustProductsEvaluationsStatus;
trust_product_sid: string;
url: string;
}
// Path solution: identifiers needed to rebuild the list URL when paging
// (passed to the TrustProductsEvaluationsPage constructor).
interface TrustProductsEvaluationsSolution {
trustProductSid?: string;
}
/**
 * Context addressing one specific Evaluation resource (by trust product
 * sid and evaluation sid); exposes the fetch operation for it.
 */
declare class TrustProductsEvaluationsContext {
/**
 * Initialize the TrustProductsEvaluationsContext
 *
 * @param version - Version of the resource
 * @param trustProductSid - The unique string that identifies the resource
 * @param sid - The unique string that identifies the Evaluation resource
 */
constructor(version: V1, trustProductSid: string, sid: string);
/**
 * fetch a TrustProductsEvaluationsInstance
 *
 * @param callback - Callback to handle processed record
 */
fetch(callback?: (error: Error | null, items: TrustProductsEvaluationsInstance) => any): Promise<TrustProductsEvaluationsInstance>;
/**
 * Provide a user-friendly representation
 */
toJSON(): any;
}
/**
 * A single Evaluation of a Trust Product. Exposes the raw resource's
 * snake_case fields as camelCase properties.
 */
declare class TrustProductsEvaluationsInstance extends SerializableClass {
/**
 * Initialize the TrustProductsEvaluationsInstance
 *
 * @param version - Version of the resource
 * @param payload - The instance payload
 * @param trustProductSid - The unique string that identifies the resource
 * @param sid - The unique string that identifies the Evaluation resource
 */
constructor(version: V1, payload: TrustProductsEvaluationsPayload, trustProductSid: string, sid: string);
// Context this instance delegates fetch() to — presumably created lazily; TODO confirm
private _proxy: TrustProductsEvaluationsContext;
accountSid: string;
dateCreated: Date;
/**
 * fetch a TrustProductsEvaluationsInstance
 *
 * @param callback - Callback to handle processed record
 */
fetch(callback?: (error: Error | null, items: TrustProductsEvaluationsInstance) => any): Promise<TrustProductsEvaluationsInstance>;
policySid: string;
results: object[];
sid: string;
status: TrustProductsEvaluationsStatus;
/**
 * Provide a user-friendly representation
 */
toJSON(): any;
trustProductSid: string;
url: string;
}
/**
 * One page of Evaluation results; getInstance() turns each raw payload
 * into a TrustProductsEvaluationsInstance.
 */
declare class TrustProductsEvaluationsPage extends Page<V1, TrustProductsEvaluationsPayload, TrustProductsEvaluationsResource, TrustProductsEvaluationsInstance> {
/**
 * Initialize the TrustProductsEvaluationsPage
 *
 * @param version - Version of the resource
 * @param response - Response from the API
 * @param solution - Path solution
 */
constructor(version: V1, response: Response<string>, solution: TrustProductsEvaluationsSolution);
/**
 * Build an instance of TrustProductsEvaluationsInstance
 *
 * @param payload - Payload response from the API
 */
getInstance(payload: TrustProductsEvaluationsPayload): TrustProductsEvaluationsInstance;
/**
 * Provide a user-friendly representation
 */
toJSON(): any;
}
export { TrustProductsEvaluationsContext, TrustProductsEvaluationsInstance, TrustProductsEvaluationsList, TrustProductsEvaluationsListInstance, TrustProductsEvaluationsListInstanceCreateOptions, TrustProductsEvaluationsListInstanceEachOptions, TrustProductsEvaluationsListInstanceOptions, TrustProductsEvaluationsListInstancePageOptions, TrustProductsEvaluationsPage, TrustProductsEvaluationsPayload, TrustProductsEvaluationsResource, TrustProductsEvaluationsSolution, TrustProductsEvaluationsStatus }
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.