import { createSlice, PayloadAction } from '@reduxjs/toolkit';
import { ILayoutSettings } from 'app-shared/types';
import * as FormLayoutTypes from '../formDesignerTypes';
import { actions, moduleName } from './formLayoutActions';
import { getLayoutSettingsSchemaUrl } from '../../../utils/urlHelper';
import { sortArray } from '../../../utils/arrayHelpers/arrayLogic';
/**
 * Redux state for the form designer's layout editor.
 * The `layouts` map (keyed by layout/page name) comes from IFormDesignerLayout.
 */
export interface IFormLayoutState extends IFormDesignerLayout {
  fetching: boolean;        // a form-layout fetch is in flight
  fetched: boolean;         // the last fetch completed successfully
  error: Error;             // last error raised by a layout action, or null
  saving: boolean;          // a save request is in flight
  unSavedChanges: boolean;  // true when local edits have not been persisted
  activeContainer: string;  // id of the container currently being edited
  activeList: any;          // current selection; shape set by callers -- TODO tighten type
  selectedLayout: string;   // key into `layouts` for the page being edited
  layoutSettings: ILayoutSettings; // page order plus schema reference
}
// Initial state: nothing loaded or selected; the page order starts empty and
// the settings carry only the schema URL.
const initialState: IFormLayoutState = {
  layouts: {},
  fetching: false,
  fetched: false,
  error: null,
  saving: false,
  unSavedChanges: false,
  activeContainer: '',
  activeList: [],
  selectedLayout: 'default',
  layoutSettings: { $schema: getLayoutSettingsSchemaUrl(), pages: { order: [] } },
};
const formLayoutSlice = createSlice({
name: moduleName,
initialState,
reducers: {
// Marks a container as the one currently being edited.
// NOTE(review): invoking `callback` inside a reducer is a side effect and
// breaks reducer purity -- consider moving it to the saga/thunk layer.
addActiveFormContainerFulfilled: (state, action: PayloadAction<FormLayoutTypes.IAddActiveFormContainerAction>) => {
  const { containerId, callback } = action.payload;
  if (callback) {
    callback(containerId);
  }
  state.activeContainer = containerId;
},
// Records the error from a failed "add application metadata" request.
addApplicationMetadataRejected: (state, action: PayloadAction<FormLayoutTypes.IFormDesignerActionRejected>) => {
  const { error } = action.payload;
  state.error = error;
},
// Inserts a newly created component into the selected layout at `position`
// within `containerId`'s order.
// NOTE(review): `callback` is a side effect inside a reducer.
addFormComponentFulfilled: (state, action: PayloadAction<FormLayoutTypes.IAddFormComponentActionFulfilled>) => {
  const {
    component,
    id,
    position,
    containerId,
    callback,
  } = action.payload;
  if (callback) {
    callback(component, id);
  }
  const selectedLayout = state.layouts[state.selectedLayout];
  selectedLayout.components[id] = component;
  // Create the container's order list on first use.
  if (!selectedLayout.order[containerId]) {
    selectedLayout.order[containerId] = [];
  }
  // Consistency fix: reuse the captured draft instead of re-indexing state.
  selectedLayout.order[containerId].splice(position, 0, id);
},
// Records the error from a failed "add component" request.
addFormComponentRejected: (state, action: PayloadAction<FormLayoutTypes.IFormDesignerActionRejected>) => {
  const { error } = action.payload;
  state.error = error;
},
// Inserts a batch of components into the selected layout at `position`
// within `containerId`'s order.
// NOTE(review): `callback` is a side effect inside a reducer.
addFormComponentsFulfilled: (state, action: PayloadAction<FormLayoutTypes.IAddFormComponentsActionFulfilled>) => {
  const {
    components,
    ids,
    position,
    containerId,
    callback,
  } = action.payload;
  if (callback) {
    callback(components, ids);
  }
  const selectedLayout = state.layouts[state.selectedLayout];
  selectedLayout.components = {
    ...selectedLayout.components,
    ...components,
  };
  // Robustness fix: create the order list on first use (mirrors
  // addFormComponentFulfilled); previously a missing list made splice throw.
  if (!selectedLayout.order[containerId]) {
    selectedLayout.order[containerId] = [];
  }
  selectedLayout.order[containerId].splice(position, 0, ...ids);
},
// Adds a new (empty) container to the selected layout and places it either
// inside `addToId` or inside `baseContainerId`, at `destinationIndex` when
// one is given, otherwise at the end.
// NOTE(review): `callback` is a side effect inside a reducer.
addFormContainerFulfilled: (state, action: PayloadAction<FormLayoutTypes.IAddFormContainerActionFulfilled>) => {
  const {
    container,
    id,
    positionAfterId,
    addToId,
    baseContainerId,
    callback,
    destinationIndex,
  } = action.payload;
  if (callback) {
    callback(container, id);
  }
  const selectedLayout = state.layouts[state.selectedLayout];
  selectedLayout.containers[id] = container;
  selectedLayout.order[id] = [];
  if (!baseContainerId) return;
  // Readability fix: the original `!destinationIndex === false` double
  // negative means "destinationIndex is truthy"; 0 is also a valid index.
  const hasDestination = !!destinationIndex || destinationIndex === 0;
  if (addToId) {
    if (hasDestination) {
      selectedLayout.order[addToId].splice(destinationIndex, 0, id);
    } else {
      selectedLayout.order[addToId].push(id);
    }
    if (positionAfterId) {
      selectedLayout.order[baseContainerId].splice(
        selectedLayout.order[baseContainerId].indexOf(positionAfterId) + 1, 0, id,
      );
    }
  } else if (hasDestination) {
    selectedLayout.order[baseContainerId].splice(destinationIndex, 0, id);
  } else {
    selectedLayout.order[baseContainerId].push(id);
  }
},
// Records the error from a failed "add container" request.
addFormContainerRejected: (state, action: PayloadAction<FormLayoutTypes.IFormDesignerActionRejected>) => {
  const { error } = action.payload;
  state.error = error;
},
// Replaces the whole layout map and the page order after a layout was added.
addLayoutFulfilled: (state, action: PayloadAction<FormLayoutTypes.IAddLayoutFulfilledAction>) => {
  const { layouts, layoutOrder } = action.payload;
  state.layouts = layouts;
  state.layoutSettings.pages.order = layoutOrder;
},
// Records the error from a failed "add layout" request.
addLayoutRejected: (state, action: PayloadAction<FormLayoutTypes.IFormDesignerActionRejected>) => {
  const { error } = action.payload;
  state.error = error;
},
// Replaces the layout's component map and the target container's order with
// the output produced by a widget.
addWidgetFulfilled: (state, action: PayloadAction<FormLayoutTypes.IAddWidgetActionFulfilled>) => {
  const {
    components,
    containerId,
    layoutId,
    containerOrder,
  } = action.payload;
  state.layouts[layoutId].components = components;
  state.layouts[layoutId].order[containerId] = containerOrder;
},
// Records the error from a failed "add widget" request.
addWidgetRejected: (state, action: PayloadAction<FormLayoutTypes.IFormDesignerActionRejected>) => {
  const { error } = action.payload;
  state.error = error;
},
// Clears the current selection.
deleteActiveListFulfilled: (state) => {
  state.activeList = [];
},
// Records the error from a failed "clear selection" request.
deleteActiveListRejected: (state, action: PayloadAction<FormLayoutTypes.IFormDesignerActionRejected>) => {
  const { error } = action.payload;
  state.error = error;
},
// Records the error from a failed "delete application metadata" request.
deleteApplicationMetadataRejected: (state, action: PayloadAction<FormLayoutTypes.IFormDesignerActionRejected>) => {
  const { error } = action.payload;
  state.error = error;
},
// Removes a single component and its entry in the parent container's order.
deleteFormComponentFulfilled: (state, action: PayloadAction<FormLayoutTypes.IDeleteComponentAction>) => {
  const { id, containerId } = action.payload;
  const selectedLayout = state.layouts[state.selectedLayout];
  delete selectedLayout.components[id];
  const index = selectedLayout.order[containerId].indexOf(id);
  // Guard: indexOf() === -1 previously made splice(-1, 1) remove the
  // container's LAST child instead of doing nothing.
  if (index > -1) {
    selectedLayout.order[containerId].splice(index, 1);
  }
  state.unSavedChanges = true;
  state.error = null;
},
// Records the error from a failed "delete component" request.
deleteFormComponentRejected: (state, action: PayloadAction<FormLayoutTypes.IFormDesignerActionRejected>) => {
  const { error } = action.payload;
  state.error = error;
},
// Removes a batch of components, detaching each one from whichever container
// order references it.
deleteFormComponents: (state, action: PayloadAction<FormLayoutTypes.IDeleteComponentsAction>) => {
  const { components } = action.payload;
  const selectedLayout = state.layouts[state.selectedLayout];
  components.forEach((id) => {
    delete selectedLayout.components[id];
    Object.keys(selectedLayout.order).forEach((cId) => {
      const index = selectedLayout.order[cId].indexOf(id);
      // Guard: previously an id missing from every order list fell back to the
      // first container, and splice(-1, 1) removed an unrelated component.
      if (index > -1) {
        selectedLayout.order[cId].splice(index, 1);
      }
    });
  });
  state.unSavedChanges = true;
  state.error = null;
},
// Removes a container, its order list, and its entry in the parent's order.
deleteFormContainerFulfilled: (state, action: PayloadAction<FormLayoutTypes.IDeleteContainerAction>) => {
  const { id, parentContainerId } = action.payload;
  const selectedLayout = state.layouts[state.selectedLayout];
  delete selectedLayout.containers[id];
  delete selectedLayout.order[id];
  if (parentContainerId) {
    const index = selectedLayout.order[parentContainerId].indexOf(id);
    // Guard: indexOf() === -1 previously made splice(-1, 1) remove the
    // parent's last child instead of doing nothing.
    if (index > -1) {
      selectedLayout.order[parentContainerId].splice(index, 1);
    }
  }
  state.unSavedChanges = true;
  state.error = null;
},
// Records the error from a failed "delete container" request.
deleteFormContainerRejected: (state, action: PayloadAction<FormLayoutTypes.IFormDesignerActionRejected>) => {
  const { error } = action.payload;
  state.error = error;
},
// Deletes a layout (page) and repairs the page order and current selection.
deleteLayoutFulfilled: (state, action: PayloadAction<FormLayoutTypes.IDeleteLayoutAction>) => {
  const { layout } = action.payload;
  delete state.layouts[layout];
  const pageOrder = state.layoutSettings.pages.order;
  const index = pageOrder.indexOf(layout);
  // Guard: indexOf() === -1 previously made splice(-1, 1) drop the LAST page.
  if (index > -1) {
    pageOrder.splice(index, 1);
  }
  if (state.selectedLayout === layout) {
    // NOTE(review): pageOrder may be empty here, leaving selectedLayout
    // undefined -- confirm callers tolerate this.
    state.selectedLayout = pageOrder[0];
  }
},
// Records the error from a failed "delete layout" request.
deleteLayoutRejected: (state, action: PayloadAction<FormLayoutTypes.IFormDesignerActionRejected>) => {
  const { error } = action.payload;
  state.error = error;
},
// Marks the start of a form-layout fetch and clears any stale error.
fetchFormLayout: (state) => {
  state.fetching = true;
  state.fetched = false;
  state.error = null;
},
// Stores the fetched layouts and derives the page order from their keys.
fetchFormLayoutFulfilled: (state, action: PayloadAction<FormLayoutTypes.IFetchFormLayoutFulfilledAction>) => {
  const { formLayout } = action.payload;
  state.fetching = false;
  state.fetched = true;
  state.error = null;
  if (formLayout) {
    state.layouts = formLayout;
    state.layoutSettings.pages.order = Object.keys(formLayout);
  }
},
// Records a failed fetch and resets the in-flight flags.
fetchFormLayoutRejected: (state, action: PayloadAction<FormLayoutTypes.IFormDesignerActionRejected>) => {
  const { error } = action.payload;
  state.fetching = false;
  state.fetched = false;
  state.error = error;
},
// Stores the fetched layout settings (page order, schema, ...).
fetchLayoutSettingsFulfilled: (
  state,
  action: PayloadAction<FormLayoutTypes.IFetchLayoutSettingsFulfilledAction>,
) => {
  const { settings } = action.payload;
  state.layoutSettings = settings;
},
// Records the error from a failed settings fetch.
fetchLayoutSettingsRejected: (state, action: PayloadAction<FormLayoutTypes.IFormDesignerActionRejected>) => {
  const { error } = action.payload;
  state.error = error;
},
// Marks the start of a save.
saveFormLayout: (state) => {
  state.saving = true;
},
// Marks a completed save; local edits are now persisted.
saveFormLayoutFulfilled: (state) => {
  state.saving = false;
  state.unSavedChanges = false;
},
// Marks a failed save; edits remain unsaved.
saveFormLayoutRejected: (state, action: PayloadAction<FormLayoutTypes.IFormDesignerActionRejected>) => {
  const { error } = action.payload;
  state.saving = false;
  state.unSavedChanges = true;
  state.error = error;
},
// Replaces the current selection with the given container list.
updateActiveListFulfilled: (state, action: PayloadAction<FormLayoutTypes.IUpdateActiveListActionFulfilled>) => {
  const { containerList } = action.payload;
  state.activeList = containerList;
},
// Re-sorts the current selection to match the first order list in `orderList`.
updateActiveListOrder: (state, action: PayloadAction<FormLayoutTypes.IUpdateActiveListOrderAction>) => {
  const { containerList, orderList } = action.payload;
  // NOTE(review): `any` sidesteps the index type of orderList -- consider a
  // keyof-based type instead.
  const key: any = Object.keys(orderList)[0];
  const func = sortArray();
  const returnedList = !containerList.length ? [] : func({ array: [...containerList], order: orderList[key] });
  if (returnedList.length > 0) {
    state.activeList = returnedList;
  }
},
// Records the error from a failed "update application metadata" request.
updateApplicationMetadataRejected: (state, action: PayloadAction<FormLayoutTypes.IFormDesignerActionRejected>) => {
  const { error } = action.payload;
  state.error = error;
},
// Renames a container: re-keys the containers map, patches the parent's order
// list, and re-keys the container's own order entry.
updateContainerId: (state, action: PayloadAction<FormLayoutTypes.IUpdateContainerIdFulfilled>) => {
  const { currentId, newId } = action.payload;
  const selectedLayout = state.layouts[state.selectedLayout];
  // update container id
  selectedLayout.containers[newId] = { ...selectedLayout.containers[currentId] };
  delete selectedLayout.containers[currentId];
  // update id in parent container order
  const parentContainer = Object.keys(selectedLayout.order).find((containerId: string) => {
    return (selectedLayout.order[containerId].indexOf(currentId) > -1);
  });
  // Guard: previously a container missing from every order list made
  // `selectedLayout.order[undefined].indexOf` throw.
  if (parentContainer) {
    const parentContainerOrder = selectedLayout.order[parentContainer];
    const containerIndex = parentContainerOrder.indexOf(currentId);
    parentContainerOrder[containerIndex] = newId;
  }
  // update id of the container's own order array
  selectedLayout.order[newId] = selectedLayout.order[currentId];
  delete selectedLayout.order[currentId];
},
// Merges updated properties into a component; when the id changed, also
// re-keys the components map and patches the parent container's order.
// eslint-disable-next-line max-len
updateFormComponent: (state, action: PayloadAction<FormLayoutTypes.IUpdateFormComponentActionFulfilled>) => {
  const { updatedComponent, id } = action.payload;
  const selectedLayoutComponents = state.layouts[state.selectedLayout].components;
  if (id !== updatedComponent.id) {
    const newId = updatedComponent.id;
    selectedLayoutComponents[newId] = {
      ...selectedLayoutComponents[id],
      ...updatedComponent,
    };
    delete selectedLayoutComponents[id];
    // update id in parent container order
    const selectedLayoutOrder = state.layouts[state.selectedLayout].order;
    const parentContainer = Object.keys(selectedLayoutOrder).find((containerId) => {
      return (selectedLayoutOrder[containerId].indexOf(id) > -1);
    });
    // Guard: previously a component missing from every order list made
    // `selectedLayoutOrder[undefined].indexOf` throw.
    if (parentContainer) {
      const parentContainerOrder = selectedLayoutOrder[parentContainer];
      const containerIndex = parentContainerOrder.indexOf(id);
      parentContainerOrder[containerIndex] = newId;
    }
  } else {
    selectedLayoutComponents[id] = {
      ...selectedLayoutComponents[id],
      ...updatedComponent,
    };
  }
  state.unSavedChanges = true;
},
// Replaces the entire container-order map of the selected layout.
updateFormComponentOrder: (state, action: PayloadAction<FormLayoutTypes.IUpdateFormComponentOrderAction>) => {
  const { updatedOrder } = action.payload;
  state.layouts[state.selectedLayout].order = updatedOrder;
},
// Shallow-merges updated properties into an existing container.
updateFormContainer: (state, action: PayloadAction<FormLayoutTypes.IUpdateFormContainerAction>) => {
  const { updatedContainer, id } = action.payload;
  const selectedLayoutContainers = state.layouts[state.selectedLayout].containers;
  selectedLayoutContainers[id] = {
    ...selectedLayoutContainers[id],
    ...updatedContainer,
  };
  state.unSavedChanges = true;
},
// Renames a layout: re-keys the layout map and patches the page order.
// NOTE(review): if oldName is missing from the page order, indexOf() is -1 and
// the write creates a stray "-1" property on the array -- confirm intended.
updateLayoutNameFulfilled: (state, action: PayloadAction<FormLayoutTypes.IUpdateLayoutNameAction>) => {
  const { oldName, newName } = action.payload;
  state.layouts[newName] = { ...state.layouts[oldName] };
  delete state.layouts[oldName];
  const pageOrder = state.layoutSettings.pages.order;
  pageOrder[pageOrder.indexOf(oldName)] = newName;
},
// Records the error from a failed "rename layout" request.
updateLayoutNameRejected: (state, action: PayloadAction<FormLayoutTypes.IFormDesignerActionRejected>) => {
  const { error } = action.payload;
  state.error = error;
},
// Moves a layout one position up or down in the page order.
updateLayoutOrder: (state, action: PayloadAction<FormLayoutTypes.IUpdateLayoutOrderAction>) => {
  const { layout, direction } = action.payload;
  const currentIndex = state.layoutSettings.pages.order.indexOf(layout);
  // Guard: an unknown layout has no position to move from.
  if (currentIndex < 0) return;
  let destination: number;
  if (direction === 'up') {
    destination = currentIndex - 1;
  } else if (direction === 'down') {
    destination = currentIndex + 1;
  } else {
    // Guard: previously an unexpected direction left `destination` undefined,
    // and splice(undefined, 0, ...) silently moved the layout to the front.
    return;
  }
  const newOrder = [...state.layoutSettings.pages.order];
  newOrder.splice(currentIndex, 1);
  newOrder.splice(destination, 0, layout);
  state.layoutSettings.pages.order = newOrder;
},
// Switches which layout/page is being edited in the designer.
updateSelectedLayout: (state, action: PayloadAction<FormLayoutTypes.IUpdateSelectedLayoutAction>) => {
  const { selectedLayout } = action.payload;
  state.selectedLayout = selectedLayout;
},
},
});
// Public action creators: saga-only actions merged with the slice's own.
export const FormLayoutActions = {
  ...actions,
  ...formLayoutSlice.actions,
};
export default formLayoutSlice.reducer;
import * as child_process from 'child_process';
import { stream as fgStream } from 'fast-glob';
import fsModule, { promises as fs } from 'fs';
import gitUrlParse from 'git-url-parse';
import { homedir, tmpdir } from 'os';
import * as path from 'path';
import git from 'isomorphic-git';
import { mkdirP } from '@actions/io';
/**
 * Minimal logger interface used throughout the action so callers (and tests)
 * can substitute their own sink.
 */
export type Console = {
  readonly log: (...msg: unknown[]) => void;
  readonly error: (...msg: unknown[]) => void;
  readonly warn: (...msg: unknown[]) => void;
};
/**
 * Custom wrapper around the child_process module.
 *
 * Runs `cmd` via `bash -c`, streams stdout/stderr to the provided logger,
 * and resolves with the collected output once the process exits with code 0.
 * Rejects with the collected stderr on a non-zero exit.
 */
export const exec = async (
  cmd: string,
  opts: {
    env?: any;
    cwd?: string;
    log: Console;
  }
) => {
  const { log } = opts;
  const env = opts?.env || {};
  const ps = child_process.spawn('bash', ['-c', cmd], {
    env: {
      HOME: process.env.HOME,
      ...env,
    },
    cwd: opts.cwd,
    stdio: ['pipe', 'pipe', 'pipe'],
  });
  const output = {
    stderr: '',
    stdout: '',
  };
  // We won't be providing any input to command
  ps.stdin.end();
  ps.stdout.on('data', (data) => {
    output.stdout += data;
    log.log(`data`, data.toString());
  });
  ps.stderr.on('data', (data) => {
    output.stderr += data;
    log.error(data.toString());
  });
  return new Promise<{
    stderr: string;
    stdout: string;
  }>((resolve, reject) => {
    // Robustness fix: reject when the process cannot be spawned at all
    // (e.g. bash missing). Previously the uncaught 'error' event crashed
    // the process instead of rejecting this promise.
    ps.on('error', reject);
    ps.on('close', (code) => {
      if (code !== 0) {
        reject(
          new Error('Process exited with code: ' + code + ':\n' + output.stderr)
        );
      } else {
        resolve(output);
      }
    });
  });
};
/** Environment variables read by this action (all optional until validated). */
export interface EnvironmentVariables {
  /**
   * The URL of the repository to push to, either:
   *
   * * an ssh URL to a repository
   * * the string `"self"`
   */
  REPO?: string;
  /**
   * The name of the branch to push to
   */
  BRANCH?: string;
  /**
   * Which subdirectory in the repository to we want to push as the contents of the branch
   */
  FOLDER?: string;
  /**
   * The private key to use for publishing if REPO is an SSH repo
   */
  SSH_PRIVATE_KEY?: string;
  /**
   * The file path of a known_hosts file with fingerprint of the relevant server
   */
  KNOWN_HOSTS_FILE?: string;
  /**
   * The GITHUB_TOKEN secret
   */
  GITHUB_TOKEN?: string;
  /**
   * Set to "true" to clear all of the history of the target branch and force push
   */
  SQUASH_HISTORY?: string;
  /**
   * Set to "true" to avoid pushing commits that don't change any files.
   *
   * This is useful for example when you want to be able to easily identify
   * which upstream changes resulted in changes to this repository.
   */
  SKIP_EMPTY_COMMITS?: string;
  /**
   * An optional template string to use for the commit message,
   * if not provided, a default template is used.
   *
   * A number of placeholders are available to use in template strings:
   * * `{target-branch}` - the name of the target branch being updated
   * * `{sha}` - the 7-character sha of the HEAD of the current branch
   * * `{long-sha}` - the full sha of the HEAD of the current branch
   * * `{msg}` - the commit message for the HEAD of the current branch
   */
  MESSAGE?: string;
  /**
   * An optional path to a file to use as a list of globs defining which files
   * to delete when clearing the target branch
   */
  CLEAR_GLOBS_FILE?: string;
  /**
   * An optional string in git-check-ref-format to use for tagging the commit
   */
  TAG?: string;
  // Implicit environment variables passed by GitHub
  GITHUB_REPOSITORY?: string;
  GITHUB_EVENT_PATH?: string;
  /** The name of the person / app that initiated the workflow */
  GITHUB_ACTOR?: string;
}
// Surface the action's environment variables on process.env with types.
declare global {
  namespace NodeJS {
    interface ProcessEnv extends EnvironmentVariables {}
  }
}
// Default commit message template; placeholders are expanded in main().
const DEFAULT_MESSAGE = 'Update {target-branch} to output generated at {sha}';
// Error messages
const KNOWN_HOSTS_WARNING = `
##[warning] KNOWN_HOSTS_FILE not set
This will probably mean that host verification will fail later on
`;
const KNOWN_HOSTS_ERROR = (host: string) => `
##[error] Host key verification failed!
This is probably because you forgot to supply a value for KNOWN_HOSTS_FILE
or the file is invalid or doesn't correctly verify the host ${host}
`;
const SSH_KEY_ERROR = `
##[error] Permission denied (publickey)
Make sure that the ssh private key is set correctly, and
that the public key has been added to the target repo
`;
const INVALID_KEY_ERROR = `
##[error] Error loading key: invalid format
Please check that you're setting the environment variable
SSH_PRIVATE_KEY correctly
`;
// Paths
const REPO_SELF = 'self';
// Static resources shipped alongside the compiled action.
const RESOURCES = path.join(path.dirname(__dirname), 'resources');
const KNOWN_HOSTS_GITHUB = path.join(RESOURCES, 'known_hosts_github.com');
const SSH_FOLDER = path.join(homedir(), '.ssh');
const KNOWN_HOSTS_TARGET = path.join(SSH_FOLDER, 'known_hosts');
// Extracts the agent pid from `ssh-agent` stdout so the agent can be killed later.
const SSH_AGENT_PID_EXTRACT = /SSH_AGENT_PID=([0-9]+);/;
/** Options common to both push modes. */
interface BaseConfig {
  branch: string; // target branch to publish to
  folder: string; // source folder whose contents become the branch
  repo: string;   // clone URL
  squashHistory: boolean;    // true: recreate the branch as a single orphan commit
  skipEmptyCommits: boolean; // true: skip the push when the tree is unchanged
  message: string; // commit-message template
  tag?: string;    // optional tag (git-check-ref-format) for the new commit
}
/** Configuration for pushing to an arbitrary repo over ssh. */
interface SshConfig extends BaseConfig {
  mode: 'ssh';
  parsedUrl: gitUrlParse.GitUrl;
  privateKey: string;
  knownHostsFile?: string;
}
/** Configuration for pushing back to the current repository ("self"). */
interface SelfConfig extends BaseConfig {
  mode: 'self';
}
type Config = SshConfig | SelfConfig;
/**
 * The GitHub event that triggered this action
 */
export interface Event {
  pusher?: {
    email?: string;
    name?: string;
  };
}
/**
 * Build the runtime configuration from environment variables.
 *
 * Throws when a required variable is missing, or when REPO is neither the
 * literal "self" nor an ssh URL.
 */
const genConfig: (env?: EnvironmentVariables) => Config = (
  env = process.env
) => {
  if (!env.REPO) throw new Error('REPO must be specified');
  if (!env.BRANCH) throw new Error('BRANCH must be specified');
  if (!env.FOLDER) throw new Error('FOLDER must be specified');
  // Settings shared by both modes.
  const base = {
    repo: env.REPO,
    branch: env.BRANCH,
    folder: env.FOLDER,
    squashHistory: env.SQUASH_HISTORY === 'true',
    skipEmptyCommits: env.SKIP_EMPTY_COMMITS === 'true',
    message: env.MESSAGE || DEFAULT_MESSAGE,
    tag: env.TAG,
  };
  if (base.repo === REPO_SELF) {
    if (!env.GITHUB_TOKEN)
      throw new Error('GITHUB_TOKEN must be specified when REPO == self');
    if (!env.GITHUB_REPOSITORY)
      throw new Error('GITHUB_REPOSITORY must be specified when REPO == self');
    // Push back to the current repository over https using the runner's token.
    const config: Config = {
      ...base,
      mode: 'self',
      repo: `https://x-access-token:${env.GITHUB_TOKEN}@github.com/${env.GITHUB_REPOSITORY}.git`,
    };
    return config;
  }
  const parsedUrl = gitUrlParse(base.repo);
  if (parsedUrl.protocol === 'ssh') {
    if (!env.SSH_PRIVATE_KEY)
      throw new Error('SSH_PRIVATE_KEY must be specified when REPO uses ssh');
    const config: Config = {
      ...base,
      mode: 'ssh',
      parsedUrl,
      privateKey: env.SSH_PRIVATE_KEY,
      knownHostsFile: env.KNOWN_HOSTS_FILE,
    };
    return config;
  }
  throw new Error('Unsupported REPO URL');
};
/**
 * Spawn `command` with `args`, write `opts.data` to its stdin, and resolve
 * when it exits with code 0 (rejecting with the collected stderr otherwise).
 * Used to feed the ssh private key to `ssh-add` without touching disk.
 */
const writeToProcess = (
  command: string,
  args: string[],
  opts: {
    env: { [id: string]: string | undefined };
    data: string;
    log: Console;
  }
) =>
  new Promise<void>((resolve, reject) => {
    const child = child_process.spawn(command, args, {
      env: opts.env,
      stdio: 'pipe',
    });
    child.stdin.setDefaultEncoding('utf-8');
    child.stdin.write(opts.data);
    child.stdin.end();
    // Reject if the process cannot be spawned at all.
    child.on('error', reject);
    let stderr = '';
    child.stdout.on('data', (data) => {
      /* istanbul ignore next */
      opts.log.log(data.toString());
    });
    child.stderr.on('data', (data) => {
      // Collected so the rejection below carries the failure context.
      stderr += data;
      opts.log.error(data.toString());
    });
    child.on('close', (code) => {
      /* istanbul ignore else */
      if (code === 0) {
        resolve();
      } else {
        reject(new Error(stderr));
      }
    });
  });
/**
 * Entry point for the action.
 *
 * Reads configuration from `env`, configures git (and, for ssh mode, an
 * ssh-agent holding the deploy key), clones the target repo into a temp
 * directory, replaces the target branch's contents with the configured
 * folder, commits, optionally tags, and pushes.
 */
export const main = async ({
  env = process.env,
  log,
}: {
  env?: EnvironmentVariables;
  log: Console;
}) => {
  const config = genConfig(env);
  // Calculate paths that use temp directory
  const TMP_PATH = await fs.mkdtemp(
    path.join(tmpdir(), 'git-publish-subdir-action-')
  );
  const REPO_TEMP = path.join(TMP_PATH, 'repo');
  const SSH_AUTH_SOCK = path.join(TMP_PATH, 'ssh_agent.sock');
  if (!env.GITHUB_EVENT_PATH) throw new Error('Expected GITHUB_EVENT_PATH');
  const event: Event = JSON.parse(
    (await fs.readFile(env.GITHUB_EVENT_PATH)).toString()
  );
  // Commit author: prefer the pusher from the triggering event, then the actor.
  const name =
    event.pusher?.name || env.GITHUB_ACTOR || 'Git Publish Subdirectory';
  const email =
    event.pusher?.email ||
    (env.GITHUB_ACTOR
      ? `${env.GITHUB_ACTOR}@users.noreply.github.com`
      : 'nobody@nowhere');
  const tag = env.TAG;
  // Set Git Config
  await exec(`git config --global user.name "${name}"`, { log });
  await exec(`git config --global user.email "${email}"`, { log });
  interface GitInformation {
    commitMessage: string;
    sha: string;
  }
  /**
   * Get information about the current git repository
   * (walks up from cwd until a `.git` directory is found).
   */
  const getGitInformation = async (): Promise<GitInformation> => {
    // Get the root git directory
    let dir = process.cwd();
    while (true) {
      const isGitRepo = await fs
        .stat(path.join(dir, '.git'))
        .then((s) => s.isDirectory())
        .catch(() => false);
      if (isGitRepo) {
        break;
      }
      // We need to traverse up one
      const next = path.dirname(dir);
      if (next === dir) {
        log.log(
          `##[info] Not running in git directory, unable to get information about source commit`
        );
        return {
          commitMessage: '',
          sha: '',
        };
      } else {
        dir = next;
      }
    }
    // Get current sha of repo to use in commit message
    const gitLog = await git.log({
      fs: fsModule,
      depth: 1,
      dir,
    });
    const commit = gitLog.length > 0 ? gitLog[0] : undefined;
    if (!commit) {
      log.log(`##[info] Unable to get information about HEAD commit`);
      return {
        commitMessage: '',
        sha: '',
      };
    }
    return {
      commitMessage: commit.commit.message,
      sha: commit.oid,
    };
  };
  const gitInfo = await getGitInformation();
  // Environment to pass to children
  const childEnv = Object.assign({}, process.env, {
    SSH_AUTH_SOCK,
  });
  if (config.mode === 'ssh') {
    // Copy over the known_hosts file if set
    let known_hosts = config.knownHostsFile;
    // Use well-known known_hosts for certain domains
    if (!known_hosts && config.parsedUrl.resource === 'github.com') {
      known_hosts = KNOWN_HOSTS_GITHUB;
    }
    if (!known_hosts) {
      log.warn(KNOWN_HOSTS_WARNING);
    } else {
      await mkdirP(SSH_FOLDER);
      await fs.copyFile(known_hosts, KNOWN_HOSTS_TARGET);
    }
    // Setup ssh-agent with private key
    log.log(`Setting up ssh-agent on ${SSH_AUTH_SOCK}`);
    const sshAgentMatch = SSH_AGENT_PID_EXTRACT.exec(
      (await exec(`ssh-agent -a ${SSH_AUTH_SOCK}`, { log, env: childEnv }))
        .stdout
    );
    /* istanbul ignore if */
    if (!sshAgentMatch) throw new Error('Unexpected output from ssh-agent');
    childEnv.SSH_AGENT_PID = sshAgentMatch[1];
    log.log(`Adding private key to ssh-agent at ${SSH_AUTH_SOCK}`);
    // The key is piped via stdin so it never touches disk.
    await writeToProcess('ssh-add', ['-'], {
      data: config.privateKey + '\n',
      env: childEnv,
      log,
    });
    log.log(`Private key added`);
  }
  // Clone the target repo
  await exec(`git clone "${config.repo}" "${REPO_TEMP}"`, {
    log,
    env: childEnv,
  }).catch((err) => {
    const s = err.toString();
    /* istanbul ignore else */
    if (config.mode === 'ssh') {
      /* istanbul ignore else */
      if (s.indexOf('Host key verification failed') !== -1) {
        log.error(KNOWN_HOSTS_ERROR(config.parsedUrl.resource));
      } else if (s.indexOf('Permission denied (publickey') !== -1) {
        log.error(SSH_KEY_ERROR);
      }
    }
    throw err;
  });
  if (!config.squashHistory) {
    // Fetch branch if it exists
    await exec(`git fetch -u origin ${config.branch}:${config.branch}`, {
      log,
      env: childEnv,
      cwd: REPO_TEMP,
    }).catch((err) => {
      const s = err.toString();
      /* istanbul ignore if */
      if (s.indexOf("Couldn't find remote ref") === -1) {
        log.error(
          "##[warning] Failed to fetch target branch, probably doesn't exist"
        );
        log.error(err);
      }
    });
    // Check if branch already exists
    log.log(`##[info] Checking if branch ${config.branch} exists already`);
    const branchCheck = await exec(`git branch --list "${config.branch}"`, {
      log,
      env: childEnv,
      cwd: REPO_TEMP,
    });
    if (branchCheck.stdout.trim() === '') {
      // Branch does not exist yet, let's check it out as an orphan
      log.log(`##[info] ${config.branch} does not exist, creating as orphan`);
      await exec(`git checkout --orphan "${config.branch}"`, {
        log,
        env: childEnv,
        cwd: REPO_TEMP,
      });
    } else {
      await exec(`git checkout "${config.branch}"`, {
        log,
        env: childEnv,
        cwd: REPO_TEMP,
      });
    }
  } else {
    // Checkout a random branch so we can delete the target branch if it exists
    log.log('Checking out temp branch');
    await exec(`git checkout -b "${Math.random().toString(36).substring(2)}"`, {
      log,
      env: childEnv,
      cwd: REPO_TEMP,
    });
    // Delete the target branch if it exists (best-effort; ignore failure)
    await exec(`git branch -D "${config.branch}"`, {
      log,
      env: childEnv,
      cwd: REPO_TEMP,
    }).catch((err) => {});
    // Checkout target branch as an orphan
    await exec(`git checkout --orphan "${config.branch}"`, {
      log,
      env: childEnv,
      cwd: REPO_TEMP,
    });
    log.log('Checked out orphan');
  }
  // Update contents of branch
  log.log(`##[info] Updating branch ${config.branch}`);
  /**
   * The list of globs we'll use for clearing
   */
  const globs = await (async () => {
    if (env.CLEAR_GLOBS_FILE) {
      // We need to use a custom mechanism to clear the files
      log.log(
        `##[info] Using custom glob file to clear target branch ${env.CLEAR_GLOBS_FILE}`
      );
      const globList = (await fs.readFile(env.CLEAR_GLOBS_FILE))
        .toString()
        .split('\n')
        .map((s) => s.trim())
        .filter((s) => s !== '');
      return globList;
    } else {
      // Remove all files
      log.log(`##[info] Removing all files from target branch`);
      return ['**/*', '!.git'];
    }
  })();
  const filesToDelete = fgStream(globs, {
    absolute: true,
    dot: true,
    followSymbolicLinks: false,
    cwd: REPO_TEMP,
  });
  // Delete all files from the filestream
  for await (const entry of filesToDelete) {
    await fs.unlink(entry);
  }
  const folder = path.resolve(process.cwd(), config.folder);
  log.log(`##[info] Copying all files from ${folder}`);
  // TODO: replace this copy with a node implementation
  // NOTE(review): `cp -rT` is GNU-specific and fails on BSD/macOS cp --
  // confirm runners are always Linux.
  await exec(`cp -rT "${folder}"/ ./`, { log, env: childEnv, cwd: REPO_TEMP });
  await exec(`git add -A .`, { log, env: childEnv, cwd: REPO_TEMP });
  // Expand the commit-message template (placeholders documented on MESSAGE).
  const message = config.message
    .replace(/\{target\-branch\}/g, config.branch)
    .replace(/\{sha\}/g, gitInfo.sha.substr(0, 7))
    .replace(/\{long\-sha\}/g, gitInfo.sha)
    .replace(/\{msg\}/g, gitInfo.commitMessage);
  await git.commit({
    fs: fsModule,
    dir: REPO_TEMP,
    message,
    author: { email, name },
  });
  if (tag) {
    log.log(`##[info] Tagging commit with ${tag}`);
    await git.tag({
      fs: fsModule,
      dir: REPO_TEMP,
      ref: tag,
      force: true,
    });
  }
  if (config.skipEmptyCommits) {
    log.log(`##[info] Checking whether contents have changed before pushing`);
    // Before we push, check whether it changed the tree,
    // and avoid pushing if not
    const head = await git.resolveRef({
      fs: fsModule,
      dir: REPO_TEMP,
      ref: 'HEAD',
    });
    const currentCommit = await git.readCommit({
      fs: fsModule,
      dir: REPO_TEMP,
      oid: head,
    });
    if (currentCommit.commit.parent.length === 1) {
      const previousCommit = await git.readCommit({
        fs: fsModule,
        dir: REPO_TEMP,
        oid: currentCommit.commit.parent[0],
      });
      if (currentCommit.commit.tree === previousCommit.commit.tree) {
        log.log(`##[info] Contents of target repo unchanged, exiting.`);
        return;
      }
    }
  }
  log.log(`##[info] Pushing`);
  const forceArg = config.squashHistory ? '-f' : '';
  const tagsArg = tag ? '--tags' : '';
  const push = await exec(
    `git push ${forceArg} origin "${config.branch}" ${tagsArg}`,
    { log, env: childEnv, cwd: REPO_TEMP }
  );
  log.log(push.stdout);
  log.log(`##[info] Deployment Successful`);
  if (config.mode === 'ssh') {
    log.log(`##[info] Killing ssh-agent`);
    await exec(`ssh-agent -k`, { log, env: childEnv });
  }
};
import * as fs from "fs";
import * as path from "path";
import * as osLib from "os";
import * as cp from "child_process";
import * as events from "events";
import { expect } from "chai";
import * as Sinon from "sinon";
import * as Nock from "nock";
import * as pfs from "../../../src/util/misc/promisfied-fs";
import CodePushReleaseReactCommand from "../../../src/commands/codepush/release-react";
import { CommandArgs } from "../../../src/util/commandline/command";
import * as mkdirp from "mkdirp";
import * as ReactNativeTools from "../../../src/commands/codepush/lib/react-native-utils";
import * as fileUtils from "../../../src/commands/codepush/lib/file-utils";
import { CommandFailedResult, CommandResult } from "../../../src/util/commandline";
import * as updateContentsTasks from "../../../src/commands/codepush/lib/update-contents-tasks";
import rimraf = require("rimraf");
const g2js = require("gradle-to-js/lib/parser");
describe("codepush release-react command", function () {
const app = "bogus/app";
const deployment = "bogus-deployment";
let sandbox: Sinon.SinonSandbox;
beforeEach(() => {
sandbox = Sinon.createSandbox();
sandbox.stub(updateContentsTasks, "sign");
});
afterEach(() => {
sandbox.restore();
});
const goldenPathArgs: CommandArgs = {
// prettier-ignore
args: [
"--extra-hermes-flag", "bogusHermes",
"--extra-bundler-option", "bogusRnBundle",
"--target-binary-version", "1.0.0",
"--output-dir", "fake/out/dir",
"--sourcemap-output-dir", "fake/sourcemap/output",
"--sourcemap-output", "sourceMapOutput.txt",
"--build-configuration-name", "Release",
"--plist-file-prefix", "",
"--plist-file", "",
"--gradle-file", "bogusApp/app.gradle",
"--entry-file", "entry.js",
"--development",
"--bundle-name", "bundle",
"--rollout", "100",
"--disable-duplicate-release-error",
"--private-key-path", "fake/private-key-path",
"--mandatory",
"--disabled",
"--description", "app description",
"--deployment-name", deployment,
"--app", app,
"--token", "c1o3d3e7",
],
command: ["codepush", "release-react"],
commandPath: "fake/path",
};
it("succeed if all parameters are passed", async function () {
// Arrange
const command = new CodePushReleaseReactCommand(goldenPathArgs);
sandbox.stub(fs, "readFileSync").returns(`
{
"name": "RnCodepushAndroid",
"version": "0.0.1",
"dependencies": {
"react": "16.13.1",
"react-native": "0.63.3",
"react-native-code-push": "6.3.0"
}
}
`);
Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}/deployments/${deployment}`).reply(200, {});
Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}`).reply(200, {
os: "iOS",
platform: "react-native",
});
sandbox.stub(mkdirp, "sync");
sandbox.stub(fileUtils, "fileDoesNotExistOrIsDirectory").returns(false);
sandbox.stub(fileUtils, "createEmptyTmpReleaseFolder");
sandbox.stub(fileUtils, "removeReactTmpDir");
sandbox.stub(ReactNativeTools, "runReactNativeBundleCommand");
sandbox.stub(command, "release" as any).resolves(<CommandResult>{ succeeded: true });
// Act
const result = await command.execute();
// Assert
expect(result.succeeded).to.be.true;
});
it("npm package should have name defined check", async function () {
// Arrange
const command = new CodePushReleaseReactCommand(goldenPathArgs);
sandbox.stub(fs, "readFileSync").returns(`
{
"version": "0.0.1",
"dependencies": {
"react": "16.13.1",
"react-native": "0.63.3",
"react-native-code-push": "6.3.0"
}
}
`);
// Act
const result = command.execute();
// Assert
return expect(result).to.eventually.be.rejectedWith('The "package.json" file in the CWD does not have the "name" field set.');
});
  context("react-native dependency", function () {
    // The command must refuse to run outside a React Native project, but it
    // should accept react-native declared in either "dependencies" or
    // "devDependencies" of package.json.
    it("throws error if no react native in dependencies and devDependencies", async function () {
      // Arrange
      const command = new CodePushReleaseReactCommand(goldenPathArgs);
      // Fixture has no "react-native" entry at all.
      sandbox.stub(fs, "readFileSync").returns(`
      {
        "name": "RnCodepushAndroid",
        "version": "0.0.1",
        "dependencies": {
          "react": "16.13.1",
          "react-native-code-push": "6.3.0"
        }
      }
      `);
      // Act
      const result = (await command.execute()) as CommandFailedResult;
      // Assert
      expect(result.succeeded).to.be.false;
      expect(result.errorMessage).to.equal("The project in the CWD is not a React Native project.");
    });
    it("finishes to end if react-native specified in dependencies ", async function () {
      // Arrange
      const command = new CodePushReleaseReactCommand(goldenPathArgs);
      // Fixture declares react-native under "dependencies".
      sandbox.stub(fs, "readFileSync").returns(`
      {
        "name": "RnCodepushAndroid",
        "version": "0.0.1",
        "dependencies": {
          "react": "16.13.1",
          "react-native": "0.63.3",
          "react-native-code-push": "6.3.0"
        }
      }
      `);
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}/deployments/${deployment}`).reply(200, {});
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}`).reply(200, {
        os: "iOS",
        platform: "react-native",
      });
      sandbox.stub(mkdirp, "sync");
      sandbox.stub(fileUtils, "fileDoesNotExistOrIsDirectory").returns(false);
      sandbox.stub(fileUtils, "createEmptyTmpReleaseFolder");
      sandbox.stub(fileUtils, "removeReactTmpDir");
      sandbox.stub(ReactNativeTools, "runReactNativeBundleCommand");
      sandbox.stub(command, "release" as any).resolves(<CommandResult>{ succeeded: true });
      // Act
      const result = await command.execute();
      // Assert
      expect(result.succeeded).to.be.true;
    });
    it("finishes to end if react-native specified in devDependencies ", async function () {
      // Arrange
      const command = new CodePushReleaseReactCommand(goldenPathArgs);
      // Fixture declares react-native under "devDependencies" only.
      sandbox.stub(fs, "readFileSync").returns(`
      {
        "name": "RnCodepushAndroid",
        "version": "0.0.1",
        "dependencies": {
          "react": "16.13.1",
          "react-native-code-push": "6.3.0"
        },
        "devDependencies": {
          "react-native": "0.63.3"
        }
      }
      `);
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}/deployments/${deployment}`).reply(200, {});
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}`).reply(200, {
        os: "iOS",
        platform: "react-native",
      });
      sandbox.stub(mkdirp, "sync");
      sandbox.stub(fileUtils, "fileDoesNotExistOrIsDirectory").returns(false);
      sandbox.stub(fileUtils, "createEmptyTmpReleaseFolder");
      sandbox.stub(fileUtils, "removeReactTmpDir");
      sandbox.stub(ReactNativeTools, "runReactNativeBundleCommand");
      sandbox.stub(command, "release" as any).resolves(<CommandResult>{ succeeded: true });
      // Act
      const result = await command.execute();
      // Assert
      expect(result.succeeded).to.be.true;
    });
  });
it("shows user friendly error when incorrect deployment specified", async function () {
// Arrange
const command = new CodePushReleaseReactCommand(goldenPathArgs);
sandbox.stub(fs, "readFileSync").returns(`
{
"name": "RnCodepushAndroid",
"version": "0.0.1",
"dependencies": {
"react": "16.13.1",
"react-native": "0.63.3",
"react-native-code-push": "6.3.0"
}
}
`);
Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}/deployments/${deployment}`).reply(404, {});
// Act
const result = (await command.execute()) as CommandFailedResult;
// Assert
expect(result.succeeded).to.be.false;
expect(result.errorMessage).to.be.equal(`Deployment "${deployment}" does not exist.`);
});
  context("when no output dir parameter specified, then temporarily directory is created", function () {
    it("creates CodePush directory, so that it was compatible with SDK", async function () {
      // Arrange
      // Minimal argument set: note there is no output-dir flag here.
      const args = {
        ...goldenPathArgs,
        // prettier-ignore
        args: [
          "--target-binary-version", "1.0.0",
          "--bundle-name", "bundle",
          "--deployment-name", deployment,
          "--app", app,
          "--token", "c1o3d3e7",
        ],
      };
      const command = new CodePushReleaseReactCommand(args);
      sandbox.stub(fs, "readFileSync").returns(`
      {
        "name": "RnCodepushAndroid",
        "version": "0.0.1",
        "dependencies": {
          "react": "16.13.1",
          "react-native": "0.63.3",
          "react-native-code-push": "6.3.0"
        }
      }
      `);
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}/deployments/${deployment}`).reply(200, {});
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}`).reply(200, {
        os: "Android",
        platform: "react-native",
      });
      sandbox.stub(mkdirp, "sync");
      sandbox.stub(fileUtils, "fileDoesNotExistOrIsDirectory").returns(false);
      sandbox.stub(fileUtils, "createEmptyTmpReleaseFolder");
      sandbox.stub(fileUtils, "removeReactTmpDir");
      sandbox.stub(ReactNativeTools, "runReactNativeBundleCommand");
      sandbox.stub(command, "release" as any).resolves(<CommandResult>{ succeeded: true });
      // Temp-dir factory is stubbed so we can assert on the name it is given.
      const mkTempDirStub = sandbox.stub(pfs, "mkTempDir").resolves("fake/path/code-push");
      // Act
      await command.execute();
      // Assert
      sandbox.assert.calledWithExactly(mkTempDirStub, "code-push");
    });
    it("temporary directory is get removed once command finishes", async function () {
      // Arrange
      const fakePath = "fake/path/code-push";
      const args = {
        ...goldenPathArgs,
        // prettier-ignore
        args: [
          "--target-binary-version", "1.0.0",
          "--bundle-name", "bundle",
          "--deployment-name", deployment,
          "--app", app,
          "--token", "c1o3d3e7",
        ],
      };
      const command = new CodePushReleaseReactCommand(args);
      sandbox.stub(fs, "readFileSync").returns(`
      {
        "name": "RnCodepushAndroid",
        "version": "0.0.1",
        "dependencies": {
          "react": "16.13.1",
          "react-native": "0.63.3",
          "react-native-code-push": "6.3.0"
        }
      }
      `);
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}/deployments/${deployment}`).reply(200, {});
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}`).reply(200, {
        os: "Android",
        platform: "react-native",
      });
      sandbox.stub(mkdirp, "sync");
      sandbox.stub(fileUtils, "fileDoesNotExistOrIsDirectory").returns(false);
      sandbox.stub(fileUtils, "createEmptyTmpReleaseFolder");
      sandbox.stub(fileUtils, "removeReactTmpDir");
      sandbox.stub(ReactNativeTools, "runReactNativeBundleCommand");
      sandbox.stub(command, "release" as any).resolves(<CommandResult>{ succeeded: true });
      sandbox.stub(pfs, "mkTempDir").resolves(fakePath);
      // Spy (not stub) so the real removal logic's call is observed.
      const rmDirSpy = sandbox.spy(pfs, "rmDir");
      // Act
      await command.execute();
      // Assert
      // The "CodePush" subfolder inside the temp dir must be cleaned up.
      sandbox.assert.calledOnceWithExactly(rmDirSpy, path.join(fakePath, "CodePush"));
    });
  });
  ["Android", "iOS", "Windows"].forEach((os: string) => {
    // Parametrized over the three OS values the API may report; the release
    // should succeed for each of them.
    it(`only android, ios and windows OSes are allowed (check the API response) - check ${os}`, async function () {
      // Arrange
      const command = new CodePushReleaseReactCommand(goldenPathArgs);
      sandbox.stub(fs, "readFileSync").returns(`
      {
        "name": "RnCodepushAndroid",
        "version": "0.0.1",
        "dependencies": {
          "react": "16.13.1",
          "react-native": "0.63.3",
          "react-native-code-push": "6.3.0"
        }
      }
      `);
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}/deployments/${deployment}`).reply(200, {});
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}`).reply(200, {
        os,
        platform: "react-native",
      });
      sandbox.stub(mkdirp, "sync");
      sandbox.stub(fileUtils, "fileDoesNotExistOrIsDirectory").returns(false);
      sandbox.stub(fileUtils, "createEmptyTmpReleaseFolder");
      sandbox.stub(fileUtils, "removeReactTmpDir");
      sandbox.stub(ReactNativeTools, "runReactNativeBundleCommand");
      // Hermes detection stubbed off so the Android path skips bytecode emit.
      sandbox.stub(ReactNativeTools, "getAndroidHermesEnabled").resolves(false);
      sandbox.stub(command, "release" as any).resolves(<CommandResult>{ succeeded: true });
      // Act
      const result = await command.execute();
      // Assert
      expect(result.succeeded).to.be.true;
    });
  });
it("throws an error if non react-native platform specified", async function () {
// Arrange
const command = new CodePushReleaseReactCommand(goldenPathArgs);
sandbox.stub(fs, "readFileSync").returns(`
{
"name": "RnCodepushAndroid",
"version": "0.0.1",
"dependencies": {
"react": "16.13.1",
"react-native": "0.63.3",
"react-native-code-push": "6.3.0"
}
}
`);
Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}/deployments/${deployment}`).reply(200, {});
Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}`).reply(200, {
os: "iOS",
platform: "objective-c",
});
// Act
const result = await command.execute();
// Assert
expect(result.succeeded).to.be.false;
});
  context("bundle name defaults", function () {
    // When "--bundle-name" is omitted, the default depends on the target OS.
    it("set correct bundle name for ios", async function () {
      // Arrange
      // No "--bundle-name" flag in the argument list.
      const args = {
        ...goldenPathArgs,
        // prettier-ignore
        args: [
          "--target-binary-version", "1.0.0",
          "--deployment-name", deployment,
          "--app", app,
          "--token", "c1o3d3e7",
        ],
      };
      const command = new CodePushReleaseReactCommand(args);
      sandbox.stub(fs, "readFileSync").returns(`
      {
        "name": "RnCodepushAndroid",
        "version": "0.0.1",
        "dependencies": {
          "react": "16.13.1",
          "react-native": "0.63.3",
          "react-native-code-push": "6.3.0"
        }
      }
      `);
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}/deployments/${deployment}`).reply(200, {});
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}`).reply(200, {
        os: "iOS",
        platform: "react-native",
      });
      sandbox.stub(mkdirp, "sync");
      sandbox.stub(fileUtils, "fileDoesNotExistOrIsDirectory").returns(false);
      sandbox.stub(fileUtils, "createEmptyTmpReleaseFolder");
      sandbox.stub(fileUtils, "removeReactTmpDir");
      const rnBundleStub = sandbox.stub(ReactNativeTools, "runReactNativeBundleCommand");
      sandbox.stub(command, "release" as any).resolves(<CommandResult>{ succeeded: true });
      sandbox.stub(pfs, "mkTempDir").resolves("fake/path/code-push");
      // Act
      await command.execute();
      // Assert
      // First positional argument of the bundle command is the bundle name.
      expect(rnBundleStub.getCalls()[0].args[0]).to.be.equal("main.jsbundle");
    });
    it("another platform", async function () {
      const os = "Android";
      // Arrange
      const args = {
        ...goldenPathArgs,
        // prettier-ignore
        args: [
          "--target-binary-version", "1.0.0",
          "--deployment-name", deployment,
          "--app", app,
          "--token", "c1o3d3e7",
        ],
      };
      const command = new CodePushReleaseReactCommand(args);
      sandbox.stub(fs, "readFileSync").returns(`
      {
        "name": "RnCodepushAndroid",
        "version": "0.0.1",
        "dependencies": {
          "react": "16.13.1",
          "react-native": "0.63.3",
          "react-native-code-push": "6.3.0"
        }
      }
      `);
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}/deployments/${deployment}`).reply(200, {});
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}`).reply(200, {
        os,
        platform: "react-native",
      });
      sandbox.stub(mkdirp, "sync");
      sandbox.stub(fileUtils, "fileDoesNotExistOrIsDirectory").returns(false);
      sandbox.stub(fileUtils, "createEmptyTmpReleaseFolder");
      sandbox.stub(fileUtils, "removeReactTmpDir");
      const rnBundleStub = sandbox.stub(ReactNativeTools, "runReactNativeBundleCommand");
      sandbox.stub(command, "release" as any).resolves(<CommandResult>{ succeeded: true });
      sandbox.stub(pfs, "mkTempDir").resolves("fake/path/code-push");
      // Act
      await command.execute();
      // Assert
      // Non-iOS platforms default to "index.{os}.bundle".
      expect(rnBundleStub.getCalls()[0].args[0]).to.be.equal(`index.${os.toLowerCase()}.bundle`);
    });
  });
  context("entry file", function () {
    context("if not specified", function () {
      // Default entry-file resolution: try "index.{os}.js" first, then fall
      // back to "index.js", and fail if neither exists.
      it("then defaults to index.{os}.js", async function () {
        const os = "Android";
        // Arrange
        const args = {
          ...goldenPathArgs,
          // prettier-ignore
          args: [
            "--target-binary-version", "1.0.0",
            "--deployment-name", deployment,
            "--app", app,
            "--token", "c1o3d3e7",
          ],
        };
        const command = new CodePushReleaseReactCommand(args);
        sandbox.stub(fs, "readFileSync").returns(`
        {
          "name": "RnCodepushAndroid",
          "version": "0.0.1",
          "dependencies": {
            "react": "16.13.1",
            "react-native": "0.63.3",
            "react-native-code-push": "6.3.0"
          }
        }
        `);
        Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}/deployments/${deployment}`).reply(200, {});
        Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}`).reply(200, {
          os,
          platform: "react-native",
        });
        sandbox.stub(mkdirp, "sync");
        sandbox.stub(fileUtils, "fileDoesNotExistOrIsDirectory").returns(false);
        sandbox.stub(fileUtils, "createEmptyTmpReleaseFolder");
        sandbox.stub(fileUtils, "removeReactTmpDir");
        const rnBundleStub = sandbox.stub(ReactNativeTools, "runReactNativeBundleCommand");
        sandbox.stub(command, "release" as any).resolves(<CommandResult>{ succeeded: true });
        sandbox.stub(pfs, "mkTempDir").resolves("fake/path/code-push");
        // Act
        await command.execute();
        // Assert
        // Third positional argument of the bundle command is the entry file.
        expect(rnBundleStub.getCalls()[0].args[2]).to.be.equal(`index.${os.toLowerCase()}.js`);
      });
      it("and fallback to index.js", async function () {
        const os = "Android";
        // Arrange
        const args = {
          ...goldenPathArgs,
          // prettier-ignore
          args: [
            "--target-binary-version", "1.0.0",
            "--deployment-name", deployment,
            "--app", app,
            "--token", "c1o3d3e7",
          ],
        };
        const command = new CodePushReleaseReactCommand(args);
        sandbox.stub(fs, "readFileSync").returns(`
        {
          "name": "RnCodepushAndroid",
          "version": "0.0.1",
          "dependencies": {
            "react": "16.13.1",
            "react-native": "0.63.3",
            "react-native-code-push": "6.3.0"
          }
        }
        `);
        Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}/deployments/${deployment}`).reply(200, {});
        Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}`).reply(200, {
          os,
          platform: "react-native",
        });
        sandbox.stub(mkdirp, "sync");
        const firstAttemptEntryFileName = `index.${os.toLowerCase()}.js`;
        // Only the OS-specific entry file is reported missing, so the command
        // should fall back to plain "index.js".
        sandbox.replace(fileUtils, "fileDoesNotExistOrIsDirectory", (path) => path === firstAttemptEntryFileName);
        sandbox.stub(fileUtils, "createEmptyTmpReleaseFolder");
        sandbox.stub(fileUtils, "removeReactTmpDir");
        const rnBundleStub = sandbox.stub(ReactNativeTools, "runReactNativeBundleCommand");
        sandbox.stub(command, "release" as any).resolves(<CommandResult>{ succeeded: true });
        sandbox.stub(pfs, "mkTempDir").resolves("fake/path/code-push");
        // Act
        await command.execute();
        // Assert
        expect(rnBundleStub.getCalls()[0].args[2]).to.be.equal("index.js");
      });
      it("fails command if no file found", async function () {
        const os = "Android";
        // Arrange
        const args = {
          ...goldenPathArgs,
          // prettier-ignore
          args: [
            "--target-binary-version", "1.0.0",
            "--deployment-name", deployment,
            "--app", app,
            "--token", "c1o3d3e7",
          ],
        };
        const command = new CodePushReleaseReactCommand(args);
        sandbox.stub(fs, "readFileSync").returns(`
        {
          "name": "RnCodepushAndroid",
          "version": "0.0.1",
          "dependencies": {
            "react": "16.13.1",
            "react-native": "0.63.3",
            "react-native-code-push": "6.3.0"
          }
        }
        `);
        Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}/deployments/${deployment}`).reply(200, {});
        Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}`).reply(200, {
          os,
          platform: "react-native",
        });
        sandbox.stub(mkdirp, "sync");
        const firstAttemptEntryFileName = `index.${os.toLowerCase()}.js`;
        const secondAttemptEntryFileName = "index.js";
        // Both candidate entry files are reported missing.
        sandbox.replace(fileUtils, "fileDoesNotExistOrIsDirectory", (path) => {
          return path === firstAttemptEntryFileName || path === secondAttemptEntryFileName;
        });
        // Act
        const result = await command.execute();
        // Assert
        expect(result.succeeded).to.be.false;
      });
    });
    it("fails the command if entry file specified is not found", async function () {
      // Arrange
      const entryFile = "bogusEntryFile";
      const args = {
        ...goldenPathArgs,
        // prettier-ignore
        args: [
          "--target-binary-version", "1.0.0",
          "--deployment-name", deployment,
          "--app", app,
          "--token", "c1o3d3e7",
          "--entry-file", entryFile
        ],
      };
      const command = new CodePushReleaseReactCommand(args);
      sandbox.stub(fs, "readFileSync").returns(`
      {
        "name": "RnCodepushAndroid",
        "version": "0.0.1",
        "dependencies": {
          "react": "16.13.1",
          "react-native": "0.63.3",
          "react-native-code-push": "6.3.0"
        }
      }
      `);
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}/deployments/${deployment}`).reply(200, {});
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}`).reply(200, {
        os: "Android",
        platform: "react-native",
      });
      sandbox.stub(mkdirp, "sync");
      // Every file-existence check fails, including the explicit entry file.
      const fileNotExistStub = sandbox.stub(fileUtils, "fileDoesNotExistOrIsDirectory").returns(true);
      // Act
      const result = await command.execute();
      // Assert
      expect(fileNotExistStub.calledWithExactly(entryFile)).to.be.true;
      expect(result.succeeded).to.be.false;
    });
  });
  it("composes sourcemapOutput when --sourcemap-output parameter is not provided", async function () {
    const os = "Android";
    const bundleName = "bogus.bundle";
    const sourcemapOutputDir = "/fake/dir";
    // Arrange
    // Only the output *dir* is given; the full sourcemap path must be derived
    // from it plus the bundle name.
    const args = {
      ...goldenPathArgs,
      // prettier-ignore
      args: [
        "--sourcemap-output-dir", sourcemapOutputDir,
        "--bundle-name", bundleName,
        "--target-binary-version", "1.0.0",
        "--deployment-name", deployment,
        "--app", app,
        "--token", "c1o3d3e7",
      ],
    };
    const command = new CodePushReleaseReactCommand(args);
    sandbox.stub(fs, "readFileSync").returns(`
    {
      "name": "RnCodepushAndroid",
      "version": "0.0.1",
      "dependencies": {
        "react": "16.13.1",
        "react-native": "0.63.3",
        "react-native-code-push": "6.3.0"
      }
    }
    `);
    Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}/deployments/${deployment}`).reply(200, {});
    Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}`).reply(200, {
      os,
      platform: "react-native",
    });
    sandbox.stub(mkdirp, "sync");
    sandbox.stub(fileUtils, "fileDoesNotExistOrIsDirectory").returns(false);
    sandbox.stub(fileUtils, "createEmptyTmpReleaseFolder");
    sandbox.stub(fileUtils, "removeReactTmpDir");
    const rnBundleStub = sandbox.stub(ReactNativeTools, "runReactNativeBundleCommand");
    sandbox.stub(command, "release" as any).resolves(<CommandResult>{ succeeded: true });
    sandbox.stub(pfs, "mkTempDir").resolves("fake/path/code-push");
    // Act
    await command.execute();
    // Assert
    // Sixth positional argument of the bundle command is the sourcemap path.
    expect(rnBundleStub.getCalls()[0].args[5]).to.be.equal(path.join(sourcemapOutputDir, `${bundleName}.map`));
  });
  context("targetBinaryVersion", function () {
    it("fails if targetBinaryVersion is not in valid range", async function () {
      const os = "Android";
      // Arrange
      // "invalid-range" is not a valid semver range, so validation must fail
      // before any bundling happens.
      const args = {
        ...goldenPathArgs,
        // prettier-ignore
        args: [
          "--target-binary-version", "invalid-range",
          "--deployment-name", deployment,
          "--app", app,
          "--token", "c1o3d3e7",
        ],
      };
      const command = new CodePushReleaseReactCommand(args);
      sandbox.stub(fs, "readFileSync").returns(`
      {
        "name": "RnCodepushAndroid",
        "version": "0.0.1",
        "dependencies": {
          "react": "16.13.1",
          "react-native": "0.63.3",
          "react-native-code-push": "6.3.0"
        }
      }
      `);
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}/deployments/${deployment}`).reply(200, {});
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}`).reply(200, {
        os,
        platform: "react-native",
      });
      sandbox.stub(mkdirp, "sync");
      sandbox.stub(fileUtils, "fileDoesNotExistOrIsDirectory").returns(false);
      sandbox.stub(fileUtils, "createEmptyTmpReleaseFolder");
      sandbox.stub(fileUtils, "removeReactTmpDir");
      // Act
      const result = (await command.execute()) as CommandFailedResult;
      // Assert
      expect(result.errorMessage).to.be.equal("Invalid binary version(s) for a release.");
      expect(result.succeeded).to.be.false;
    });
    it("sets targetBinaryVersion from project settings if not specified", async function () {
      const os = "Android";
      // Arrange
      // No "--target-binary-version" flag: the command should fall back to
      // reading the app version from the React Native project.
      const args = {
        ...goldenPathArgs,
        // prettier-ignore
        args: [
          "--deployment-name", deployment,
          "--app", app,
          "--token", "c1o3d3e7",
        ],
      };
      const command = new CodePushReleaseReactCommand(args);
      sandbox.stub(fs, "readFileSync").returns(`
      {
        "name": "RnCodepushAndroid",
        "version": "0.0.1",
        "dependencies": {
          "react": "16.13.1",
          "react-native": "0.63.3",
          "react-native-code-push": "6.3.0"
        }
      }
      `);
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}/deployments/${deployment}`).reply(200, {});
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}`).reply(200, {
        os,
        platform: "react-native",
      });
      sandbox.stub(mkdirp, "sync");
      sandbox.stub(fileUtils, "fileDoesNotExistOrIsDirectory").returns(false);
      sandbox.stub(fileUtils, "createEmptyTmpReleaseFolder");
      sandbox.stub(fileUtils, "removeReactTmpDir");
      const fallback = sandbox.stub(ReactNativeTools, "getReactNativeProjectAppVersion");
      sandbox.stub(ReactNativeTools, "runReactNativeBundleCommand");
      sandbox.stub(command, "release" as any).resolves(<CommandResult>{ succeeded: true });
      sandbox.stub(pfs, "mkTempDir").resolves("fake/path/code-push");
      // Act
      await command.execute();
      // Assert
      Sinon.assert.called(fallback);
    });
  });
  it("removes temporary RN directory", async function () {
    const os = "Android";
    // Arrange
    const args = {
      ...goldenPathArgs,
      // prettier-ignore
      args: [
        "--target-binary-version", "1.0.0",
        "--deployment-name", deployment,
        "--app", app,
        "--token", "c1o3d3e7",
      ],
    };
    const command = new CodePushReleaseReactCommand(args);
    sandbox.stub(fs, "readFileSync").returns(`
    {
      "name": "RnCodepushAndroid",
      "version": "0.0.1",
      "dependencies": {
        "react": "16.13.1",
        "react-native": "0.63.3",
        "react-native-code-push": "6.3.0"
      }
    }
    `);
    Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}/deployments/${deployment}`).reply(200, {});
    Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}`).reply(200, {
      os,
      platform: "react-native",
    });
    sandbox.stub(mkdirp, "sync");
    sandbox.stub(fileUtils, "fileDoesNotExistOrIsDirectory").returns(false);
    sandbox.stub(fileUtils, "createEmptyTmpReleaseFolder");
    // Note: removeReactTmpDir is NOT stubbed here — this test exercises the
    // real cleanup path and observes the rimraf call it makes.
    sandbox.stub(ReactNativeTools, "runReactNativeBundleCommand");
    sandbox.stub(command, "release" as any).resolves(<CommandResult>{ succeeded: true });
    sandbox.stub(pfs, "mkTempDir").resolves("fake/path/code-push");
    const syncStub = sandbox.stub(rimraf, "sync");
    // Act
    await command.execute();
    // Assert
    Sinon.assert.calledOnceWithExactly(syncStub, `${osLib.tmpdir()}/react-*`);
  });
context("hermes", function () {
    it("hermes enabled only when specified in the app podfile file", async function () {
      const os = "ios";
      // Arrange
      const args = {
        ...goldenPathArgs,
        // prettier-ignore
        args: [
          "--target-binary-version", "1.0.0",
          "--deployment-name", deployment,
          "--app", app,
          "--token", "c1o3d3e7",
        ],
      };
      const command = new CodePushReleaseReactCommand(args);
      // Newer RN versions (0.64.0) where iOS Hermes is supported.
      sandbox.stub(fs, "readFileSync").returns(`
      {
        "name": "RnCodepushAndroid",
        "version": "0.0.1",
        "dependencies": {
          "react": "17.0.0",
          "react-native": "0.64.0",
          "react-native-code-push": "6.3.0"
        }
      }
      `);
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}/deployments/${deployment}`).reply(200, {});
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}`).reply(200, {
        os,
        platform: "react-native",
      });
      sandbox.stub(mkdirp, "sync");
      sandbox.stub(fileUtils, "fileDoesNotExistOrIsDirectory").returns(false);
      sandbox.stub(fileUtils, "createEmptyTmpReleaseFolder");
      sandbox.stub(command, "release" as any).resolves(<CommandResult>{ succeeded: true });
      sandbox.stub(fileUtils, "removeReactTmpDir");
      sandbox.stub(ReactNativeTools, "runReactNativeBundleCommand");
      // Podfile-based detection reports Hermes enabled.
      sandbox.stub(ReactNativeTools, "getiOSHermesEnabled").returns(true);
      const runHermesEmitBinaryCommandStub = sandbox.stub(ReactNativeTools, "runHermesEmitBinaryCommand");
      // Act
      await command.execute();
      // Assert
      expect(runHermesEmitBinaryCommandStub.calledOnce).is.true;
    });
    it("hermes enabled only when specified in the app gradle file", async function () {
      const os = "Android";
      // Arrange
      const args = {
        ...goldenPathArgs,
        // prettier-ignore
        args: [
          "--target-binary-version", "1.0.0",
          "--deployment-name", deployment,
          "--app", app,
          "--token", "c1o3d3e7",
        ],
      };
      const command = new CodePushReleaseReactCommand(args);
      sandbox.stub(fs, "readFileSync").returns(`
      {
        "name": "RnCodepushAndroid",
        "version": "0.0.1",
        "dependencies": {
          "react": "16.13.1",
          "react-native": "0.63.3",
          "react-native-code-push": "6.3.0"
        }
      }
      `);
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}/deployments/${deployment}`).reply(200, {});
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}`).reply(200, {
        os,
        platform: "react-native",
      });
      sandbox.stub(mkdirp, "sync");
      sandbox.stub(fileUtils, "fileDoesNotExistOrIsDirectory").returns(false);
      sandbox.stub(fileUtils, "createEmptyTmpReleaseFolder");
      sandbox.stub(command, "release" as any).resolves(<CommandResult>{ succeeded: true });
      sandbox.stub(fileUtils, "removeReactTmpDir");
      sandbox.stub(ReactNativeTools, "runReactNativeBundleCommand");
      // Gradle file lookup: lstatSync says it's a file, and the parsed gradle
      // contents declare "enableHermes: true".
      sandbox.stub(fs, "lstatSync").returns({ isDirectory: () => false } as any);
      sandbox.stub(g2js, "parseFile").resolves({ "project.ext.react": ["enableHermes: true"] });
      const runHermesEmitBinaryCommandStub = sandbox.stub(ReactNativeTools, "runHermesEmitBinaryCommand");
      // Act
      await command.execute();
      // Assert
      expect(runHermesEmitBinaryCommandStub.calledOnce).is.true;
    });
    it("project.ext.react is not defined in the app gradle file", async function () {
      const os = "Android";
      // Arrange
      const args = {
        ...goldenPathArgs,
        // prettier-ignore
        args: [
          "--target-binary-version", "1.0.0",
          "--deployment-name", deployment,
          "--app", app,
          "--token", "c1o3d3e7",
        ],
      };
      const command = new CodePushReleaseReactCommand(args);
      sandbox.stub(fs, "readFileSync").returns(`
      {
        "name": "RnCodepushAndroid",
        "version": "0.0.1",
        "dependencies": {
          "react": "16.13.1",
          "react-native": "0.63.3",
          "react-native-code-push": "6.3.0"
        }
      }
      `);
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}/deployments/${deployment}`).reply(200, {});
      Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}`).reply(200, {
        os,
        platform: "react-native",
      });
      sandbox.stub(mkdirp, "sync");
      sandbox.stub(fileUtils, "fileDoesNotExistOrIsDirectory").returns(false);
      sandbox.stub(fileUtils, "createEmptyTmpReleaseFolder");
      sandbox.stub(command, "release" as any).resolves(<CommandResult>{ succeeded: true });
      sandbox.stub(fileUtils, "removeReactTmpDir");
      sandbox.stub(ReactNativeTools, "runReactNativeBundleCommand");
      sandbox.stub(fs, "lstatSync").returns({ isDirectory: () => false } as any);
      // Parsed gradle file has no "project.ext.react" key, so Hermes must be
      // treated as disabled and no bytecode emit should happen.
      sandbox.stub(g2js, "parseFile").resolves({ bogusKey: "bogusValue" });
      const runHermesEmitBinaryCommandStub = sandbox.stub(ReactNativeTools, "runHermesEmitBinaryCommand");
      // Act
      await command.execute();
      // Assert
      expect(runHermesEmitBinaryCommandStub.notCalled).is.true;
    });
context("RN versions", function () {
[
{ version: "^0.63.3", desc: "versions starting with caret" },
{ version: "mobiletechvn/react-native#v0.63.2.fix-shadow-node", desc: "custom versions" },
].forEach((testCase) => {
it(`works for ${testCase.desc} like (${testCase.version})`, async function () {
const os = "Android";
// Arrange
const args = {
...goldenPathArgs,
// prettier-ignore
args: [
"--target-binary-version", "1.0.0",
"--deployment-name", deployment,
"--app", app,
"--token", "c1o3d3e7",
],
};
const command = new CodePushReleaseReactCommand(args);
sandbox.stub(fs, "readFileSync").returns(`
{
"name": "RnCodepushAndroid",
"version": "0.0.1",
"dependencies": {
"react": "16.13.1",
"react-native": "^0.63.3",
"react-native-code-push": "6.3.0"
}
}
`);
Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}/deployments/${deployment}`).reply(200, {});
Nock("https://api.appcenter.ms/").get(`/v0.1/apps/${app}`).reply(200, {
os,
platform: "react-native",
});
sandbox.stub(mkdirp, "sync");
sandbox.stub(fileUtils, "fileDoesNotExistOrIsDirectory").returns(false);
sandbox.stub(fileUtils, "createEmptyTmpReleaseFolder");
sandbox.stub(command, "release" as any).resolves(<CommandResult>{ succeeded: true });
sandbox.stub(fileUtils, "removeReactTmpDir");
sandbox.stub(ReactNativeTools, "runReactNativeBundleCommand");
sandbox.stub(fs, "lstatSync").returns({ isDirectory: () => false } as any);
sandbox.stub(g2js, "parseFile").resolves({ "project.ext.react": ["enableHermes: true"] });
const childProcessStub = new events.EventEmitter() as any;
childProcessStub.stdout = {
on: () => {},
};
childProcessStub.stderr = {
on: () => {},
};
sandbox
.stub(cp, "spawn")
.onFirstCall()
.callsFake(() => {
setTimeout(() => {
childProcessStub.emit("close");
});
return childProcessStub as any;
});
sandbox.stub(fs, "copyFile").yields(null);
sandbox.stub(fs, "unlink").yields(null);
// Act
const result = await command.execute();
// Assert
expect(result.succeeded).to.be.true;
});
});
});
});
}); | the_stack |
import { createStore } from "@frontity/connect";
import clone from "clone-deep";
import wpSource from "../";
import merge from "deepmerge";
describe("state.source.get", () => {
const initStore = (data = {}) => {
const config = clone(wpSource());
// replace data by the one passed as argument
config.state.source.data = data;
return createStore(config);
};
test("returns an object with isReady/isFetching = false if not found", () => {
const store = initStore();
expect(store.state.source.get("/some-post/")).toEqual({
isFetching: false,
isReady: false,
link: "/some-post/",
page: 1,
query: {},
route: "/some-post/",
});
});
test("returns the correct object (path)", () => {
const post = {
type: "post",
id: 1,
isPostType: true,
isReady: true,
isFetching: false,
};
const { source } = initStore({ "/some-post/": post }).state;
expect(source.get("/some-post")).toEqual(post);
expect(source.get("/some-post/")).toEqual(post);
expect(source.get("https://wp.site.test/some-post/")).toEqual(post);
});
  test("returns the correct object (path, page)", () => {
    // Paginated tag archive stored under its full "/page/2/" link.
    const archive = {
      taxonomy: "tag",
      id: 2,
      items: [],
      isArchive: true,
      isTaxonomy: true,
      isTag: true,
      isReady: true,
      isFetching: false,
    };
    const { source } = initStore({ "/tag/some-tag/page/2/": archive }).state;
    // Trailing-slash and absolute-URL variants resolve to the same entry.
    expect(source.get("/tag/some-tag/page/2")).toEqual(archive);
    expect(source.get("/tag/some-tag/page/2/")).toEqual(archive);
    expect(source.get("https://wp.site.test/tag/some-tag/page/2/")).toEqual(
      archive
    );
  });
  test("returns the correct object (path, query)", () => {
    // Archive stored under a link that includes a query string.
    const archive = {
      taxonomy: "tag",
      id: 2,
      items: [],
      isArchive: true,
      isTaxonomy: true,
      isTag: true,
      isReady: true,
      isFetching: false,
    };
    const { source } = initStore({ "/tag/some-tag/?s=search": archive }).state;
    // Slash and absolute-URL variants with the same query resolve equally.
    expect(source.get("/tag/some-tag?s=search")).toEqual(archive);
    expect(source.get("/tag/some-tag/?s=search")).toEqual(archive);
    expect(source.get("https://wp.site.test/tag/some-tag/?s=search")).toEqual(
      archive
    );
  });
  test("returns the correct object (path, unordered query)", () => {
    // Archive stored under "?k1=v1&k2=v2"; lookups use the reversed key
    // order, so query params must be normalized before matching.
    const archive = {
      taxonomy: "tag",
      id: 2,
      items: [],
      isArchive: true,
      isTaxonomy: true,
      isTag: true,
      isReady: true,
      isFetching: false,
    };
    const { source } = initStore({
      "/tag/some-tag/?k1=v1&k2=v2": archive,
    }).state;
    expect(source.get("/tag/some-tag?k2=v2&k1=v1")).toEqual(archive);
    expect(source.get("/tag/some-tag/?k2=v2&k1=v1")).toEqual(archive);
    expect(
      source.get("https://wp.site.test/tag/some-tag/?k2=v2&k1=v1")
    ).toEqual(archive);
  });
  test("returns the correct object (path, page, query)", () => {
    // Combination case: pagination segment plus a query string.
    const archive = {
      taxonomy: "tag",
      id: 2,
      items: [],
      isArchive: true,
      isTaxonomy: true,
      isTag: true,
      isReady: true,
      isFetching: false,
    };
    const { source } = initStore({
      "/tag/some-tag/page/2/?s=search": archive,
    }).state;
    expect(source.get("/tag/some-tag/page/2/?s=search")).toEqual(archive);
    expect(source.get("/tag/some-tag/page/2?s=search")).toEqual(archive);
    expect(
      source.get("https://wp.site.test/tag/some-tag/page/2?s=search")
    ).toEqual(archive);
  });
});
describe("state.wpSource.isWpCom (state.source.isWpCom)", () => {
  const initStore = () => {
    // Deep-clone a fresh wp-source package merged with an empty
    // `state.frontity` namespace so tests can assign frontity.url directly.
    const config = clone(merge(wpSource(), { state: { frontity: {} } }));
    return createStore(config);
  };
it("should be false (state.frontity.url, state.source.url not WP com subdomain)", () => {
const { state } = initStore();
state.frontity.url = "https://final-domain.com/";
state.source.url = "https://wp-domain.com/";
expect(state.wpSource.isWpCom).toBe(false);
expect(state.source.isWpCom).toBe(false);
});
  it("should be false (state.frontity.url, state.wpSource.api not WP com)", () => {
    const { state } = initStore();
    state.frontity.url = "https://final-domain.com/";
    // A self-hosted REST endpoint must be detected as non-WP-com regardless
    // of the trailing slash.
    // With trailing slash.
    state.wpSource.api = "https://wp-domain.com/wp-json/";
    expect(state.wpSource.isWpCom).toBe(false);
    expect(state.source.isWpCom).toBe(false);
    // Without trailing slash.
    state.wpSource.api = "https://wp-domain.com/wp-json";
    expect(state.wpSource.isWpCom).toBe(false);
    expect(state.source.isWpCom).toBe(false);
  });
  it("should be false (state.frontity.url, state.source.api not WP com)", () => {
    const { state } = initStore();
    state.frontity.url = "https://final-domain.com/";
    // Same as the wpSource.api case above, but set through the deprecated
    // state.source.api alias.
    // With trailing slash.
    state.source.api = "https://wp-domain.com/wp-json/";
    expect(state.wpSource.isWpCom).toBe(false);
    expect(state.source.isWpCom).toBe(false);
    // Without trailing slash.
    state.source.api = "https://wp-domain.com/wp-json";
    expect(state.wpSource.isWpCom).toBe(false);
    expect(state.source.isWpCom).toBe(false);
  });
  it("should be true (state.frontity.url, state.source.url is WP com subdomain)", () => {
    const { state } = initStore();
    state.frontity.url = "https://final-domain.com/";
    // A *.wordpress.com subdomain must be detected as WordPress.com.
    state.source.url = "https://sub.wordpress.com/";
    expect(state.wpSource.isWpCom).toBe(true);
    expect(state.source.isWpCom).toBe(true);
  });
  it("should be true (state.frontity.url, state.wpSource.api is WP com)", () => {
    const { state } = initStore();
    state.frontity.url = "https://final-domain.com/";
    // A public-api.wordpress.com endpoint must be detected as WordPress.com,
    // regardless of the trailing slash.
    // With trailing slash.
    state.wpSource.api =
      "https://public-api.wordpress.com/wp/v2/sites/sub.wordpress.com/";
    expect(state.wpSource.isWpCom).toBe(true);
    expect(state.source.isWpCom).toBe(true);
    // Without trailing slash.
    state.wpSource.api =
      "https://public-api.wordpress.com/wp/v2/sites/sub.wordpress.com";
    expect(state.wpSource.isWpCom).toBe(true);
    expect(state.source.isWpCom).toBe(true);
  });
it("should be true (state.frontity.url, state.source.api is WP com)", () => {
const { state } = initStore();
state.frontity.url = "https://final-domain.com/";
// With trailing slash.
state.source.api =
"https://public-api.wordpress.com/wp/v2/sites/sub.wordpress.com/";
expect(state.wpSource.isWpCom).toBe(true);
expect(state.source.isWpCom).toBe(true);
// Without trailing slash.
state.source.api =
"https://public-api.wordpress.com/wp/v2/sites/sub.wordpress.com";
expect(state.wpSource.isWpCom).toBe(true);
expect(state.source.isWpCom).toBe(true);
});
it("should be true (state.frontity.url, state.wpSource.api is WP com, custom domain)", () => {
const { state } = initStore();
state.frontity.url = "https://final-domain.com/";
// With trailing slash.
state.wpSource.api =
"https://public-api.wordpress.com/wp/v2/sites/wp-domain.com/";
expect(state.wpSource.isWpCom).toBe(true);
expect(state.source.isWpCom).toBe(true);
// Without trailing slash.
state.wpSource.api =
"https://public-api.wordpress.com/wp/v2/sites/wp-domain.com";
expect(state.wpSource.isWpCom).toBe(true);
expect(state.source.isWpCom).toBe(true);
});
it("should be true (state.frontity.url, state.source.api is WP com, custom domain)", () => {
const { state } = initStore();
state.frontity.url = "https://final-domain.com/";
// With trailing slash.
state.source.api =
"https://public-api.wordpress.com/wp/v2/sites/wp-domain.com/";
expect(state.wpSource.isWpCom).toBe(true);
expect(state.source.isWpCom).toBe(true);
// Without trailing slash.
state.source.api =
"https://public-api.wordpress.com/wp/v2/sites/wp-domain.com";
expect(state.wpSource.isWpCom).toBe(true);
expect(state.source.isWpCom).toBe(true);
});
});
describe("state.wpSource.api (state.source.api)", () => {
const initStore = () => {
const config = clone(merge(wpSource(), { state: { frontity: {} } }));
return createStore(config);
};
it("should return a WP org and Business WP com API (state.frontity.url)", () => {
const { state } = initStore();
// With trailing slash.
state.frontity.url = "https://final-domain.com/";
expect(state.wpSource.api).toBe("https://final-domain.com/wp-json/");
expect(state.source.api).toBe("https://final-domain.com/wp-json/");
// Without trailing slash.
state.frontity.url = "https://final-domain.com";
expect(state.wpSource.api).toBe("https://final-domain.com/wp-json/");
expect(state.source.api).toBe("https://final-domain.com/wp-json/");
});
it("should return a WP org and Business WP com API (state.source.url)", () => {
const { state } = initStore();
state.frontity.url = "https://final-domain.com/";
// With trailing slash.
state.source.url = "https://wp-domain.com/";
expect(state.wpSource.api).toBe("https://wp-domain.com/wp-json/");
expect(state.source.api).toBe("https://wp-domain.com/wp-json/");
// Without trailing slash.
state.source.url = "https://wp-domain.com";
expect(state.wpSource.api).toBe("https://wp-domain.com/wp-json/");
expect(state.source.api).toBe("https://wp-domain.com/wp-json/");
});
it("should return a WP org and Business WP com API (state.frontity.url, prefix)", () => {
const { state } = initStore();
// With a trailing slash and different combinations of `prefix`.
state.frontity.url = "https://final-domain.com/";
state.wpSource.prefix = "api";
expect(state.wpSource.api).toBe("https://final-domain.com/api/");
expect(state.source.api).toBe("https://final-domain.com/api/");
state.wpSource.prefix = "api/";
expect(state.wpSource.api).toBe("https://final-domain.com/api/");
expect(state.source.api).toBe("https://final-domain.com/api/");
state.wpSource.prefix = "/api";
expect(state.wpSource.api).toBe("https://final-domain.com/api/");
expect(state.source.api).toBe("https://final-domain.com/api/");
state.wpSource.prefix = "/api/";
expect(state.wpSource.api).toBe("https://final-domain.com/api/");
expect(state.source.api).toBe("https://final-domain.com/api/");
// Without a trailing slash.
state.frontity.url = "https://final-domain.com";
state.wpSource.prefix = "api";
expect(state.wpSource.api).toBe("https://final-domain.com/api/");
expect(state.source.api).toBe("https://final-domain.com/api/");
state.wpSource.prefix = "api/";
expect(state.wpSource.api).toBe("https://final-domain.com/api/");
expect(state.source.api).toBe("https://final-domain.com/api/");
state.wpSource.prefix = "/api";
expect(state.wpSource.api).toBe("https://final-domain.com/api/");
expect(state.source.api).toBe("https://final-domain.com/api/");
state.wpSource.prefix = "/api/";
expect(state.wpSource.api).toBe("https://final-domain.com/api/");
expect(state.source.api).toBe("https://final-domain.com/api/");
});
it("should return a WP org and Business WP com API (state.source.url, prefix)", () => {
const { state } = initStore();
state.frontity.url = "https://final-domain.com/";
// With a trailing slash and different combinations of `prefix`.
state.source.url = "https://wp-domain.com/";
state.wpSource.prefix = "api";
expect(state.wpSource.api).toBe("https://wp-domain.com/api/");
expect(state.source.api).toBe("https://wp-domain.com/api/");
state.wpSource.prefix = "api/";
expect(state.wpSource.api).toBe("https://wp-domain.com/api/");
expect(state.source.api).toBe("https://wp-domain.com/api/");
state.wpSource.prefix = "/api";
expect(state.wpSource.api).toBe("https://wp-domain.com/api/");
expect(state.source.api).toBe("https://wp-domain.com/api/");
state.wpSource.prefix = "/api/";
expect(state.wpSource.api).toBe("https://wp-domain.com/api/");
expect(state.source.api).toBe("https://wp-domain.com/api/");
// Without a trailing slash.
state.source.url = "https://wp-domain.com";
state.wpSource.prefix = "api";
expect(state.wpSource.api).toBe("https://wp-domain.com/api/");
expect(state.source.api).toBe("https://wp-domain.com/api/");
state.wpSource.prefix = "api/";
expect(state.wpSource.api).toBe("https://wp-domain.com/api/");
expect(state.source.api).toBe("https://wp-domain.com/api/");
state.wpSource.prefix = "/api";
expect(state.wpSource.api).toBe("https://wp-domain.com/api/");
expect(state.source.api).toBe("https://wp-domain.com/api/");
state.wpSource.prefix = "/api/";
expect(state.wpSource.api).toBe("https://wp-domain.com/api/");
expect(state.source.api).toBe("https://wp-domain.com/api/");
});
it("should return a WP com API (state.frontity.url, state.wpSource.isWpCom)", () => {
const { state } = initStore();
state.wpSource.isWpCom = true;
// With final slash.
state.frontity.url = "https://final-domain.com/";
expect(state.wpSource.api).toBe(
"https://public-api.wordpress.com/wp/v2/sites/final-domain.com/"
);
expect(state.source.api).toBe(
"https://public-api.wordpress.com/wp/v2/sites/final-domain.com/"
);
// Without final slash.
state.frontity.url = "https://final-domain.com";
expect(state.wpSource.api).toBe(
"https://public-api.wordpress.com/wp/v2/sites/final-domain.com/"
);
expect(state.source.api).toBe(
"https://public-api.wordpress.com/wp/v2/sites/final-domain.com/"
);
});
it("should return a WP com API (state.source.url - Free WP com)", () => {
const { state } = initStore();
state.frontity.url = "https://final-domain.com/";
// With trailing slash.
state.source.url = "https://sub.wordpress.com/";
expect(state.wpSource.api).toBe(
"https://public-api.wordpress.com/wp/v2/sites/sub.wordpress.com/"
);
expect(state.source.api).toBe(
"https://public-api.wordpress.com/wp/v2/sites/sub.wordpress.com/"
);
// Without trailing slash.
state.source.url = "https://sub.wordpress.com";
expect(state.wpSource.api).toBe(
"https://public-api.wordpress.com/wp/v2/sites/sub.wordpress.com/"
);
expect(state.source.api).toBe(
"https://public-api.wordpress.com/wp/v2/sites/sub.wordpress.com/"
);
});
it("should return a WP com API (state.source.url, state.wpSource.isWpCom)", () => {
const { state } = initStore();
state.frontity.url = "https://final-domain.com/";
state.wpSource.isWpCom = true;
// With final slash.
state.source.url = "https://wp-domain.com/";
expect(state.wpSource.api).toBe(
"https://public-api.wordpress.com/wp/v2/sites/wp-domain.com/"
);
expect(state.source.api).toBe(
"https://public-api.wordpress.com/wp/v2/sites/wp-domain.com/"
);
// Without final slash.
state.source.url = "https://wp-domain.com";
expect(state.wpSource.api).toBe(
"https://public-api.wordpress.com/wp/v2/sites/wp-domain.com/"
);
expect(state.source.api).toBe(
"https://public-api.wordpress.com/wp/v2/sites/wp-domain.com/"
);
});
});
describe("state.source.url", () => {
const initStore = () => {
const config = clone(merge(wpSource(), { state: { frontity: {} } }));
return createStore(config);
};
it("should return state.frontity.url if no other props are set (embedded)", () => {
const { state } = initStore();
// With a trailing slash.
state.frontity.url = "https://final-domain.com/";
expect(state.source.url).toBe("https://final-domain.com/");
// Without a trailing slash.
state.frontity.url = "https://final-domain.com";
expect(state.source.url).toBe("https://final-domain.com/");
});
it("should derive from state.wpSource.api (WP org and Business WP com)", () => {
const { state } = initStore();
state.frontity.url = "https://final-domain.com/";
// With a trailing slash.
state.wpSource.api = "https://wp-domain.com/wp-json/";
expect(state.source.url).toBe("https://wp-domain.com/");
// Without a trailing slash.
state.wpSource.api = "https://wp-domain.com/wp-json";
expect(state.source.url).toBe("https://wp-domain.com/");
});
it("should derive from state.source.api (WP org and Business WP com)", () => {
const { state } = initStore();
state.frontity.url = "https://final-domain.com/";
// With a trailing slash.
state.source.api = "https://wp-domain.com/wp-json/";
expect(state.source.url).toBe("https://wp-domain.com/");
// Without a trailing slash.
state.source.api = "https://wp-domain.com/wp-json";
expect(state.source.url).toBe("https://wp-domain.com/");
});
it("should derive from state.wpSource.api (WP org and Business WP com, w/ subdirectory)", () => {
const { state } = initStore();
state.frontity.url = "https://final-domain.com/";
// With a trailing slash.
state.wpSource.api = "https://wp-domain.com/subdir/wp-json/";
expect(state.source.url).toBe("https://wp-domain.com/subdir/");
// Without a trailing slash.
state.wpSource.api = "https://wp-domain.com/subdir/wp-json";
expect(state.source.url).toBe("https://wp-domain.com/subdir/");
});
it("should derive from state.source.api (WP org and Business WP com, w/ subdirectory)", () => {
const { state } = initStore();
state.frontity.url = "https://final-domain.com/";
// With a trailing slash.
state.source.api = "https://wp-domain.com/subdir/wp-json/";
expect(state.source.url).toBe("https://wp-domain.com/subdir/");
// Without a trailing slash.
state.source.api = "https://wp-domain.com/subdir/wp-json";
expect(state.source.url).toBe("https://wp-domain.com/subdir/");
});
it("should derive from state.wpSource.api and prefix (WP org and Business WP com)", () => {
const { state } = initStore();
state.frontity.url = "https://final-domain.com/";
// With a trailing slash and different combinations of `prefix`.
state.wpSource.api = "https://wp-domain.com/api/";
state.wpSource.prefix = "api";
expect(state.source.url).toBe("https://wp-domain.com/");
state.wpSource.prefix = "api/";
expect(state.source.url).toBe("https://wp-domain.com/");
state.wpSource.prefix = "/api";
expect(state.source.url).toBe("https://wp-domain.com/");
state.wpSource.prefix = "/api/";
expect(state.source.url).toBe("https://wp-domain.com/");
// Without a trailing slash.
state.wpSource.api = "https://wp-domain.com/api";
state.wpSource.prefix = "api";
expect(state.source.url).toBe("https://wp-domain.com/");
state.wpSource.prefix = "api/";
expect(state.source.url).toBe("https://wp-domain.com/");
state.wpSource.prefix = "/api";
expect(state.source.url).toBe("https://wp-domain.com/");
state.wpSource.prefix = "/api/";
expect(state.source.url).toBe("https://wp-domain.com/");
});
it("should derive from state.source.api and prefix (WP org and Business WP com)", () => {
const { state } = initStore();
state.frontity.url = "https://final-domain.com/";
// With a trailing slash and different combinations of `prefix`.
state.source.api = "https://wp-domain.com/api/";
state.wpSource.prefix = "api";
expect(state.source.url).toBe("https://wp-domain.com/");
state.wpSource.prefix = "api/";
expect(state.source.url).toBe("https://wp-domain.com/");
state.wpSource.prefix = "/api";
expect(state.source.url).toBe("https://wp-domain.com/");
state.wpSource.prefix = "/api/";
expect(state.source.url).toBe("https://wp-domain.com/");
// Without a trailing slash.
state.source.api = "https://wp-domain.com/api";
state.wpSource.prefix = "api";
expect(state.source.url).toBe("https://wp-domain.com/");
state.wpSource.prefix = "api/";
expect(state.source.url).toBe("https://wp-domain.com/");
state.wpSource.prefix = "/api";
expect(state.source.url).toBe("https://wp-domain.com/");
state.wpSource.prefix = "/api/";
expect(state.source.url).toBe("https://wp-domain.com/");
});
it("should derive from state.wpSource.api and prefix (WP org and Business WP com, w/ subdirectory)", () => {
const { state } = initStore();
state.frontity.url = "https://final-domain.com/";
// With a trailing slash and different combinations of `prefix`.
state.wpSource.api = "https://wp-domain.com/subdir/api/";
state.wpSource.prefix = "api";
expect(state.source.url).toBe("https://wp-domain.com/subdir/");
state.wpSource.prefix = "api/";
expect(state.source.url).toBe("https://wp-domain.com/subdir/");
state.wpSource.prefix = "/api";
expect(state.source.url).toBe("https://wp-domain.com/subdir/");
state.wpSource.prefix = "/api/";
expect(state.source.url).toBe("https://wp-domain.com/subdir/");
// Without a trailing slash.
state.wpSource.api = "https://wp-domain.com/subdir/api";
state.wpSource.prefix = "api";
expect(state.source.url).toBe("https://wp-domain.com/subdir/");
state.wpSource.prefix = "api/";
expect(state.source.url).toBe("https://wp-domain.com/subdir/");
state.wpSource.prefix = "/api";
expect(state.source.url).toBe("https://wp-domain.com/subdir/");
state.wpSource.prefix = "/api/";
expect(state.source.url).toBe("https://wp-domain.com/subdir/");
});
it("should derive from state.source.api and prefix (WP org and Business WP com, w/ subdirectory)", () => {
const { state } = initStore();
state.frontity.url = "https://final-domain.com/";
// With a trailing slash and different combinations of `prefix`.
state.source.api = "https://wp-domain.com/subdir/api/";
state.wpSource.prefix = "api";
expect(state.source.url).toBe("https://wp-domain.com/subdir/");
state.wpSource.prefix = "api/";
expect(state.source.url).toBe("https://wp-domain.com/subdir/");
state.wpSource.prefix = "/api";
expect(state.source.url).toBe("https://wp-domain.com/subdir/");
state.wpSource.prefix = "/api/";
expect(state.source.url).toBe("https://wp-domain.com/subdir/");
// Without a trailing slash.
state.source.api = "https://wp-domain.com/subdir/api";
state.wpSource.prefix = "api";
expect(state.source.url).toBe("https://wp-domain.com/subdir/");
state.wpSource.prefix = "api/";
expect(state.source.url).toBe("https://wp-domain.com/subdir/");
state.wpSource.prefix = "/api";
expect(state.source.url).toBe("https://wp-domain.com/subdir/");
state.wpSource.prefix = "/api/";
expect(state.source.url).toBe("https://wp-domain.com/subdir/");
});
it("should derive from state.wpSource.api (Free, Personal and Premium WP com)", () => {
const { state } = initStore();
state.frontity.url = "https://final-domain.com/";
// With a trailing slash.
state.wpSource.api =
"https://public-api.wordpress.com/wp/v2/sites/wp-domain.com/";
expect(state.source.url).toBe("https://wp-domain.com/");
// Without a trailing slash.
state.wpSource.api =
"https://public-api.wordpress.com/wp/v2/sites/wp-domain.com";
expect(state.source.url).toBe("https://wp-domain.com/");
});
it("should derive from state.source.api (Free, Personal and Premium WP com)", () => {
const { state } = initStore();
state.frontity.url = "https://final-domain.com/";
// With a trailing slash.
state.source.api =
"https://public-api.wordpress.com/wp/v2/sites/wp-domain.com/";
expect(state.source.url).toBe("https://wp-domain.com/");
// Without a trailing slash.
state.source.api =
"https://public-api.wordpress.com/wp/v2/sites/wp-domain.com";
expect(state.source.url).toBe("https://wp-domain.com/");
});
}); | the_stack |
import { ITelemetryBaseLogger } from '@fluidframework/common-definitions';
import Denque from 'denque';
import { assert, fail, noop } from './Common';
import { EditLog, SequencedOrderedEditId } from './EditLog';
import { Snapshot } from './Snapshot';
import { Edit, EditStatus, EditingResult, GenericTransaction } from './generic';
import { EditId } from './Identifiers';
import { RevisionValueCache } from './RevisionValueCache';
import { initialTree } from './InitialTree';
import { ReconciliationEdit, ReconciliationPath } from './ReconciliationPath';
/**
 * Callback for when an edit is applied (meaning the result of applying it to a particular snapshot is computed).
 *
 * Edits may be applied any time a Snapshot is computed that includes them.
 * Depending on the caching policy of the LogViewer, a given edit may or may not be applied in order to compute a Snapshot containing it.
 *
 * If the same edit occurs in different contexts (ex: a local edit is adjusted for a new remote edit),
 * it will be reapplied, and this may produce a different result.
 *
 * Edits may additionally be reapplied at other times since their previous output might not be cached.
 *
 * If an application requests the current view, this will force all edits to be applied.
 * Such an application can use this callback to log each edit as it comes in and observe its status;
 * however this may include duplicates, as well as entries for reapplications in modified contexts.
 *
 * In the context of this callback,
 * skipping the first evaluation of an edit in a particular context due to setKnownEditingResult is still considered applying.
 * To use this callback to track when the actual computational work of applying edits is done, only count cases when `wasCached` is false.
 */
export type EditStatusCallback = (editResult: EditStatus, editId: EditId, wasCached: boolean) => void;
/**
 * Result of applying an identified transaction.
 * Augments `EditingResult` with the unique identifier of the edit that produced it.
 * @public
 */
export type EditingResultWithId<TChange> = EditingResult<TChange> & {
	/**
	 * Unique identifier for this edit. Must never be reused.
	 * Used for referencing and de-duplicating edits.
	 */
	readonly id: EditId;
};
/**
 * The data cached by `CachingLogViewer` for an edit.
 * One of three variants: a successful local application, a failed local application,
 * or an entry recovered from a summary (whose application status is unknown).
 */
export type EditCacheEntry<TChange> =
	| SuccessfulEditCacheEntry<TChange>
	| UnsuccessfulEditCacheEntry
	| SummarizedEditResultCacheEntry;
/**
 * The data cached by `CachingLogViewer` for an edit that it has attempted to apply locally.
 * Excludes `SummarizedEditResultCacheEntry`, whose status was never computed by this client.
 */
export type AttemptedEditResultCacheEntry<TChange> = SuccessfulEditCacheEntry<TChange> | UnsuccessfulEditCacheEntry;
/**
 * The data cached by `CachingLogViewer` for an edit that it has successfully applied locally.
 */
export interface SuccessfulEditCacheEntry<TChange> {
	/**
	 * The snapshot resulting from the edit.
	 */
	// NOTE(review): not `readonly`, unlike UnsuccessfulEditCacheEntry.snapshot — consider aligning if no caller mutates it.
	snapshot: Snapshot;
	/**
	 * The status code for the edit that produced the snapshot.
	 */
	status: EditStatus.Applied;
	/**
	 * The resolved changes that were applied during the edit and their associated outcome.
	 */
	steps: readonly { resolvedChange: TChange; after: Snapshot }[];
}
/**
 * The data cached by `CachingLogViewer` for an edit that it has unsuccessfully attempted to apply locally.
 */
export interface UnsuccessfulEditCacheEntry {
	/**
	 * The snapshot resulting from the edit.
	 */
	readonly snapshot: Snapshot;
	/**
	 * The status code for the edit that produced the snapshot.
	 */
	status: EditStatus.Invalid | EditStatus.Malformed;
}
/**
 * The data cached by `CachingLogViewer` for an edit that it has retrieved from a summary.
 * The `undefined` status discriminates this variant from the two attempted-edit variants.
 * TODO:#57176: once summarized edits carry enough information remove this interface and use `AttemptedEditResultCacheEntry` instead.
 */
export interface SummarizedEditResultCacheEntry {
	/**
	 * The snapshot resulting from the edit.
	 */
	snapshot: Snapshot;
	status?: undefined;
}
/**
 * A revision corresponds to an index in an `EditLog`.
 *
 * It is associated with the output `Snapshot` of applying the edit at the index to the previous revision.
 * For example:
 * - revision 0 corresponds to the initialSnapshot.
 * - revision 1 corresponds to the output of editLog[0] applied to the initialSnapshot.
 */
export type Revision = number;
/**
 * Creates `Snapshot`s for the revisions in an `EditLog`.
 */
export interface LogViewer {
	/**
	 * Returns the `Snapshot` output associated with the largest revision in `editLog` less than or equal to the supplied revision.
	 *
	 * For example:
	 * - revision 0 returns the initialSnapshot.
	 * - revision 1 returns the output of editLog[0] (or initialSnapshot if there is no edit 0).
	 * - revision Number.POSITIVE_INFINITY returns the newest revision.
	 */
	getSnapshot(revision: Revision): Promise<Snapshot>;
	/**
	 * Returns the `Snapshot` output associated with the largest revision in `editLog` less than or equal to the supplied revision.
	 * Can only be used to retrieve revisions added during the current sessions.
	 *
	 * For example:
	 * - revision 0 returns the initialSnapshot.
	 * - revision 1 returns the output of editLog[0] (or initialSnapshot if there is no edit 0).
	 * - revision Number.POSITIVE_INFINITY returns the newest revision.
	 */
	getSnapshotInSession(revision: Revision): Snapshot;
}
/**
* Creates Snapshots for revisions associated with an EditLog and caches the results.
* @internal
*/
export class CachingLogViewer<TChange> implements LogViewer {
	public readonly log: EditLog<TChange>;
	/**
	 * Maximum size of the sequenced snapshot cache.
	 */
	public static readonly sequencedCacheSizeMax = 50;
	/**
	 * A cache for local snapshots.
	 * It is invalidated whenever a new sequenced edit (that was not already a local edit) is added to the log.
	 * When a previously local edit is sequenced, this cache is adjusted to account for it, not invalidated.
	 */
	private readonly localSnapshotCache = new Denque<AttemptedEditResultCacheEntry<TChange>>();
	/**
	 * Cache of sequenced snapshots.
	 */
	private readonly sequencedSnapshotCache: RevisionValueCache<EditCacheEntry<TChange>>;
	/**
	 * Called whenever an edit is processed.
	 * This will have been called at least once for any edit if a revision after that edit has been requested.
	 * It may be called multiple times: the number of calls and when they occur depends on caching and is an implementation detail.
	 */
	private readonly processEditStatus: EditStatusCallback;
	/**
	 * Iff true, additional correctness assertions will be run during LogViewer operations.
	 */
	private readonly expensiveValidation: boolean;
	/**
	 * Telemetry logger, used to log events such as edit application rejection.
	 */
	private readonly logger: ITelemetryBaseLogger;
	/**
	 * The ordered queue of edits that originated from this client that have never been applied (by this log viewer) in a sequenced state.
	 * This means these edits may be local or sequenced, and may have been applied (possibly multiple times) while still local.
	 * Used to log telemetry about the result of edit application. Edits are removed when first applied after being sequenced.
	 */
	private readonly unappliedSelfEdits = new Denque<EditId>();
	/**
	 * Cache of applying an edit.
	 * Due to use of Transactions in checkouts, a common pattern involves applying an edit
	 * as part of the transaction, then submitting it.
	 * This cache helps optimize that case by avoiding recomputing the edit if no other edits were added during the transaction.
	 */
	private cachedEditResult?: { editId: EditId; result: EditingResult<TChange> };
	private readonly transactionFactory: (snapshot: Snapshot) => GenericTransaction<TChange>;
	/**
	 * Create a new LogViewer
	 * @param log - the edit log which snapshots will be based on.
	 * @param baseSnapshot - the snapshot corresponding to the 0th revision. Defaults to a snapshot of `initialTree`.
	 * @param knownRevisions - a set of [sequencedRevision, cacheEntry] pairs that are known (have been precomputed) at construction time.
	 * These revisions are guaranteed to never be evicted from the cache.
	 * @param expensiveValidation - Iff true, additional correctness assertions will be run during LogViewer operations.
	 * @param processEditStatus - called after applying an edit.
	 * @param logger - used to log telemetry
	 * @param transactionFactory - creates the transaction used to apply an edit's changes to a given snapshot.
	 * @param minimumSequenceNumber - initial lower bound of the cache's retention window (see `setMinimumSequenceNumber`).
	 */
	public constructor(
		log: EditLog<TChange>,
		baseSnapshot: Snapshot = Snapshot.fromTree(initialTree),
		knownRevisions: [Revision, EditCacheEntry<TChange>][] = [],
		expensiveValidation = false,
		processEditStatus: EditStatusCallback = noop,
		logger: ITelemetryBaseLogger,
		transactionFactory: (snapshot: Snapshot) => GenericTransaction<TChange>,
		minimumSequenceNumber = 0
	) {
		this.log = log;
		if (expensiveValidation) {
			knownRevisions.forEach(([revision]) => {
				assert(Number.isInteger(revision), 'revision must be an integer');
				assert(
					this.log.isSequencedRevision(revision),
					'revision must correspond to the result of a SequencedEdit'
				);
			});
		}
		this.sequencedSnapshotCache = new RevisionValueCache(
			CachingLogViewer.sequencedCacheSizeMax,
			minimumSequenceNumber,
			[...knownRevisions, [0, { snapshot: baseSnapshot }]]
		);
		// `?? noop` additionally guards against an explicitly passed null; the parameter default only covers omission/undefined.
		this.processEditStatus = processEditStatus ?? noop;
		this.expensiveValidation = expensiveValidation;
		this.logger = logger;
		this.transactionFactory = transactionFactory;
		this.log.registerEditAddedHandler(this.handleEditAdded.bind(this));
	}
	/**
	 * Performs the tracking needed to log telemetry about failed (invalid/malformed) local edits when they are sequenced.
	 * As a performance optimization, this method also caches snapshots generated by local edits if they are sequenced without
	 * being interleaved with remote edits.
	 * @param edit - the edit that was added to the log.
	 * @param isLocal - true iff the edit is a not-yet-sequenced local edit.
	 * @param wasLocal - true iff the (now sequenced) edit originated from this client.
	 */
	private handleEditAdded(edit: Edit<TChange>, isLocal: boolean, wasLocal: boolean): void {
		if (isLocal) {
			this.unappliedSelfEdits.push(edit.id);
		} else if (wasLocal) {
			// If the new sequenced edit was generated by this client, the corresponding cache entry (if there is one)
			// will be at the front of the queue. If the queue is empty, then a concurrent sequenced edit from remote client
			// must have invalidated the queue cache.
			const entry = this.localSnapshotCache.shift();
			if (entry !== undefined) {
				// NOTE(review): assumes the log has already counted this edit, making `numberOfSequencedEdits` its
				// revision — confirm against EditLog's handler-invocation order.
				const revision = this.log.numberOfSequencedEdits;
				const snapshot = entry.snapshot;
				// Promote the locally computed entry into the sequenced cache; `steps` only exists for applied edits.
				this.sequencedSnapshotCache.cacheValue(
					revision,
					entry.status === EditStatus.Applied
						? {
								snapshot,
								status: entry.status,
								steps: entry.steps,
						  }
						: {
								snapshot,
								status: entry.status,
						  }
				);
				this.handleSequencedEditResult(edit, entry);
			}
		} else {
			// Invalidate any cached results of applying edits which are ordered after `edit` (which are all remaining local edits)
			this.localSnapshotCache.clear();
		}
	}
public async getEditResult(revision: Revision): Promise<EditCacheEntry<TChange>> {
const startingPoint = this.getStartingPoint(revision);
const { startRevision } = startingPoint;
let current: EditCacheEntry<TChange> = startingPoint;
for (let i = startRevision; i < revision && i < this.log.length; i++) {
const edit = await this.log.getEditAtIndex(i);
current = this.applyEdit(current.snapshot, edit, i);
}
return current;
}
public async getSnapshot(revision: Revision): Promise<Snapshot> {
return (await this.getEditResult(revision)).snapshot;
}
public getEditResultInSession(revision: Revision): EditCacheEntry<TChange> {
const startingPoint = this.getStartingPoint(revision);
const { startRevision } = startingPoint;
let current: EditCacheEntry<TChange> = startingPoint;
for (let i = startRevision; i < revision && i < this.log.length; i++) {
const edit = this.log.getEditInSessionAtIndex(i);
current = this.applyEdit(current.snapshot, edit, i);
}
return current;
}
public getSnapshotInSession(revision: Revision): Snapshot {
return this.getEditResultInSession(revision).snapshot;
}
	/**
	 * Informs the CachingLogViewer of the latest known minimumSequenceNumber for all connected clients.
	 * This can be used to provide more aggressive caching of revisions within the collaboration window, as those revisions
	 * are more likely to be demanded to resolve conflicts.
	 * @param minimumSequenceNumber - the minimum known sequence number of all connected clients.
	 */
	public setMinimumSequenceNumber(minimumSequenceNumber: number): void {
		// Sequence numbers in Fluid are 1-indexed, meaning they correspond to revisions, and can be used as revisions.
		// This ensures that all revisions >= minimumSequenceNumber are kept in the cache, meaning that even if all clients are caught up
		// the most recent sequenced revision will be cached.
		this.sequencedSnapshotCache.updateRetentionWindow(minimumSequenceNumber);
	}
	/**
	 * Inform the CachingLogViewer that a particular edit is known to have a specific result when applied to a particular Snapshot.
	 * LogViewer may use this information as an optimization to avoid re-running the edit if re-applied to the same Snapshot.
	 */
	public setKnownEditingResult(edit: Edit<TChange>, result: EditingResult<TChange>): void {
		this.cachedEditResult = { editId: edit.id, result };
	}
	/**
	 * @returns the cached snapshot closest to the requested `revision`, together with `startRevision`,
	 * the revision that snapshot corresponds to (i.e. the point from which edits must be applied to reach `revision`).
	 */
	private getStartingPoint(revision: Revision): { startRevision: Revision } & EditCacheEntry<TChange> {
		// Per the documentation for revision, the returned snapshot should be the output of the edit at the largest index <= `revision`.
		const revisionClamped = Math.min(revision, this.log.length);
		let current: EditCacheEntry<TChange>;
		let startRevision: Revision;
		const { numberOfSequencedEdits } = this.log;
		const isLocalRevision = revisionClamped > numberOfSequencedEdits;
		if (isLocalRevision && !this.localSnapshotCache.isEmpty()) {
			const { length } = this.localSnapshotCache;
			// Local snapshot cache is indexed such that the snapshot for revision 0 (a local edit) is stored at index 0 in the cache.
			// This is because the local cache does not contain an entry for the implicit initial tree edit.
			const localCacheIndex = revisionClamped - 1 - numberOfSequencedEdits;
			if (localCacheIndex < length) {
				// Exact hit: the requested local revision is already cached, so no edits need to be applied.
				const cached =
					this.localSnapshotCache.peekAt(localCacheIndex) ?? fail('missing tail of localSnapshotCache');
				return {
					...cached,
					startRevision: revisionClamped,
				};
			} else {
				// Partial hit: resume from the newest cached local revision.
				current = this.localSnapshotCache.peekAt(length - 1) ?? fail('missing tail of localSnapshotCache');
				startRevision = numberOfSequencedEdits + length;
			}
		} else {
			// Sequenced revision (or no usable local cache): resume from the nearest cached sequenced revision.
			const [cachedRevision, cachedSnapshot] =
				this.sequencedSnapshotCache.getClosestEntry(revisionClamped) ?? fail('No preceding snapshot cached.');
			startRevision = cachedRevision;
			current = cachedSnapshot;
		}
		return { startRevision, ...current };
	}
	/**
	 * Helper for applying an edit at the supplied snapshot.
	 * Must only be called in the order that edits appear in the log.
	 * Must only be called once for a given local edit as long as the local cache has not been invalidated.
	 * Must only be called once for a given sequenced edit.
	 * @param prevSnapshot - the snapshot to apply the edit to.
	 * @param edit - the edit to apply.
	 * @param editIndex - the index of `edit` in the log; the produced revision is `editIndex + 1`.
	 * @returns the resulting snapshot and the outcome of edit that produced it.
	 */
	private applyEdit(
		prevSnapshot: Snapshot,
		edit: Edit<TChange>,
		editIndex: number
	): AttemptedEditResultCacheEntry<TChange> {
		let editingResult: EditingResult<TChange>;
		// True when the result provided via setKnownEditingResult is reused instead of recomputed.
		let cached;
		if (
			this.cachedEditResult !== undefined &&
			this.cachedEditResult.editId === edit.id &&
			this.cachedEditResult.result.before === prevSnapshot
		) {
			editingResult = this.cachedEditResult.result;
			cached = true;
		} else {
			editingResult = this.transactionFactory(prevSnapshot)
				.applyChanges(edit.changes, this.reconciliationPathFromEdit(edit.id))
				.close();
			cached = false;
		}
		// Revision r is defined as the output of the edit at index r - 1.
		const revision = editIndex + 1;
		let nextSnapshot: Snapshot;
		if (editingResult.status === EditStatus.Applied) {
			nextSnapshot = editingResult.after;
		} else {
			// Invalid/malformed edits leave the tree unchanged.
			nextSnapshot = prevSnapshot;
		}
		const computedCacheEntry =
			editingResult.status === EditStatus.Applied
				? { snapshot: nextSnapshot, status: editingResult.status, steps: editingResult.steps }
				: { snapshot: nextSnapshot, status: editingResult.status };
		if (this.log.isSequencedRevision(revision)) {
			this.sequencedSnapshotCache.cacheValue(revision, computedCacheEntry);
			this.handleSequencedEditResult(edit, computedCacheEntry);
		} else {
			// This relies on local edits being append only, and that generating the snapshot for a local revision requires generating
			// the snapshot for all local revisions before it in the log. Thus, generating such a snapshot will necessarily require
			// calls to this method for all local revisions prior, guaranteeing the correct push order.
			assert(
				revision === this.log.numberOfSequencedEdits + this.localSnapshotCache.length + 1,
				'Local snapshot cached out of order.'
			);
			this.localSnapshotCache.push(computedCacheEntry);
		}
		this.processEditStatus(editingResult.status, this.log.getIdAtIndex(editIndex), cached);
		return computedCacheEntry;
	}
/**
* Helper for performing caching and telemetry logging when a sequenced local edit is first applied.
* Must only be called for non-cached sequenced edits.
*/
private handleSequencedEditResult(edit: Edit<TChange>, result: AttemptedEditResultCacheEntry<TChange>): void {
// This is the first time this sequenced edit has been processed by this LogViewer. If it was a local edit, log telemetry
// in the event that it was invalid or malformed.
if (this.unappliedSelfEdits.length > 0) {
if (edit.id === this.unappliedSelfEdits.peekFront()) {
if (result.status !== EditStatus.Applied) {
this.logger.send({
category: 'generic',
eventName:
result.status === EditStatus.Malformed
? 'MalformedSharedTreeEdit'
: 'InvalidSharedTreeEdit',
});
}
this.unappliedSelfEdits.shift();
} else if (this.expensiveValidation) {
for (let i = 0; i < this.unappliedSelfEdits.length; i++) {
assert(this.unappliedSelfEdits.peekAt(i) !== edit.id, 'Local edits processed out of order.');
}
}
}
}
    /**
     * We currently compute only the "main branch" part of the reconciliation path (meaning we don't include inverts of the edits
     * that occurred on the rebased branch). Doing so is only needed for the sequential anchor resolution approach which is not
     * yet supported.
     * @param editId - The ID for the edit to get the reconciliation path for.
     * @returns a lazily-computed reconciliation path for the given edit.
     */
    public reconciliationPathFromEdit(editId: EditId): ReconciliationPath<TChange> {
        const reconciliationPath: ReconciliationEdit<TChange>[] = [];
        let cached = false;
        // The path is computed lazily: the Proxy defers construction until the first property
        // access, then serves all subsequent accesses from the populated `reconciliationPath`.
        return new Proxy(reconciliationPath, {
            get: (target, prop): unknown => {
                if (!cached) {
                    cached = true;
                    const orderedId = this.log.getOrderedEditId(editId);
                    // Only sequenced edits that carry sequencing info can have a non-empty path.
                    if (orderedId.isLocal === false && orderedId.sequenceInfo !== undefined) {
                        const earliestSequenced = this.earliestSequencedEditInSession();
                        if (earliestSequenced !== undefined) {
                            // Start no earlier than the earliest sequenced edit we still know about.
                            const earliestEditSequenceNumber = earliestSequenced.sequenceNumber;
                            const targetSequenceNumber = Math.max(
                                earliestEditSequenceNumber,
                                orderedId.sequenceInfo.referenceSequenceNumber
                            );
                            if (targetSequenceNumber < orderedId.sequenceInfo.sequenceNumber) {
                                const firstEdit = this.getEditResultFromSequenceNumber(targetSequenceNumber);
                                if (firstEdit !== undefined) {
                                    if (firstEdit.status === EditStatus.Applied) {
                                        // The steps array is spread into an array-like record that also
                                        // carries before/after snapshots and an explicit `length`.
                                        reconciliationPath.push({
                                            ...firstEdit.steps,
                                            before: firstEdit.before,
                                            after: firstEdit.after,
                                            length: firstEdit.steps.length,
                                        });
                                    }
                                    // Walk every edit strictly between the starting edit and `editId`,
                                    // recording only those that actually applied.
                                    const lowestIndex = this.log.getIndexOfId(firstEdit.id) + 1;
                                    const highestIndex = this.log.getIndexOfId(editId) - 1;
                                    for (let index = lowestIndex; index <= highestIndex; ++index) {
                                        const edit = this.getEditResultFromIndex(index);
                                        if (edit.status === EditStatus.Applied) {
                                            reconciliationPath.push({
                                                ...edit.steps,
                                                before: edit.before,
                                                after: edit.after,
                                                length: edit.steps.length,
                                            });
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                return target[prop];
            },
        });
    }
/**
* @returns Edit information for the earliest known sequenced edit.
*/
public earliestSequencedEditInSession(): { edit: Edit<TChange>; sequenceNumber: number } | undefined {
const earliestEditIndex = this.log.earliestAvailableEditIndex;
const lastSequencedEdit = this.log.numberOfSequencedEdits + earliestEditIndex - 1;
for (let index = earliestEditIndex; index <= lastSequencedEdit; ++index) {
const edit = this.log.getEditInSessionAtIndex(index);
const editOrderedId = this.log.getOrderedEditId(edit.id) as SequencedOrderedEditId;
if (editOrderedId.sequenceInfo !== undefined) {
return { edit, sequenceNumber: editOrderedId.sequenceInfo.sequenceNumber };
}
}
return undefined;
}
/**
* @returns Edit result information for the edit at the given `index`.
*/
private getEditResultFromIndex(index: number): EditingResultWithId<TChange> {
const edit = this.log.getEditInSessionAtIndex(index);
const before = this.getSnapshotInSession(index);
const resultAfter = this.getEditResultInSession(index + 1);
if (resultAfter.status === undefined) {
fail('The status of every edit in session should be known');
}
return resultAfter.status === EditStatus.Applied
? {
id: edit.id,
status: EditStatus.Applied,
before,
changes: edit.changes,
after: resultAfter.snapshot,
steps: resultAfter.steps,
}
: {
id: edit.id,
status: resultAfter.status,
before,
changes: edit.changes,
};
}
    /**
     * @param sequenceNumber - The server-assigned sequenced number assigned to the edit of interest.
     * @returns Edit result information for the edit with the given sequence number. Undefined if no such edit is known.
     */
    public getEditResultFromSequenceNumber(sequenceNumber: number): EditingResultWithId<TChange> | undefined {
        const earliestSequenced = this.earliestSequencedEditInSession();
        if (earliestSequenced !== undefined && sequenceNumber >= earliestSequenced.sequenceNumber) {
            // Scan the sequenced edits from newest to oldest so the first match is the
            // latest edit at or before the requested sequence number.
            const lowestIndex = this.log.getIndexOfId(earliestSequenced.edit.id);
            const highestIndex = this.log.numberOfSequencedEdits - 1;
            for (let index = highestIndex; index >= lowestIndex; --index) {
                const edit = this.log.getEditInSessionAtIndex(index);
                const orderedId = this.log.getOrderedEditId(edit.id) as SequencedOrderedEditId;
                // If `orderedId.sequenceInfo.sequenceNumber` is equal to the requested `sequenceNumber` then we have found the edit of
                // interest and simply return its associated information.
                // Note that the check below is also satisfied if `orderedId.sequenceInfo.sequenceNumber` is lower than the requested
                // `sequenceNumber`. This can happen when the edit for the requested `sequenceNumber` has either not yet been received or
                // has been processed by a different DDS (several DDSes can share the same stream of operations and will only see those
                // relevant to them). In such cases, we return the edit info for the last known edit before that.
                if (orderedId.sequenceInfo && orderedId.sequenceInfo.sequenceNumber <= sequenceNumber) {
                    const before = this.getSnapshotInSession(index);
                    const resultAfter = this.getEditResultInSession(index + 1);
                    if (resultAfter.status === undefined) {
                        fail('The status of every edit in session should be known');
                    }
                    // Applied edits additionally expose the resulting snapshot and the edit steps.
                    return resultAfter.status === EditStatus.Applied
                        ? {
                                id: edit.id,
                                status: EditStatus.Applied,
                                before,
                                changes: edit.changes,
                                after: resultAfter.snapshot,
                                steps: resultAfter.steps,
                          }
                        : {
                                id: edit.id,
                                status: resultAfter.status,
                                before,
                                changes: edit.changes,
                          };
                }
            }
        }
        return undefined;
    }
}
import { createProgram, Program, createModuleResolutionCache, TypeChecker, getOriginalNode, Declaration, isIdentifier, isToken } from 'typescript';
import { Type, Expression, WrappedNodeExpr, ExternalExpr } from '@angular/compiler';
import { readConfiguration } from '@angular/compiler-cli';
import { NgCompilerHost } from '@angular/compiler-cli/src/ngtsc/core';
import { NgCompilerOptions } from '@angular/compiler-cli/src/ngtsc/core/api';
import { InjectableDecoratorHandler, PipeDecoratorHandler, DirectiveDecoratorHandler, ReferencesRegistry, NoopReferencesRegistry, NgModuleDecoratorHandler, ComponentDecoratorHandler } from '@angular/compiler-cli/src/ngtsc/annotations';
import { NgtscCompilerHost, FileSystem, LogicalFileSystem, NodeJSFileSystem } from '@angular/compiler-cli/src/ngtsc/file_system';
import { TypeScriptReflectionHost, ClassDeclaration } from '@angular/compiler-cli/src/ngtsc/reflection';
import { PartialEvaluator } from '@angular/compiler-cli/src/ngtsc/partial_evaluator';
import { IncrementalDriver } from '@angular/compiler-cli/src/ngtsc/incremental';
import { DefaultImportTracker, ReferenceEmitStrategy, AliasingHost, Reference, ReferenceEmitter, LogicalProjectStrategy, RelativePathStrategy, PrivateExportAliasingHost, LocalIdentifierStrategy, AbsoluteModuleStrategy, AliasStrategy, UnifiedModulesStrategy, UnifiedModulesAliasingHost, ModuleResolver } from '@angular/compiler-cli/src/ngtsc/imports';
import { InjectableClassRegistry, CompoundMetadataRegistry, DtsMetadataReader, LocalMetadataRegistry, CompoundMetadataReader, TemplateMapping } from '@angular/compiler-cli/src/ngtsc/metadata';
import { MetadataDtsModuleScopeResolver, LocalModuleScopeRegistry, ComponentScopeReader } from '@angular/compiler-cli/src/ngtsc/scope';
import { getSourceFileOrNull, isDtsPath, isFromDtsFile } from '@angular/compiler-cli/src/ngtsc/util/src/typescript';
import { NgModuleRouteAnalyzer } from '@angular/compiler-cli/src/ngtsc/routing';
import { CycleAnalyzer, ImportGraph } from '@angular/compiler-cli/src/ngtsc/cycles';
import { AdapterResourceLoader } from '@angular/compiler-cli/src/ngtsc/resource';
import { ReferenceGraph } from '@angular/compiler-cli/src/ngtsc/entry_point';
import { DtsTransformRegistry, DecoratorHandler, CompilationMode } from '@angular/compiler-cli/src/ngtsc/transform';
import { PerfRecorder, NOOP_PERF_RECORDER } from '@angular/compiler-cli/src/ngtsc/perf';
import { ModuleWithProvidersScanner } from '@angular/compiler-cli/src/ngtsc/modulewithproviders';
import { NgModuleSymbol } from './module.symbol';
import { NgastTraitCompiler } from './trait-compiler';
import { ComponentSymbol } from './component.symbol';
import { symbolFactory, FactoryOutput } from './find-symbol';
import { InjectableSymbol } from './injectable.symbol';
import { DirectiveSymbol } from './directive.symbol';
import { PipeSymbol } from './pipe.symbol';
import { AnnotationNames, getDtsAnnotation, getLocalAnnotation } from './utils';
import { ProviderRegistry } from './provider';
import { dirname, join } from 'path';
/**
 * The set of compiler services used by `WorkspaceSymbols`.
 * Every property is created lazily and memoized (see `WorkspaceSymbols.lazy`).
 */
interface Toolkit {
  program: Program;
  host: NgCompilerHost;
  traitCompiler: NgastTraitCompiler;
  // Handler
  injectableHandler: InjectableDecoratorHandler;
  pipeHandler: PipeDecoratorHandler;
  directiveHandler: DirectiveDecoratorHandler;
  moduleHandler: NgModuleDecoratorHandler;
  componentHandler: ComponentDecoratorHandler;
  templateMapping: TemplateMapping;
  checker: TypeChecker;
  reflector: TypeScriptReflectionHost;
  defaultImportTracker: DefaultImportTracker;
  injectableRegistry: InjectableClassRegistry;
  evaluator: PartialEvaluator;
  dtsReader: DtsMetadataReader;
  metaRegistry: CompoundMetadataRegistry;
  scopeRegistry: LocalModuleScopeRegistry;
  metaReader: CompoundMetadataReader;
  aliasingHost: AliasingHost | null;
  localMetaReader: LocalMetadataRegistry;
  refEmitter: ReferenceEmitter;
  referencesRegistry: ReferencesRegistry;
  routeAnalyzer: NgModuleRouteAnalyzer;
  resourceLoader: AdapterResourceLoader;
  moduleResolver: ModuleResolver;
  cycleAnalyzer: CycleAnalyzer;
  incrementalDriver: IncrementalDriver;
  dtsTransforms: DtsTransformRegistry;
  mwpScanner: ModuleWithProvidersScanner;
  // Fixed: a stray `s` after this member's semicolon previously declared a bogus
  // implicitly-`any` property named `s` (an error under `noImplicitAny`).
  providerRegistry: ProviderRegistry;
}
// code from :
// https://github.com/angular/angular/blob/9.1.x/packages/compiler-cli/src/ngtsc/core/src/compiler.ts#L821
class ReferenceGraphAdapter implements ReferencesRegistry {
  constructor(private graph: ReferenceGraph) {}

  /** Record each referenced node in the graph, skipping references into .d.ts files. */
  add(source: Declaration, ...references: Reference<Declaration>[]): void {
    for (const reference of references) {
      const { node } = reference;
      let file = node.getSourceFile();
      if (file === undefined) {
        // Fall back to the original (pre-transform) node to locate a source file.
        file = getOriginalNode(node).getSourceFile();
      }
      // Only record local references (not references into .d.ts files).
      const isLocal = file === undefined || !isDtsPath(file.fileName);
      if (isLocal) {
        this.graph.add(source, node);
      }
    }
  }
}
// All the code here comes from the ngtsc Compiler file, for more detail see :
// https://github.com/angular/angular/blob/9.1.x/packages/compiler-cli/src/ngtsc/core/src/compiler.ts
export class WorkspaceSymbols {
private options: NgCompilerOptions;
private rootNames: string[];
private toolkit: Partial<Toolkit> = {};
private isCore = false;
private analysed = false;
private oldProgram: Program
constructor(
private tsconfigPath: string,
private fs: FileSystem = new NodeJSFileSystem(),
private perfRecorder: PerfRecorder = NOOP_PERF_RECORDER
) {
const config = readConfiguration(this.tsconfigPath);
this.options = config.options;
this.rootNames = config.rootNames;
}
/////////////////////////////
// ------ PUBLIC API ----- //
/////////////////////////////
/** Process all classes in the program */
get traitCompiler() {
return this.lazy('traitCompiler', () => new NgastTraitCompiler(
[this.componentHandler, this.directiveHandler as any, this.pipeHandler, this.injectableHandler, this.moduleHandler] as DecoratorHandler<unknown, unknown, unknown>[],
this.reflector,
this.perfRecorder,
this.incrementalDriver,
this.options.compileNonExportedClasses !== false,
CompilationMode.FULL,
this.dtsTransforms,
)
);
}
/** Collects information about local NgModules, Directives, Components, and Pipes (declare in the ts.Program) */
public get scopeRegistry() {
return this.lazy('scopeRegistry', () => {
const depScopeReader = new MetadataDtsModuleScopeResolver(this.dtsReader, this.aliasingHost);
return new LocalModuleScopeRegistry(this.localMetaReader, depScopeReader, this.refEmitter, this.aliasingHost);
});
}
/** Evaluate typescript Expression & update the dependency graph accordingly */
public get evaluator() {
return this.lazy('evaluator', () => new PartialEvaluator(
this.reflector,
this.checker,
this.incrementalDriver.depGraph
));
}
/** Keep track of the providers other than Injectable */
get providerRegistry() {
return this.lazy('providerRegistry', () => new ProviderRegistry(this))
}
public getClassRecords() {
this.ensureAnalysis();
return this.traitCompiler.allRecords();
}
public getAllModules() {
this.ensureAnalysis();
return this.traitCompiler.allRecords('NgModule').map(({ node }) => new NgModuleSymbol(this, node));
}
public getAllComponents() {
this.ensureAnalysis();
return this.traitCompiler.allRecords('Component').map(({ node }) => new ComponentSymbol(this, node));
}
public getAllDirectives() {
this.ensureAnalysis();
return this.traitCompiler.allRecords('Directive').map(({ node }) => new DirectiveSymbol(this, node));
}
public getAllInjectable() {
this.ensureAnalysis();
return this.traitCompiler.allRecords('Injectable').map(({ node }) => new InjectableSymbol(this, node));
}
public getAllPipes() {
this.ensureAnalysis();
return this.traitCompiler.allRecords('Pipe').map(({ node }) => new PipeSymbol(this, node));
}
/** Find a symbol based on the class expression */
public findSymbol(token: Expression, relativeTo: string = '') {
if (token instanceof WrappedNodeExpr) {
if (isIdentifier(token.node)) {
const decl = this.reflector.getDeclarationOfIdentifier(token.node);
if (decl?.node && this.reflector.isClass(decl.node)) {
return this.getSymbol(decl.node);
} else if (decl?.node) {
return this.providerRegistry.getProvider(decl.node);
}
} else if (isToken(token.node)) {
return this.providerRegistry.getProvider(token.node);
}
} else if (token instanceof ExternalExpr) {
const dir = dirname(relativeTo);
const module = token.value.moduleName ?? '';
const moduleName = module.endsWith('.ts') ? module : `${module}.ts`;
const path = join(dir, moduleName);
return this.getAllInjectable().find(injectable => {
return injectable.path === path && injectable.name === token.value.name
});
}
}
/** Find a symbol based on the class expression */
public getSymbol<A extends AnnotationNames>(node: ClassDeclaration): FactoryOutput<A> | undefined {
const isDts = isFromDtsFile(node);
let annotation: AnnotationNames | undefined;
if (isDts) {
const members = this.reflector.getMembersOfClass(node);
annotation = getDtsAnnotation(members);
} else {
annotation = getLocalAnnotation(node.decorators);
}
if (annotation && (annotation in symbolFactory)) {
const factory = symbolFactory[annotation];
return factory(this, node) as FactoryOutput<A>;
}
}
/////////////////////////
// ----- PRIVATE ----- //
/////////////////////////
/** Angular wrapper around the typescript host compiler */
// TODO: add reusable program
private get host() {
return this.lazy('host', () => {
const baseHost = new NgtscCompilerHost(this.fs, this.options);
return NgCompilerHost.wrap(baseHost, this.rootNames, this.options, this.oldProgram || null);
});
}
/** Typescript program */
private get program() {
return this.lazy('program', () => createProgram({
host: this.host,
rootNames: this.host.inputFiles,
options: this.options
})
);
}
/** Handler for @Injectable() annotations */
private get injectableHandler() {
return this.lazy('injectableHandler', () => new InjectableDecoratorHandler(
this.reflector,
this.defaultImportTracker,
this.isCore,
this.options.strictInjectionParameters || false,
this.injectableRegistry
)
);
}
/** Handler for @Pipe() annotations */
private get pipeHandler() {
return this.lazy('pipeHandler', () => new PipeDecoratorHandler(
this.reflector,
this.evaluator,
this.metaRegistry,
this.scopeRegistry,
this.defaultImportTracker,
this.injectableRegistry,
this.isCore
)
);
}
/** Handler for @Directive() annotations */
private get directiveHandler() {
return this.lazy('directiveHandler', () => new DirectiveDecoratorHandler(
this.reflector,
this.evaluator,
this.metaRegistry,
this.scopeRegistry,
this.metaReader,
this.defaultImportTracker,
this.injectableRegistry,
this.isCore,
!!this.options.annotateForClosureCompiler,
!!this.options.compileNonExportedClasses
)
);
}
/** Handler for @NgModule() annotations */
private get moduleHandler() {
return this.lazy('moduleHandler', () => new NgModuleDecoratorHandler(
this.reflector,
this.evaluator,
this.metaReader,
this.metaRegistry,
this.scopeRegistry,
this.referencesRegistry,
this.isCore,
this.routeAnalyzer,
this.refEmitter,
this.host.factoryTracker,
this.defaultImportTracker,
!!this.options.annotateForClosureCompiler,
this.injectableRegistry,
this.options.i18nInLocale
)
);
}
/** Handler for @Component() annotations */
private get componentHandler() {
return this.lazy('componentHandler', () => new ComponentDecoratorHandler(
this.reflector,
this.evaluator,
this.metaRegistry,
this.metaReader,
this.scopeReader,
this.scopeRegistry,
this.templateMapping,
this.isCore,
this.resourceLoader,
this.host.rootDirs,
this.options.preserveWhitespaces || false,
this.options.i18nUseExternalIds !== false,
this.options.enableI18nLegacyMessageIdFormat !== false,
false,
this.options.i18nNormalizeLineEndingsInICUs,
this.moduleResolver,
this.cycleAnalyzer,
this.refEmitter,
this.defaultImportTracker,
this.incrementalDriver.depGraph,
this.injectableRegistry,
!!this.options.annotateForClosureCompiler,
)
);
}
/** Static reflection of declarations using the TypeScript type checker */
private get reflector() {
return this.lazy('reflector', () => new TypeScriptReflectionHost(this.checker));
}
/** Typescript type checker use to semantically analyze a source file */
private get checker() {
return this.lazy('checker', () => this.program.getTypeChecker());
}
/** Register metadata from local NgModules, Directives, Components, and Pipes */
private get metaRegistry() {
return this.lazy('metaRegistry', () => new CompoundMetadataRegistry([ this.localMetaReader, this.scopeRegistry ]));
}
/** Register metadata from local declaration files (.d.ts) */
private get metaReader() {
return this.lazy('metaReader', () => new CompoundMetadataReader([ this.localMetaReader, this.dtsReader ]));
}
/** Registers and records usages of Identifers that came from default import statements (import X from 'some/module') */
private get defaultImportTracker() {
return this.lazy('defaultImportTracker', () => new DefaultImportTracker());
}
/** Keeps track of classes that can be constructed via dependency injection (e.g. injectables, directives, pipes) */
private get injectableRegistry() {
return this.lazy('injectableRegistry', () => new InjectableClassRegistry(this.reflector));
}
// @todo() support oldProgram https://github.com/angular/angular/blob/master/packages/compiler-cli/src/ngtsc/core/src/compiler.ts#L130
private get incrementalDriver() {
return this.lazy('incrementalDriver', () => IncrementalDriver.fresh(this.program));
}
private get templateMapping() {
return this.lazy('templateMapping', () => new TemplateMapping());
}
/** (pre)Load resources using cache */
private get resourceLoader() {
return this.lazy('resourceLoader', () => new AdapterResourceLoader(this.host, this.options));
}
/** Resolve the module source-files references in lazy-loaded routes */
private get moduleResolver() {
return this.lazy('moduleResolver', () => {
const moduleResolutionCache = createModuleResolutionCache(
this.host.getCurrentDirectory(),
fileName => this.host.getCanonicalFileName(fileName)
);
return new ModuleResolver(this.program, this.options, this.host, moduleResolutionCache);
});
}
/** Entry source file of the host */
private get entryPoint() {
return this.host.entryPoint !== null ? getSourceFileOrNull(this.program, this.host.entryPoint) : null;
}
/** Generates and consumes alias re-exports */
private get aliasingHost() {
return this.lazy('aliasingHost', () => {
let aliasingHost: AliasingHost | null = null;
const { _useHostForImportGeneration, generateDeepReexports } = this.options;
if (this.host.unifiedModulesHost === null || !_useHostForImportGeneration) {
if (this.entryPoint === null && generateDeepReexports === true) {
aliasingHost = new PrivateExportAliasingHost(this.reflector);
}
} else {
aliasingHost = new UnifiedModulesAliasingHost(this.host.unifiedModulesHost);
}
return aliasingHost;
});
}
/** Generates `Expression`s which refer to `Reference`s in a given context. */
private get refEmitter() {
return this.lazy('refEmitter', () => {
const { rootDir, rootDirs, _useHostForImportGeneration } = this.options;
let refEmitter: ReferenceEmitter;
if (this.host.unifiedModulesHost === null || !_useHostForImportGeneration) {
let localImportStrategy: ReferenceEmitStrategy;
if (rootDir !== undefined || rootDirs?.length) {
localImportStrategy = new LogicalProjectStrategy(
this.reflector,
new LogicalFileSystem([ ...this.host.rootDirs ], this.host)
);
} else {
localImportStrategy = new RelativePathStrategy(this.reflector);
}
refEmitter = new ReferenceEmitter([
new LocalIdentifierStrategy(),
new AbsoluteModuleStrategy(this.program, this.checker, this.moduleResolver, this.reflector),
localImportStrategy,
]);
} else {
refEmitter = new ReferenceEmitter([
new LocalIdentifierStrategy(),
new AliasStrategy(),
new UnifiedModulesStrategy(this.reflector, this.host.unifiedModulesHost),
]);
}
return refEmitter;
});
}
/** A registry of directive, pipe, and module metadata for types defined in the current compilation */
private get localMetaReader() {
return this.lazy('localMetaReader', () => new LocalMetadataRegistry());
}
/** A `MetadataReader` that can read metadata from `.d.ts` files, which have static Ivy properties */
private get dtsReader() {
return this.lazy('dtsReader', () => new DtsMetadataReader(this.checker, this.reflector));
}
/** Read information about the compilation scope of components. */
private get scopeReader() {
return this.scopeRegistry as ComponentScopeReader;
}
/** Used by DecoratorHandlers to register references during analysis */
private get referencesRegistry() {
return this.lazy('referencesRegistry', () => {
let referencesRegistry: ReferencesRegistry;
if (this.entryPoint !== null) {
const exportReferenceGraph = new ReferenceGraph();
referencesRegistry = new ReferenceGraphAdapter(exportReferenceGraph);
} else {
referencesRegistry = new NoopReferencesRegistry();
}
return referencesRegistry;
});
}
/** Analyzes a `ts.Program` for cycles. */
private get cycleAnalyzer() {
return this.lazy('cycleAnalyzer', () => {
const importGraph = new ImportGraph(this.moduleResolver);
return new CycleAnalyzer(importGraph);
});
}
/** Keeps track of declaration transform (`DtsTransform`) per source file */
private get dtsTransforms() {
return this.lazy('dtsTransforms', () => new DtsTransformRegistry());
}
/** Scan `ModuleWithProvider` classes */
private get mwpScanner() {
return this.lazy('mwpScanner', () => new ModuleWithProvidersScanner(this.reflector, this.evaluator, this.refEmitter));
}
/** Analyze lazy loaded routes */
public get routeAnalyzer() {
return this.lazy('routeAnalyzer', () => new NgModuleRouteAnalyzer(this.moduleResolver, this.evaluator));
}
/** Lazy load & memorize every tool in the `WorkspaceSymbols`'s toolkit */
private lazy<K extends keyof Toolkit>(key: K, load: () => Toolkit[K]): Toolkit[K] {
if (!this.toolkit[key]) {
this.toolkit[key] = load();
}
return this.toolkit[key] as Toolkit[K];
}
/** Perform analysis on all projects */
private analyzeAll() {
// Analyse all files
const analyzeSpan = this.perfRecorder.start('analyze');
for (const sf of this.program.getSourceFiles()) {
if (sf.isDeclarationFile) {
continue;
}
const analyzeFileSpan = this.perfRecorder.start('analyzeFile', sf);
this.traitCompiler.analyzeSync(sf);
// Scan for ModuleWithProvider
const addTypeReplacement = (node: Declaration, type: Type): void => {
this.dtsTransforms.getReturnTypeTransform(sf).addTypeReplacement(node, type);
};
this.mwpScanner.scan(sf, { addTypeReplacement });
this.perfRecorder.stop(analyzeFileSpan);
}
this.perfRecorder.stop(analyzeSpan);
// Resolve compilation
this.traitCompiler.resolve();
// Record NgModule Scope dependencies
const recordSpan = this.perfRecorder.start('recordDependencies');
const depGraph = this.incrementalDriver.depGraph;
for (const scope of this.scopeRegistry.getCompilationScopes()) {
const file = scope.declaration.getSourceFile();
const ngModuleFile = scope.ngModule.getSourceFile();
depGraph.addTransitiveDependency(ngModuleFile, file);
depGraph.addDependency(file, ngModuleFile);
const meta = this.metaReader.getDirectiveMetadata(new Reference<ClassDeclaration<any>>(scope.declaration));
// For components
if (meta !== null && meta.isComponent) {
depGraph.addTransitiveResources(ngModuleFile, file);
for (const directive of scope.directives) {
depGraph.addTransitiveDependency(file, directive.ref.node.getSourceFile());
}
for (const pipe of scope.pipes) {
depGraph.addTransitiveDependency(file, pipe.ref.node.getSourceFile());
}
}
}
this.perfRecorder.stop(recordSpan);
// Calculate which files need to be emitted
this.incrementalDriver.recordSuccessfulAnalysis(this.traitCompiler);
this.analysed = true;
}
private ensureAnalysis() {
if (!this.analysed) {
this.analyzeAll();
this.providerRegistry.recordAll();
// TODO: Implements the ProviderRegistry to keep track of FactoryProvider, ValueProvider, ...
}
}
} | the_stack |
import { ConcreteRequest } from "relay-runtime";
import { FragmentRefs } from "relay-runtime";
// Relay-generated operation types for MakeOfferModalQuery (see the query text below).

// Variables accepted by the query.
export type MakeOfferModalQueryVariables = {
    artworkID: string;
};
// Shape of the query response; `artwork` carries the MakeOfferModal_artwork fragment refs.
export type MakeOfferModalQueryResponse = {
    readonly artwork: {
        readonly " $fragmentRefs": FragmentRefs<"MakeOfferModal_artwork">;
    } | null;
};
// Pairing of response and variables used by Relay's typed query APIs.
export type MakeOfferModalQuery = {
    readonly response: MakeOfferModalQueryResponse;
    readonly variables: MakeOfferModalQueryVariables;
};
/*
query MakeOfferModalQuery(
$artworkID: String!
) {
artwork(id: $artworkID) {
...MakeOfferModal_artwork
id
}
}
fragment CollapsibleArtworkDetails_artwork on Artwork {
image {
url
width
height
}
internalID
title
date
saleMessage
attributionClass {
name
id
}
category
manufacturer
publisher
medium
conditionDescription {
details
}
certificateOfAuthenticity {
details
}
framed {
details
}
dimensions {
in
cm
}
signatureInfo {
details
}
artistNames
}
fragment InquiryMakeOfferButton_artwork on Artwork {
internalID
}
fragment MakeOfferModal_artwork on Artwork {
...CollapsibleArtworkDetails_artwork
...InquiryMakeOfferButton_artwork
internalID
isEdition
editionSets {
internalID
editionOf
isOfferableFromInquiry
listPrice {
__typename
... on Money {
display
}
... on PriceRange {
display
}
}
dimensions {
cm
in
}
id
}
}
*/
// Relay-compiler-generated request AST for MakeOfferModalQuery — presumably regenerated
// by the Relay compiler; avoid editing by hand. `v0`–`v7` are AST fragments shared
// between the reader `fragment` and the normalization `operation` below.
const node: ConcreteRequest = (function(){
var v0 = [
  {
    "defaultValue": null,
    "kind": "LocalArgument",
    "name": "artworkID"
  }
],
v1 = [
  {
    "kind": "Variable",
    "name": "id",
    "variableName": "artworkID"
  }
],
v2 = {
  "alias": null,
  "args": null,
  "kind": "ScalarField",
  "name": "internalID",
  "storageKey": null
},
v3 = {
  "alias": null,
  "args": null,
  "kind": "ScalarField",
  "name": "id",
  "storageKey": null
},
v4 = [
  {
    "alias": null,
    "args": null,
    "kind": "ScalarField",
    "name": "details",
    "storageKey": null
  }
],
v5 = {
  "alias": null,
  "args": null,
  "kind": "ScalarField",
  "name": "in",
  "storageKey": null
},
v6 = {
  "alias": null,
  "args": null,
  "kind": "ScalarField",
  "name": "cm",
  "storageKey": null
},
v7 = [
  {
    "alias": null,
    "args": null,
    "kind": "ScalarField",
    "name": "display",
    "storageKey": null
  }
];
return {
  // Reader fragment: resolves the MakeOfferModal_artwork fragment spread.
  "fragment": {
    "argumentDefinitions": (v0/*: any*/),
    "kind": "Fragment",
    "metadata": null,
    "name": "MakeOfferModalQuery",
    "selections": [
      {
        "alias": null,
        "args": (v1/*: any*/),
        "concreteType": "Artwork",
        "kind": "LinkedField",
        "name": "artwork",
        "plural": false,
        "selections": [
          {
            "args": null,
            "kind": "FragmentSpread",
            "name": "MakeOfferModal_artwork"
          }
        ],
        "storageKey": null
      }
    ],
    "type": "Query",
    "abstractKey": null
  },
  "kind": "Request",
  // Normalization operation: the fully flattened selection set sent to the server.
  "operation": {
    "argumentDefinitions": (v0/*: any*/),
    "kind": "Operation",
    "name": "MakeOfferModalQuery",
    "selections": [
      {
        "alias": null,
        "args": (v1/*: any*/),
        "concreteType": "Artwork",
        "kind": "LinkedField",
        "name": "artwork",
        "plural": false,
        "selections": [
          {
            "alias": null,
            "args": null,
            "concreteType": "Image",
            "kind": "LinkedField",
            "name": "image",
            "plural": false,
            "selections": [
              {
                "alias": null,
                "args": null,
                "kind": "ScalarField",
                "name": "url",
                "storageKey": null
              },
              {
                "alias": null,
                "args": null,
                "kind": "ScalarField",
                "name": "width",
                "storageKey": null
              },
              {
                "alias": null,
                "args": null,
                "kind": "ScalarField",
                "name": "height",
                "storageKey": null
              }
            ],
            "storageKey": null
          },
          (v2/*: any*/),
          {
            "alias": null,
            "args": null,
            "kind": "ScalarField",
            "name": "title",
            "storageKey": null
          },
          {
            "alias": null,
            "args": null,
            "kind": "ScalarField",
            "name": "date",
            "storageKey": null
          },
          {
            "alias": null,
            "args": null,
            "kind": "ScalarField",
            "name": "saleMessage",
            "storageKey": null
          },
          {
            "alias": null,
            "args": null,
            "concreteType": "AttributionClass",
            "kind": "LinkedField",
            "name": "attributionClass",
            "plural": false,
            "selections": [
              {
                "alias": null,
                "args": null,
                "kind": "ScalarField",
                "name": "name",
                "storageKey": null
              },
              (v3/*: any*/)
            ],
            "storageKey": null
          },
          {
            "alias": null,
            "args": null,
            "kind": "ScalarField",
            "name": "category",
            "storageKey": null
          },
          {
            "alias": null,
            "args": null,
            "kind": "ScalarField",
            "name": "manufacturer",
            "storageKey": null
          },
          {
            "alias": null,
            "args": null,
            "kind": "ScalarField",
            "name": "publisher",
            "storageKey": null
          },
          {
            "alias": null,
            "args": null,
            "kind": "ScalarField",
            "name": "medium",
            "storageKey": null
          },
          {
            "alias": null,
            "args": null,
            "concreteType": "ArtworkInfoRow",
            "kind": "LinkedField",
            "name": "conditionDescription",
            "plural": false,
            "selections": (v4/*: any*/),
            "storageKey": null
          },
          {
            "alias": null,
            "args": null,
            "concreteType": "ArtworkInfoRow",
            "kind": "LinkedField",
            "name": "certificateOfAuthenticity",
            "plural": false,
            "selections": (v4/*: any*/),
            "storageKey": null
          },
          {
            "alias": null,
            "args": null,
            "concreteType": "ArtworkInfoRow",
            "kind": "LinkedField",
            "name": "framed",
            "plural": false,
            "selections": (v4/*: any*/),
            "storageKey": null
          },
          {
            "alias": null,
            "args": null,
            "concreteType": "dimensions",
            "kind": "LinkedField",
            "name": "dimensions",
            "plural": false,
            "selections": [
              (v5/*: any*/),
              (v6/*: any*/)
            ],
            "storageKey": null
          },
          {
            "alias": null,
            "args": null,
            "concreteType": "ArtworkInfoRow",
            "kind": "LinkedField",
            "name": "signatureInfo",
            "plural": false,
            "selections": (v4/*: any*/),
            "storageKey": null
          },
          {
            "alias": null,
            "args": null,
            "kind": "ScalarField",
            "name": "artistNames",
            "storageKey": null
          },
          {
            "alias": null,
            "args": null,
            "kind": "ScalarField",
            "name": "isEdition",
            "storageKey": null
          },
          {
            "alias": null,
            "args": null,
            "concreteType": "EditionSet",
            "kind": "LinkedField",
            "name": "editionSets",
            "plural": true,
            "selections": [
              (v2/*: any*/),
              {
                "alias": null,
                "args": null,
                "kind": "ScalarField",
                "name": "editionOf",
                "storageKey": null
              },
              {
                "alias": null,
                "args": null,
                "kind": "ScalarField",
                "name": "isOfferableFromInquiry",
                "storageKey": null
              },
              {
                "alias": null,
                "args": null,
                "concreteType": null,
                "kind": "LinkedField",
                "name": "listPrice",
                "plural": false,
                "selections": [
                  {
                    "alias": null,
                    "args": null,
                    "kind": "ScalarField",
                    "name": "__typename",
                    "storageKey": null
                  },
                  {
                    "kind": "InlineFragment",
                    "selections": (v7/*: any*/),
                    "type": "Money",
                    "abstractKey": null
                  },
                  {
                    "kind": "InlineFragment",
                    "selections": (v7/*: any*/),
                    "type": "PriceRange",
                    "abstractKey": null
                  }
                ],
                "storageKey": null
              },
              {
                "alias": null,
                "args": null,
                "concreteType": "dimensions",
                "kind": "LinkedField",
                "name": "dimensions",
                "plural": false,
                "selections": [
                  (v6/*: any*/),
                  (v5/*: any*/)
                ],
                "storageKey": null
              },
              (v3/*: any*/)
            ],
            "storageKey": null
          },
          (v3/*: any*/)
        ],
        "storageKey": null
      }
    ]
  },
  // Persisted-query parameters: the server looks the query text up by `id`.
  "params": {
    "id": "a75746e6a1b8771a202701cf0522a330",
    "metadata": {},
    "name": "MakeOfferModalQuery",
    "operationKind": "query",
    "text": null
  }
};
})();
(node as any).hash = '14c5533ce7f90b3b2e97580107e71738';
export default node; | the_stack |
import * as S from '@apollo-elements/test/schema';
import * as C from '@apollo/client/core';
import { aTimeout, fixture, expect, nextFrame } from '@open-wc/testing';
import { spy, useFakeTimers, SinonFakeTimers, SinonSpy } from 'sinon';
import { html } from 'lit/static-html.js';
import { setupClient, teardownClient } from '@apollo-elements/test';
import './apollo-query';
import { ApolloQueryElement } from './apollo-query';
// Browser test suite for the <apollo-query> custom element
// (mocha-style BDD via @open-wc/testing, sinon for spies/fake timers).
describe('[components] <apollo-query>', function describeApolloQuery() {
  // Attribute/property plumbing of a bare element — no client, query or template.
  describe('simply instantiating', function() {
    let element: ApolloQueryElement;
    beforeEach(async function() {
      element = await fixture(html`<apollo-query></apollo-query>`);
    });
    it('has a shadow root', function() {
      expect(element.shadowRoot).to.be.ok;
    });
    it('doesn\'t render anything', function() {
      expect(element).shadowDom.to.equal('');
    });
    // Each fetch-policy value must reflect into the controller options verbatim.
    describe('setting fetch-policy attr', function() {
      it('cache-and-network', async function() {
        element.setAttribute('fetch-policy', 'cache-and-network');
        await element.updateComplete;
        expect(element.fetchPolicy === 'cache-and-network').to.be.true;
        expect(element.fetchPolicy).to.equal(element.controller.options.fetchPolicy);
      });
      it('cache-first', async function() {
        element.setAttribute('fetch-policy', 'cache-first');
        await element.updateComplete;
        expect(element.fetchPolicy === 'cache-first').to.be.true;
        expect(element.fetchPolicy).to.equal(element.controller.options.fetchPolicy);
      });
      it('cache-only', async function() {
        element.setAttribute('fetch-policy', 'cache-only');
        await element.updateComplete;
        expect(element.fetchPolicy === 'cache-only').to.be.true;
        expect(element.fetchPolicy).to.equal(element.controller.options.fetchPolicy);
      });
      it('network-only', async function() {
        element.setAttribute('fetch-policy', 'network-only');
        await element.updateComplete;
        expect(element.fetchPolicy === 'network-only').to.be.true;
        expect(element.fetchPolicy).to.equal(element.controller.options.fetchPolicy);
      });
      it('no-cache', async function() {
        element.setAttribute('fetch-policy', 'no-cache');
        await element.updateComplete;
        expect(element.fetchPolicy === 'no-cache').to.be.true;
        expect(element.fetchPolicy).to.equal(element.controller.options.fetchPolicy);
      });
      it('standby', async function() {
        element.setAttribute('fetch-policy', 'standby');
        await element.updateComplete;
        expect(element.fetchPolicy === 'standby').to.be.true;
        expect(element.fetchPolicy).to.equal(element.controller.options.fetchPolicy);
      });
      // The element forwards values without validating them.
      it('forwards an illegal value', async function() {
        element.setAttribute('fetch-policy', 'shmoo');
        await element.updateComplete;
        // @ts-expect-error: test for bad value
        expect(element.fetchPolicy === 'shmoo').to.be.true;
        expect(element.fetchPolicy).to.equal(element.controller.options.fetchPolicy);
      });
    });
    // Same reflection contract for the error-policy attribute.
    describe('setting error-policy attr', function() {
      it('all', async function() {
        element.setAttribute('error-policy', 'all');
        await element.updateComplete;
        expect(element.errorPolicy === 'all').to.be.true;
        expect(element.errorPolicy).to.equal(element.controller.options.errorPolicy);
      });
      it('none', async function() {
        element.setAttribute('error-policy', 'none');
        await element.updateComplete;
        expect(element.errorPolicy === 'none').to.be.true;
        expect(element.errorPolicy).to.equal(element.controller.options.errorPolicy);
      });
      it('ignore', async function() {
        element.setAttribute('error-policy', 'ignore');
        await element.updateComplete;
        expect(element.errorPolicy === 'ignore').to.be.true;
        expect(element.errorPolicy).to.equal(element.controller.options.errorPolicy);
      });
      it('forwards an illegal value', async function() {
        element.setAttribute('error-policy', 'shmoo');
        await element.updateComplete;
        // @ts-expect-error: test for bad value
        expect(element.errorPolicy === 'shmoo').to.be.true;
        expect(element.errorPolicy).to.equal(element.controller.options.errorPolicy);
      });
    });
    // `context` is forwarded as-is to the controller options, even bad values.
    describe('setting context', function() {
      it('as empty object', async function() {
        element.context = {};
        await element.updateComplete;
        expect(element.controller.options.context).to.be.ok.and.to.be.empty;
      });
      it('as non-empty object', async function() {
        element.context = { a: 'b' };
        await element.updateComplete;
        expect(element.controller.options.context).to.deep.equal({ a: 'b' });
      });
      it('as illegal non-object', async function() {
        // @ts-expect-error: test bad value
        element.context = 1;
        await element.updateComplete;
        expect(element.controller.options.context).to.equal(1);
      });
    });
    // The client property is forwarded to the controller unvalidated.
    describe('setting client', function() {
      it('is null by default', function() {
        expect(element.client).to.be.null;
      });
      it('as global client', async function() {
        element.client = window.__APOLLO_CLIENT__!;
        await element.updateComplete;
        expect(element.controller.client).to.equal(window.__APOLLO_CLIENT__);
      });
      it('as new client', async function() {
        const client = new C.ApolloClient({ cache: new C.InMemoryCache() });
        element.client = client;
        await element.updateComplete;
        expect(element.controller.client).to.equal(client);
      });
      it('as illegal value', async function() {
        // @ts-expect-error: test bad value
        element.client = 1;
        await element.updateComplete;
        expect(element.controller.client).to.equal(1);
      });
    });
    describe('setting loading', function() {
      it('as true', async function() {
        element.loading = true;
        await element.updateComplete;
        expect(element.controller.loading).to.equal(true);
      });
      it('as false', async function() {
        element.loading = false;
        await element.updateComplete;
        expect(element.controller.loading).to.equal(false);
      });
      it('as illegal value', async function() {
        // @ts-expect-error: test bad value
        element.loading = 1;
        await element.updateComplete;
        expect(element.controller.loading).to.equal(1);
      });
    });
    // Query assignment: valid documents are forwarded; non-documents throw.
    describe('setting query', function() {
      it('as DocumentNode', async function() {
        const query = C.gql`{ nullable }`;
        element.query = query;
        await element.updateComplete;
        expect(element.controller.query)
          .to.equal(query)
          .and.to.equal(element.controller.document);
      });
      it('as TypedDocumentNode', async function() {
        const query = C.gql`{ nullable }` as C.TypedDocumentNode<{ a: 'b'}, {a: 'b'}>;
        element.query = query;
        await element.updateComplete;
        expect(element.controller.query).to.equal(query);
        // Compile-time-only checks that the typed document constrains data/variables.
        const l = element as unknown as ApolloQueryElement<typeof query>;
        l.data = { a: 'b' };
        // @ts-expect-error: can't assign bad data type
        l.data = { b: 'c' };
        // @ts-expect-error: can't assign bad variables type
        l.variables = { b: 'c' };
      });
      it('as illegal value', async function() {
        expect(() => {
          // @ts-expect-error: can't assign bad query type
          element.query = 1;
        }).to.throw(/Query must be a parsed GraphQL document./);
        await element.updateComplete;
        expect(element.query)
          .to.be.null.and
          .to.equal(element.document).and
          .to.equal(element.controller.query).and
          .to.equal(element.controller.document);
      });
    });
    describe('setting error', function() {
      it('as ApolloError', async function() {
        try { throw new C.ApolloError({}); } catch (e) { element.error = e as Error; }
        await element.updateComplete;
        expect(element.controller.error).to.be.an.instanceof(C.ApolloError);
      });
      it('as Error', async function() {
        try { throw new Error('hi'); } catch (err) { element.error = err as Error; }
        await element.updateComplete;
        expect(element.controller.error?.message).to.equal('hi');
      });
      it('as null', async function() {
        const error = null;
        element.error = error;
        await element.updateComplete;
        expect(element.controller.error).to.equal(error);
      });
      it('as illegal value', async function() {
        const error = 0;
        // @ts-expect-error: test bad value
        element.error = error;
        await element.updateComplete;
        expect(element.controller.error).to.equal(error);
      });
    });
  });
  // End-to-end behaviour against the shared mock client from @apollo-elements/test.
  describe('with global client', function() {
    beforeEach(setupClient);
    afterEach(teardownClient);
    describe('with a simple template that renders data', function() {
      let element: ApolloQueryElement<typeof S.PaginatedQuery>;
      let clock: SinonFakeTimers;
      beforeEach(async function() {
        element = await fixture(html`
          <apollo-query>
            <template>{{ (data.pages || []).join(',') }}</template>
          </apollo-query>
        `);
      });
      it('uses global client', function() {
        expect(element.client).to.be.ok;
      });
      // no-auto-subscribe defers fetching until subscribe()/executeQuery() is called.
      describe('setting no-auto-subscribe', function() {
        beforeEach(() => element.setAttribute('no-auto-subscribe', ''));
        beforeEach(nextFrame);
        it('reflects', function() {
          expect(element.noAutoSubscribe).to.be.true
            .and.to.equal(element.controller.options.noAutoSubscribe);
        });
        describe('then setting query', function() {
          beforeEach(() => element.query = S.PaginatedQuery);
          beforeEach(nextFrame);
          it('doesn\'t render anything', function() {
            expect(element).shadowDom.to.equal('');
          });
          describe('then setting illegal variables', function() {
            // @ts-expect-error: bad input
            beforeEach(() => element.variables = { boop: 'snoot', offset: 'offset' });
            beforeEach(nextFrame);
            describe('then calling subscribe()', function() {
              beforeEach(() => element.subscribe());
              beforeEach(nextFrame);
              it('doesn\'t render anything', function() {
                expect(element).shadowDom.to.equal('');
              });
              // The GraphQL validation error surfaces on the element.
              it('sets error', function() {
                expect(element.error?.message).to.equal(`Variable "$offset" got invalid value "offset"; Int cannot represent non-integer value: "offset"`);
              });
            });
          });
          describe('then setting variables', function() {
            beforeEach(() => element.variables = { offset: 0 });
            beforeEach(nextFrame);
            it('doesn\'t render anything', function() {
              expect(element).shadowDom.to.equal('');
            });
            it('canAutoSubscribe is false', function() {
              expect(element.canAutoSubscribe).to.be.false;
            });
            describe('then calling subscribe()', function() {
              beforeEach(() => element.subscribe());
              beforeEach(nextFrame);
              it('renders data', function() {
                expect(element).shadowDom.to.equal('1,2,3,4,5,6,7,8,9,10');
              });
              describe('with refetchWritePolicy: \'merge\'', function() {
                beforeEach(function() {
                  element.options = { refetchWritePolicy: 'merge' };
                });
                // With 'merge', refetched pages are appended to the cached ones.
                describe('then calling refetch', function() {
                  beforeEach(async () => element.refetch({
                    offset: (element.variables?.offset ?? 0) + 10,
                  }));
                  beforeEach(nextFrame);
                  it('renders next page', function() {
                    expect(element).shadowDom.to.equal(`
                      1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20
                    `);
                  });
                });
                describe('then calling fetchMore({ variables })', function() {
                  beforeEach(async () => element.fetchMore({
                    variables: { offset: (element.variables?.offset ?? 0) + 10 },
                  }).catch(() => 0));
                  beforeEach(nextFrame);
                  it('renders next page', function() {
                    expect(element).shadowDom.to.equal(`
                      1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20
                    `);
                  });
                });
                describe('then calling subscribeToMore({ document, updateQuery })', function() {
                  beforeEach(() => element.subscribeToMore({
                    document: S.PageAddedSubscription,
                    updateQuery(_, { subscriptionData }) {
                      return { pages: [subscriptionData.data!.pageAdded!] };
                    },
                  }));
                  beforeEach(nextFrame);
                  it('renders data', function() {
                    expect(element).shadowDom.to.equal('1,2,3,4,5,6,7,8,9,10,11');
                  });
                });
                // Polling is verified with sinon fake timers: 3500ms at a
                // 1000ms interval should trigger exactly three refetches.
                describe('then calling startPolling(1000)', function() {
                  beforeEach(() => { clock = useFakeTimers(); });
                  beforeEach(() => spy(element.controller, 'refetch'));
                  afterEach(() => (element.controller.refetch as SinonSpy).restore?.());
                  afterEach(() => clock.restore());
                  beforeEach(function startPolling() { element.startPolling(1000); });
                  beforeEach(() => { clock.tick(3500); });
                  it('refetches', function() {
                    expect(element.controller.refetch).to.have.been.calledThrice;
                  });
                  describe('then stopPolling', function() {
                    beforeEach(function stopPolling() { element.stopPolling(); });
                    beforeEach(() => { clock.tick(3500); });
                    it('stops calling refetch', function() {
                      expect(element.controller.refetch).to.have.been.calledThrice;
                    });
                  });
                });
              });
            });
            describe('then calling executeQuery()', function() {
              beforeEach(() => element.executeQuery());
              beforeEach(nextFrame);
              it('renders data', function() {
                expect(element).shadowDom.to.equal('1,2,3,4,5,6,7,8,9,10');
              });
            });
            describe('when executeQuery() rejects', function() {
              let e: Error;
              beforeEach(() => element.executeQuery({
                variables: {
                  limit: 1000, // client is programmed to reject this
                },
              }).catch(err => e = err));
              beforeEach(nextFrame);
              it('"unrenders" data', function() {
                expect(element).shadowDom.to.equal('');
              });
              it('sets error', function() {
                expect(element.error).to.equal(e);
                expect(element.error!.message).to.equal('rate limited');
              });
            });
          });
        });
      });
    });
    // Degenerate template attribute values resolve to a null template.
    describe('with template attribute set but empty', function() {
      let element: ApolloQueryElement;
      beforeEach(async function() {
        element = await fixture(html`<apollo-query template=""></apollo-query>`);
      });
      it('has null template', function() {
        expect(element.template).to.be.null;
      });
    });
    describe('with template attribute set but no template', function() {
      let element: ApolloQueryElement;
      beforeEach(async function() {
        element = await fixture(html`<apollo-query template="heh"></apollo-query>`);
      });
      it('has null template', function() {
        expect(element.template).to.be.null;
      });
    });
    // A string value for no-shadow becomes the class of the light-DOM render root.
    describe('with `no-shadow` attribute set as a string', function() {
      let element: ApolloQueryElement;
      beforeEach(async function() {
        element = await fixture(html`<apollo-query no-shadow="special"></apollo-query>`);
      });
      it('creates a special div', function() {
        expect(element.querySelector('.special')).to.be.ok;
      });
    });
    // Query and template declared declaratively in light DOM, rendered without shadow.
    describe('with template and query DOM and `no-shadow` attribute set', function() {
      let element: ApolloQueryElement<typeof S.NoParamQuery>;
      beforeEach(async function() {
        element = await fixture(html`
          <apollo-query no-shadow>
            <script type="application/graphql">
              query NoParamQuery {
                noParam {
                  noParam
                }
              }
            </script>
            <template>
              <h1>Template</h1>
              <span id="data">{{ data.noParam.noParam }}</span>
              <span id="error">{{ error.message }}</span>
            </template>
          </apollo-query>
        `);
      });
      beforeEach(() => aTimeout(200));
      it('renders', function() {
        expect(element.$$('h1').length).to.equal(1);
        expect(element.$$('span').length).to.equal(2);
        expect(element.$('#data')).to.be.ok;
        expect(element.$('#data')?.textContent).to.equal('noParam');
      });
      it('creates a query-result div', function() {
        expect(element.querySelector('.output')).to.be.ok;
      });
      it('renders to the light DOM', function() {
        expect(element.$('#data')).to.equal(element.querySelector('#data'));
      });
      it('does not blow away template', function() {
        expect(element.template).to.be.an.instanceof(HTMLTemplateElement);
      });
      it('does not blow away query', function() {
        expect(element.querySelector('script[type="application/graphql"]')).to.be.ok;
      });
    });
    // Template referenced by id from elsewhere in the document.
    describe('with `no-shadow` and `template` attributes set', function() {
      let element: ApolloQueryElement<typeof S.NoParamQuery>;
      beforeEach(async function() {
        element = await fixture(html`
          <apollo-query no-shadow template="tpl" .query="${S.NoParamQuery}"></apollo-query>
          <template id="tpl">
            <h1>Template</h1>
            <span id="data">{{ data.noParam.noParam }}</span>
            <span id="error">{{ error.message }}</span>
          </template>
        `);
      });
      beforeEach(nextFrame);
      beforeEach(nextFrame);
      it('renders', function() {
        expect(element.$$('h1').length).to.equal(1);
        expect(element.$$('span').length).to.equal(2);
        expect(element.$('#data')).to.be.ok;
        expect(element.$('#data')?.textContent).to.equal('noParam');
      });
      it('renders to the light DOM', function() {
        expect(element.$('#data')).to.equal(element.querySelector('#data'));
      });
      it('does not blow away template', function() {
        expect(element.template).to.be.an.instanceof(HTMLTemplateElement);
      });
    });
    describe('with template in DOM and a query property', function() {
      let element: ApolloQueryElement<typeof S.NoParamQuery>;
      beforeEach(async function() {
        element = await fixture(html`
          <apollo-query .query="${S.NoParamQuery}">
            <template>
              <h1>Template</h1>
              <span id="data">{{ data.noParam.noParam }}</span>
              <span id="error">{{ error.message }}</span>
            </template>
          </apollo-query>
        `);
      });
      beforeEach(() => element.updateComplete);
      it('renders', function() {
        expect(element.$$('h1').length).to.equal(1);
        expect(element.$$('span').length).to.equal(2);
        expect(element.$('#data')).to.be.ok;
        expect(element.$('#data')?.textContent).to.equal('noParam');
      });
      it('removes loading attribute', function() {
        expect(element.loading, 'property').to.be.false;
        expect(element.hasAttribute('loading'), 'attribute').to.be.false;
      });
    });
    // Variables supplied declaratively via an application/json script child.
    describe('with template, query, and variables in DOM', function() {
      let element: ApolloQueryElement<typeof S.NullableParamQuery>;
      beforeEach(async function() {
        element = await fixture(html`
          <apollo-query>
            <script type="application/graphql">
              query NullableParamQuery($nullable: String) {
                nullableParam(nullable: $nullable) {
                  nullable
                }
              }
            </script>
            <script type="application/json">
              {
                "nullable": "DOM"
              }
            </script>
            <template>
              <h1>Template</h1>
              <span id="data">{{ data.nullableParam.nullable }}</span>
              <span id="error">{{ error.message }}</span>
            </template>
          </apollo-query>
        `);
      });
      beforeEach(nextFrame);
      it('canAutoSubscribe is true', function() {
        expect(element.canAutoSubscribe).to.be.true;
      });
      it('renders', function() {
        expect(element.$$('h1').length).to.equal(1);
        expect(element.$$('span').length).to.equal(2);
        expect(element.$('#data')).to.be.ok;
        expect(element.$('#data')?.textContent).to.equal('DOM');
      });
      describe('setting variables property', function() {
        beforeEach(function() {
          element.variables = { nullable: 'set by js' };
        });
        beforeEach(nextFrame);
        it('rerenders', function() {
          expect(element.$('#data')).to.be.ok;
          expect(element.$('#data')?.textContent).to.equal('set by js');
        });
      });
    });
    describe('when query errors', function() {
      let element: ApolloQueryElement<typeof S.NullableParamQuery>;
      beforeEach(async function() {
        element = await fixture(html`
          <apollo-query
              .query="${S.NullableParamQuery}"
              .variables="${{ nullable: 'error' }}">
            <template>
              <h1>Template</h1>
              <span id="data">{{ data.nullableParam.nullable }}</span>
              <span id="error">{{ error.message }}</span>
            </template>
          </apollo-query>
        `);
      });
      beforeEach(nextFrame);
      it('renders', function() {
        expect(element.$('#error')).to.be.ok;
        expect(element.$('#error')?.textContent).to.equal('error');
      });
    });
    // Nested `type="repeat"` templates expose `item` and `index` bindings.
    describe('with a list rendering template', function() {
      let element: ApolloQueryElement;
      beforeEach(async function() {
        element = await fixture(html`
          <apollo-query>
            <template>
              <p>{{ data.me.name }}</p>
              <ul>
                <template type="repeat" repeat="{{ data.friends || [] }}">
                  <li data-id="{{ item.id }}"
                      data-index="{{ index }}">{{ item.name }}</li>
                </template>
              </ul>
            </template>
          </apollo-query>
        `);
      });
      describe('setting data', function() {
        beforeEach(function() {
          element.data = {
            me: { name: 'ME' },
            friends: [
              { id: 'friend-a', name: 'A' },
              { id: 'friend-b', name: 'B' },
              { id: 'friend-c', name: 'C' },
            ],
          };
        });
        beforeEach(nextFrame);
        it('renders the list', function() {
          expect(element).shadowDom.to.equal(`
            <p>ME</p>
            <ul>
              <li data-id="friend-a" data-index="0">A</li>
              <li data-id="friend-b" data-index="1">B</li>
              <li data-id="friend-c" data-index="2">C</li>
            </ul>
          `);
        });
      });
    });
  });
});
import { compareDates, getItem, mergeStringArrays, resetData } from "./Util";
import fs = require("fs");
import { getMostRecentLogObject, checkIfOnStreak, getLogsSummary, createLog } from "./LogsUtil";
import { getLanguages } from "./LanguageUtil";
import { Summary } from "../models/Summary";
import { Log } from "../models/Log";
import { getTotalMilestonesAchieved, getThreeMostRecentMilestones } from "./MilestonesUtil";
import { getSummaryJsonFilePath, fetchSummaryJsonFileData } from "../managers/FileManager";
import { commands } from "vscode";
// Cached challenge round; -1 is the "not yet loaded" sentinel.
let challenge_round = -1;
/**
 * Returns the current challenge round, lazily loading it from the local
 * summary file on first access and caching it at module scope.
 */
export function getCurrentChallengeRound() {
  if (challenge_round !== -1) {
    return challenge_round;
  }
  // First access: hydrate the cache from the local summary file.
  const summary: Summary = fetchSummaryJsonFileData();
  challenge_round = summary.challenge_round;
  return challenge_round;
}
export async function restartChallenge() {
// increment the challenge round
challenge_round += 1;
// reset the data
resetData();
// set the challenge round in the local summary
const summary:Summary = new Summary();
summary.challenge_round = challenge_round;
writeToSummaryJson(summary);
// create a log with the new challenge round
const log:Log = new Log();
log.day_number = 1;
log.challenge_round = challenge_round;
await createLog(log);
// show the dashboard view
commands.executeCommand("DoC.viewDashboard");
}
/**
 * Removes the local summary file if it exists; a missing file is a no-op.
 */
export function deleteSummaryJson() {
  const filepath = getSummaryJsonFilePath();
  if (fs.existsSync(filepath)) {
    fs.unlinkSync(filepath);
  }
}
/**
 * Reconciles the locally stored summary with the one received from the app,
 * copying the app's aggregate values when the app appears more up to date,
 * then re-applies today's metrics from the most recent log so the in-progress
 * day is not clobbered. Always persists the (possibly unchanged) summary.
 * @param summaryFromApp summary payload received from the app backend
 */
export function updateLocalSummary(summaryFromApp: Summary) {
  let summary: Summary = fetchSummaryJsonFileData();
  // updates local summary if and only if db is as updated
  // NOTE(review): a challenge_round that merely *differs* (not only a newer
  // one) also triggers the copy — confirm this asymmetry is intentional.
  if (summaryFromApp.challenge_round !== summary.challenge_round
    || summaryFromApp.days > summary.days
    || summaryFromApp.hours > summary.hours
    || summaryFromApp.keystrokes > summary.keystrokes) {
    const currentLog = getMostRecentLogObject();
    summary.hours = summaryFromApp.hours;
    summary.days = summaryFromApp.days;
    summary.keystrokes = summaryFromApp.keystrokes;
    summary.currentLines = summaryFromApp.currentLines;
    if (summary.challenge_round < summaryFromApp.challenge_round) {
      // update the local challenge round if its behind the challenge from the app
      summary.challenge_round = summaryFromApp.challenge_round;
    }
    // For the monotonic counters keep whichever side is larger.
    summary.longest_streak =
      summaryFromApp.longest_streak > summary.longest_streak ? summaryFromApp.longest_streak : summary.longest_streak;
    summary.milestones = summaryFromApp.milestones > summary.milestones ? summaryFromApp.milestones : summary.milestones;
    summary.shares = summaryFromApp.shares > summary.shares ? summaryFromApp.shares : summary.shares;
    summary.languages = mergeStringArrays(summaryFromApp.languages, summary.languages);
    // If the most recent log is from today, its metrics win for the current day.
    if (currentLog && compareDates(new Date(currentLog.date), new Date())) {
      summary.currentHours = currentLog.codetime_metrics.hours;
      summary.currentKeystrokes = currentLog.codetime_metrics.keystrokes;
      summary.currentLines = currentLog.codetime_metrics.lines_added;
    }
  }
  writeToSummaryJson(summary);
}
export function syncSummary() {
console.log("Syncing 100 doc summary");
// Aggregating log data
const aggregateLogData = getLogsSummary();
// Aggregating milestone data
const totalMilestones = getTotalMilestonesAchieved();
let summary: Summary = fetchSummaryJsonFileData();
//aggregate hours has the total hours in the logs, we need to subtract the current day's hours because they are added at the end of the day.
summary.hours = aggregateLogData.totalHours;
summary.lines_added = aggregateLogData.totalLinesAdded;
summary.keystrokes = aggregateLogData.totalKeystrokes;
summary.currentHours = aggregateLogData.currentHours;
summary.currentKeystrokes = aggregateLogData.currentKeystrokes;
summary.currentLines = aggregateLogData.currentLines;
summary.days = aggregateLogData.totalDays;
summary.longest_streak = aggregateLogData.longest_streak;
summary.current_streak = aggregateLogData.current_streak;
summary.milestones = totalMilestones;
summary.recent_milestones = getThreeMostRecentMilestones();
summary.currentDate = aggregateLogData.currentDate;
writeToSummaryJson(summary);
}
/**
 * Rolls the summary forward using the most recent log. When the calendar day
 * has changed since the summary's stored date, the finished day's current*
 * metrics are folded into the aggregate totals and the streak is advanced or
 * reset; afterwards today's metrics and the language set are refreshed and
 * the summary is persisted.
 */
export function updateSummaryJson() {
  let summary: Summary = fetchSummaryJsonFileData();
  // NOTE(review): assumes a log always exists — getMostRecentLogObject()
  // returning undefined would throw when codetime_metrics is read below;
  // confirm against its implementation.
  const log: Log = getMostRecentLogObject();
  const onStreak = checkIfOnStreak();
  const currentDate = new Date(summary.currentDate);
  const dateOb = new Date();
  // if current date is not today, update aggregate data
  if (!compareDates(dateOb, currentDate)) {
    summary.days += 1;
    // Fold the finished day's metrics into the running totals.
    summary.hours += summary.currentHours;
    summary.keystrokes += summary.currentKeystrokes;
    summary.lines_added += summary.currentLines;
    summary.currentDate = dateOb.valueOf();
    if (onStreak) {
      summary.current_streak += 1;
      if (summary.current_streak > summary.longest_streak) {
        summary.longest_streak = summary.current_streak;
      }
    } else {
      // Streak broken: today starts a fresh 1-day streak.
      summary.current_streak = 1;
    }
  }
  // update day's data
  summary.currentHours = log.codetime_metrics.hours;
  summary.currentKeystrokes = log.codetime_metrics.keystrokes;
  summary.currentLines = log.codetime_metrics.lines_added;
  // update languages aggregate and make sure none are repeated
  const newLanguages = getLanguages();
  if (newLanguages) {
    const currLanguages = summary.languages || [];
    const totalLanguages = currLanguages.concat(newLanguages);
    const reducedLanguages = Array.from(new Set(totalLanguages));
    summary.languages = reducedLanguages;
  }
  summary.lastUpdated = new Date().getTime();
  writeToSummaryJson(summary);
}
/**
 * Records newly achieved milestones in the summary.
 * @param newMilestones milestone ids in oldest-to-newest achievement order
 * @param totalMilestones new total count of achieved milestones
 */
export function updateSummaryMilestones(newMilestones: Array<number>, totalMilestones: number) {
  const summary: Summary = fetchSummaryJsonFileData();
  summary.milestones = totalMilestones;
  // order milestones in latest to oldest order of achievement
  // Fix: copy before reversing — Array.prototype.reverse() is in-place and
  // previously mutated the caller's array.
  const latestFirst = [...newMilestones].reverse();
  // limit milestones to 3 for displaying on the dashboard
  summary.recent_milestones = latestFirst.concat(summary.recent_milestones).slice(0, 3);
  summary.lastUpdated = new Date().getTime();
  writeToSummaryJson(summary);
}
/** Returns the aggregate hours recorded in the local summary. */
export function getSummaryTotalHours() {
  const summary: Summary = fetchSummaryJsonFileData();
  return summary.hours;
}
/** Overwrites the aggregate hours in the local summary and persists it. */
export function setSummaryTotalHours(newHours: number) {
  const summary: Summary = fetchSummaryJsonFileData();
  summary.hours = newHours;
  writeToSummaryJson(summary);
}
/** Overwrites today's hours in the local summary and persists it. */
export function setSummaryCurrentHours(newCurrentHours: number) {
  const summary: Summary = fetchSummaryJsonFileData();
  summary.currentHours = newCurrentHours;
  writeToSummaryJson(summary);
}
/**
 * Merges the currently detected languages into the summary's language set,
 * deduplicating, then persists the summary.
 */
export function updateSummaryLanguages() {
  // update languages aggregate and make sure none are repeated
  // Fix: guard both sides — getLanguages() may return a falsy value and
  // summary.languages may be unset; concat(null) would otherwise append a
  // literal null element. Mirrors the guard used in updateSummaryJson.
  const newLanguages = getLanguages() || [];
  const summary: Summary = fetchSummaryJsonFileData();
  const currLanguages = summary.languages || [];
  summary.languages = Array.from(new Set(currLanguages.concat(newLanguages)));
  summary.lastUpdated = new Date().getTime();
  writeToSummaryJson(summary);
}
/** Bumps the share counter in the local summary and persists it. */
export function incrementSummaryShare() {
  const summary: Summary = fetchSummaryJsonFileData();
  summary.shares = summary.shares + 1;
  writeToSummaryJson(summary);
}
/**
 * Maps days completed to a milestone level (0-6) and the percentage progress
 * toward the next level. Thresholds mirror the "days" milestones:
 * 1, 10, 50, 75, 100, 110.
 * @param daysComplete total challenge days completed
 * @returns daysLevel in [0, 6]; daysProgressPercentage in [0, 100]
 */
export function getDaysLevel(daysComplete: number): { daysLevel: number; daysProgressPercentage: number } {
  // Tiers, highest first: `floor` unlocks the level, `ceiling` unlocks the next.
  const tiers = [
    { level: 5, floor: 100, ceiling: 110 },
    { level: 4, floor: 75, ceiling: 100 },
    { level: 3, floor: 50, ceiling: 75 },
    { level: 2, floor: 10, ceiling: 50 },
    { level: 1, floor: 1, ceiling: 10 },
    { level: 0, floor: 0, ceiling: 1 },
  ];
  if (daysComplete >= 110) {
    // Top level: progress is capped at 100%.
    return { daysLevel: 6, daysProgressPercentage: 100 };
  }
  const tier = tiers.find(t => daysComplete >= t.floor);
  if (!tier) {
    // Negative input: report no progress instead of a negative percentage.
    return { daysLevel: 0, daysProgressPercentage: 0 };
  }
  return {
    daysLevel: tier.level,
    daysProgressPercentage: ((daysComplete - tier.floor) * 100) / (tier.ceiling - tier.floor),
  };
}
/**
 * Maps total hours coded to a milestone level (0-6) and the percentage
 * progress toward the next level. Thresholds mirror the "hours" milestones:
 * 1, 30, 60, 90, 120, 200.
 * @param hoursCoded total hours coded during the challenge
 * @returns hoursLevel in [0, 6]; hoursProgressPercentage in [0, 100]
 */
export function getHoursLevel(hoursCoded: number): { hoursLevel: number; hoursProgressPercentage: number } {
  // Tiers, highest first: `floor` unlocks the level, `ceiling` unlocks the next.
  const tiers = [
    { level: 5, floor: 120, ceiling: 200 },
    { level: 4, floor: 90, ceiling: 120 },
    { level: 3, floor: 60, ceiling: 90 },
    { level: 2, floor: 30, ceiling: 60 },
    { level: 1, floor: 1, ceiling: 30 },
    { level: 0, floor: 0, ceiling: 1 },
  ];
  if (hoursCoded >= 200) {
    // Top level: progress is capped at 100%.
    return { hoursLevel: 6, hoursProgressPercentage: 100 };
  }
  const tier = tiers.find(t => hoursCoded >= t.floor);
  if (!tier) {
    // Negative input: report no progress instead of a negative percentage.
    return { hoursLevel: 0, hoursProgressPercentage: 0 };
  }
  return {
    hoursLevel: tier.level,
    hoursProgressPercentage: ((hoursCoded - tier.floor) * 100) / (tier.ceiling - tier.floor),
  };
}
/**
 * Maps the longest coding streak to a milestone level (0-6) and the
 * percentage progress toward the next level. Thresholds mirror the "streaks"
 * milestones: 2, 7, 14, 30, 60, 100.
 * @param longestStreak longest run of consecutive coding days
 * @returns streaksLevel in [0, 6]; streaksProgressPercentage in [0, 100]
 */
export function getLongStreakLevel(longestStreak: number): { streaksLevel: number; streaksProgressPercentage: number } {
  // Tiers, highest first: `floor` unlocks the level, `ceiling` unlocks the next.
  const tiers = [
    { level: 5, floor: 60, ceiling: 100 },
    { level: 4, floor: 30, ceiling: 60 },
    { level: 3, floor: 14, ceiling: 30 },
    { level: 2, floor: 7, ceiling: 14 },
    { level: 1, floor: 2, ceiling: 7 },
    { level: 0, floor: 0, ceiling: 2 },
  ];
  if (longestStreak >= 100) {
    // Top level: progress is capped at 100%.
    return { streaksLevel: 6, streaksProgressPercentage: 100 };
  }
  const tier = tiers.find(t => longestStreak >= t.floor);
  if (!tier) {
    // Negative input: report no progress instead of a negative percentage.
    return { streaksLevel: 0, streaksProgressPercentage: 0 };
  }
  return {
    streaksLevel: tier.level,
    streaksProgressPercentage: ((longestStreak - tier.floor) * 100) / (tier.ceiling - tier.floor),
  };
}
/**
 * Maps lines of code added to a milestone level (0-6) and the percentage
 * progress toward the next level. Thresholds mirror the "lines added"
 * milestones: 1, 16, 50, 100, 1000, 10000.
 * @param linesAdded total lines added during the challenge
 * @returns linesAddedLevel in [0, 6]; linesAddedProgressPercentage in [0, 100]
 */
export function getLinesAddedLevel(linesAdded: number): { linesAddedLevel: number; linesAddedProgressPercentage: number } {
  // Tiers, highest first: `floor` unlocks the level, `ceiling` unlocks the next.
  const tiers = [
    { level: 5, floor: 1000, ceiling: 10000 },
    { level: 4, floor: 100, ceiling: 1000 },
    { level: 3, floor: 50, ceiling: 100 },
    { level: 2, floor: 16, ceiling: 50 },
    { level: 1, floor: 1, ceiling: 16 },
    { level: 0, floor: 0, ceiling: 1 },
  ];
  if (linesAdded >= 10000) {
    // Top level: progress is capped at 100%.
    return { linesAddedLevel: 6, linesAddedProgressPercentage: 100 };
  }
  const tier = tiers.find(t => linesAdded >= t.floor);
  if (!tier) {
    // Negative input: report no progress instead of a negative percentage.
    return { linesAddedLevel: 0, linesAddedProgressPercentage: 0 };
  }
  return {
    linesAddedLevel: tier.level,
    linesAddedProgressPercentage: ((linesAdded - tier.floor) * 100) / (tier.ceiling - tier.floor),
  };
}
/**
 * Maps average daily hours over the challenge to a level from 0 to 6.
 * @param avgHour average hours coded per day
 * @returns level in [0, 6]
 */
export function getAverageHoursLevel(avgHour: number): number {
  // Thresholds in descending order; the first one met wins.
  const thresholds: Array<[number, number]> = [
    [3.0, 6],
    [2.5, 5],
    [2.0, 4],
    [1.5, 3],
    [1.0, 2],
    [0.5, 1],
  ];
  for (const [minHours, level] of thresholds) {
    if (avgHour >= minHours) {
      return level;
    }
  }
  return 0;
}
function writeToSummaryJson(summary: Summary) {
const filepath = getSummaryJsonFilePath();
try {
fs.writeFileSync(filepath, JSON.stringify(summary, null, 2));
} catch (err) {
console.log(err);
}
} | the_stack |
import { isEqual } from 'lodash';
import { Engine } from '@kb-abstracts';
import { PullRequestService, RepoService, UserService } from '@kb-api';
import {
IGithubPullRequest,
IGithubPullRequestEvent,
IGithubRepo,
IGithubReview,
IGithubReviewComment,
IGithubUser
} from '@kb-interfaces';
import { IReviewComment, PRStatus, PullRequest, Repo, User } from '@kb-models';
// TODO@Thatkookooguy: #343 Ensure PR exists in db for every event
// TODO@Thatkookooguy: #344 Ensure all users exists in db for every event
// TODO@Thatkookooguy: #345 Ensure repo exists in db for every event
export class GithubEngine extends Engine<IGithubPullRequestEvent> {
/**
 * @param usersService persists User models extracted from webhook payloads
 * @param reposService persists Repo models extracted from webhook payloads
 * @param pullRequestsService persists PullRequest models and their label,
 *   title and description changes
 */
constructor(
  private usersService: UserService,
  private reposService: RepoService,
  private pullRequestsService: PullRequestService
) {
  super();
}
/**
 * Persists the repository from a newly connected webhook payload.
 * The `organization` field is only set when the repo owner is an org.
 */
async handleNewConnection(
  eventData: IGithubPullRequestEvent
): Promise<void> {
  const { repository } = eventData;
  const ownedByOrg = repository.owner.type === 'Organization';
  const repoDto = new Repo({
    fullname: repository.full_name,
    name: repository.name,
    url: repository.html_url,
    organization: ownedByOrg ? repository.owner.login : undefined
  });
  await this.reposService.create(repoDto);
}
/**
 * Handles a newly opened pull request: persists the author (and the owning
 * organization, when the repo belongs to one), the repository, and finally
 * the pull request itself.
 */
async handlePullRequestOpened(
  eventData: IGithubPullRequestEvent
): Promise<void> {
  const { githubCreator, githubOwner, githubPR } =
    this.extractGithubEntities(eventData);
  const creator = this.extractUser(githubCreator);
  await this.usersService.create(creator);
  // Only repos owned by an organization have an org user to persist.
  const organization: User | undefined =
    githubOwner.type === 'Organization'
      ? this.extractUser(githubOwner)
      : undefined;
  if (organization) {
    await this.usersService.create(organization);
  }
  const repository = this.extractRepo(eventData.repository);
  await this.reposService.create(repository);
  const pullRequest = this.extractPullRequest(
    githubPR,
    creator,
    repository,
    organization
  );
  await this.pullRequestsService.create(pullRequest);
}
async handlePullRequestInitialLabeled(
eventData: IGithubPullRequestEvent
): Promise<void> {
const {
githubCreator,
githubOwner,
githubPR
} = this.extractGithubEntities(eventData);
const pr = this.extractPullRequest(
githubPR,
this.extractUser(githubCreator),
this.extractRepo(eventData.repository),
this.extractUser(githubOwner)
);
await this.pullRequestsService.addLabels(pr.prid, eventData.label.name);
}
async handlePullRequestLabelAdded(
eventData: IGithubPullRequestEvent
): Promise<void> {
const {
githubCreator,
githubOwner,
githubPR
} = this.extractGithubEntities(eventData);
const pr = this.extractPullRequest(
githubPR,
this.extractUser(githubCreator),
this.extractRepo(eventData.repository),
this.extractUser(githubOwner)
);
await this.pullRequestsService.addLabels(pr.prid, eventData.label.name);
}
async handlePullRequestLabelRemoved(
eventData: IGithubPullRequestEvent
): Promise<void> {
const {
githubCreator,
githubOwner,
githubPR
} = this.extractGithubEntities(eventData);
const pr = this.extractPullRequest(
githubPR,
this.extractUser(githubCreator),
this.extractRepo(eventData.repository),
this.extractUser(githubOwner)
);
await this.pullRequestsService.removeLabels(pr.prid, eventData.label.name);
}
async handlePullRequestEdited(
eventData: IGithubPullRequestEvent
): Promise<void> {
const {
githubCreator,
githubOwner,
githubPR
} = this.extractGithubEntities(eventData);
const pr = this.extractPullRequest(
githubPR,
this.extractUser(githubCreator),
this.extractRepo(eventData.repository),
this.extractUser(githubOwner)
);
await this.pullRequestsService.editPRData(
pr.prid,
{ title: pr.title, description: pr.description },
eventData.changes
);
}
async handlePullRequestAssigneeAdded(
eventData: IGithubPullRequestEvent
): Promise<void> {
const {
githubCreator,
githubOwner,
githubPR
} = this.extractGithubEntities(eventData);
const pr = this.extractPullRequest(
githubPR,
this.extractUser(githubCreator),
this.extractRepo(eventData.repository),
this.extractUser(githubOwner)
);
const githubAssignees = eventData.pull_request.assignees;
const assignees = githubAssignees
.map((assignee) => this.extractUser(assignee));
await this.pullRequestsService.updateAssignees(pr.prid, assignees);
}
async handlePullRequestAssigneeRemoved(
eventData: IGithubPullRequestEvent
): Promise<void> {
const {
githubCreator,
githubOwner,
githubPR
} = this.extractGithubEntities(eventData);
const pr = this.extractPullRequest(
githubPR,
this.extractUser(githubCreator),
this.extractRepo(eventData.repository),
this.extractUser(githubOwner)
);
const githubAssignees = eventData.pull_request.assignees;
const assignees = githubAssignees
.map((assignee) => this.extractUser(assignee));
await this.pullRequestsService.updateAssignees(pr.prid, assignees);
}
async handlePullRequestReviewRequestAdded(
eventData: IGithubPullRequestEvent
): Promise<void> {
const {
githubCreator,
githubOwner,
githubPR
} = this.extractGithubEntities(eventData);
const pr = this.extractPullRequest(
githubPR,
this.extractUser(githubCreator),
this.extractRepo(eventData.repository),
this.extractUser(githubOwner)
);
const reviewer = this.extractUser(eventData.requested_reviewer);
await this.pullRequestsService.updateReviewers(pr.prid, reviewer);
}
async handlePullRequestReviewRequestRemoved(
eventData: IGithubPullRequestEvent
): Promise<void> {
const {
githubCreator,
githubOwner,
githubPR
} = this.extractGithubEntities(eventData);
const pr = this.extractPullRequest(
githubPR,
this.extractUser(githubCreator),
this.extractRepo(eventData.repository),
this.extractUser(githubOwner)
);
const reviewer = this.extractUser(eventData.requested_reviewer);
await this.pullRequestsService.updateReviewers(pr.prid, reviewer, true);
}
async handlePullRequestReviewCommentAdded(
eventData: IGithubPullRequestEvent
): Promise<void> {
const {
githubCreator,
githubOwner,
githubPR
} = this.extractGithubEntities(eventData);
const pr = this.extractPullRequest(
githubPR,
this.extractUser(githubCreator),
this.extractRepo(eventData.repository),
this.extractUser(githubOwner)
);
const newReviewComment = this.extractReviewComment(eventData.comment);
await this.pullRequestsService.addReviewComment(pr.prid, newReviewComment);
}
async handlePullRequestReviewCommentRemoved(
eventData: IGithubPullRequestEvent
): Promise<void> {
const {
githubCreator,
githubOwner,
githubPR
} = this.extractGithubEntities(eventData);
const pr = this.extractPullRequest(
githubPR,
this.extractUser(githubCreator),
this.extractRepo(eventData.repository),
this.extractUser(githubOwner)
);
const reviewComment = this.extractReviewComment(eventData.comment);
await this.pullRequestsService
.removeReviewComment(pr.prid, reviewComment);
}
async handlePullRequestReviewCommentEdited(
eventData: IGithubPullRequestEvent
): Promise<void> {
const {
githubCreator,
githubOwner,
githubPR
} = this.extractGithubEntities(eventData);
const pr = this.extractPullRequest(
githubPR,
this.extractUser(githubCreator),
this.extractRepo(eventData.repository),
this.extractUser(githubOwner)
);
const reviewComment = this.extractReviewComment(eventData.comment);
await this.pullRequestsService
.editReviewComment(pr.prid, reviewComment);
}
async handlePullRequestReviewSubmitted(
eventData: IGithubPullRequestEvent
): Promise<void> {
const {
githubCreator,
githubOwner,
githubPR
} = this.extractGithubEntities(eventData);
const pr = this.extractPullRequest(
githubPR,
this.extractUser(githubCreator),
this.extractRepo(eventData.repository),
this.extractUser(githubOwner)
);
const reviewStatus = this.extractReviewStatus(eventData.review);
await this.pullRequestsService
.updateReviewSubmitted(pr.prid, reviewStatus);
}
handlePullRequestMerged(
// eslint-disable-next-line @typescript-eslint/no-unused-vars
eventData: IGithubPullRequestEvent
): Promise<void> {
throw new Error('Method not implemented.');
}
async handlePullRequestClosed(
eventData: IGithubPullRequestEvent
): Promise<void> {
const {
githubCreator,
githubOwner,
githubPR
} = this.extractGithubEntities(eventData);
const pr = this.extractPullRequest(
githubPR,
this.extractUser(githubCreator),
this.extractRepo(eventData.repository),
this.extractUser(githubOwner)
);
await this.pullRequestsService.updatePRStatus(pr.prid, pr.status);
}
private extractGithubEntities(eventData: IGithubPullRequestEvent) {
return {
githubPR: eventData.pull_request,
githubCreator: eventData.pull_request.user,
githubOwner: eventData.repository.owner
};
}
private extractUser(githubUser: IGithubUser) {
if (!githubUser) { return; }
const user = new User({
username: githubUser.login,
url: githubUser.html_url,
avatar: githubUser.avatar_url,
organization: githubUser.type === 'Organization'
});
return user;
}
private extractRepo(githubRepo: IGithubRepo) {
if (!githubRepo) { return; }
const repo = new Repo({
fullname: githubRepo.full_name,
name: githubRepo.name,
url: githubRepo.html_url,
organization: githubRepo.owner.type === 'Organization' ?
githubRepo.owner.login : undefined
});
return repo;
}
private extractReviewStatus(review: IGithubReview) {
return {
id: review.id,
user: this.extractUser(review.user).username,
message: review.body || '',
state: review.state,
createdOn: review.submitted_at,
commit: review.commit_id,
authorAssociation: review.author_association
};
}
private extractPullRequest(
githubPR: IGithubPullRequest,
creator: User,
repository: Repo,
organization?: User
) {
const pullRequest = new PullRequest({
prid: `${ repository.fullname }/pull/${ githubPR.number }`,
title: githubPR.title,
description: githubPR.body,
number: githubPR.number,
creator: creator.username,
createdOn: new Date(githubPR.created_at),
url: githubPR.html_url,
repository: repository.fullname,
status: this.getPRStatus(githubPR)
});
pullRequest.organization = organization && organization.username;
return pullRequest;
}
private extractReviewComment(comment: IGithubReviewComment): IReviewComment {
return {
id: comment.id,
reviewId: comment.pull_request_review_id,
author: this.extractUser(comment.user).username,
message: comment.body,
createdOn: comment.created_at,
edited: isEqual(comment.created_at, comment.updated_at),
apiUrl: comment.url,
file: comment.path,
commit: comment.commit_id
};
}
private getPRStatus(githubPR: IGithubPullRequest) {
return githubPR.state === 'open' ?
PRStatus.OPEN :
(githubPR.merged ? PRStatus.MERGED : PRStatus.CLOSED);
}
} | the_stack |
import type { Percent } from "./Percent";
import type { IPointerEvent } from "../render/backend/Renderer";
import * as $type from "./Type";
import * as $array from "./Array";
import * as $object from "./Object";
import type { IBounds } from "./IBounds";
import { Disposer, IDisposer } from "./Disposer";
/**
* ============================================================================
* DOM FUNCTIONS
* ============================================================================
* @hidden
*/
/**
* Execute a function when DOM is ready.
*
* @since 5.0.2
* @param f Callback
*/
export function ready(f: () => void): void {
	if (document.readyState === "loading") {
		// DOM not available yet: wait until readyState leaves "loading".
		const onStateChange = () => {
			if (document.readyState !== "loading") {
				document.removeEventListener("readystatechange", onStateChange);
				f();
			}
		};
		document.addEventListener("readystatechange", onStateChange);
	}
	else {
		// DOM is already available: run immediately.
		f();
	}
}
/**
* Removes a DOM element.
* @param el Target element
*/
export function removeElement(el: HTMLElement): void {
	// Detach from the parent, if the element is attached at all.
	const parent = el.parentNode;
	if (parent) {
		parent.removeChild(el);
	}
}
/**
* Function that adds a disposable event listener directly to a DOM element.
*
* @ignore Exclude from docs
* @param dom A DOM element to add event to
* @param type Event type
* @param listener Event listener
* @returns Disposable event
*/
export function addEventListener<E extends Event>(dom: EventTarget, type: string, listener: (event: E) => void, options?: any): IDisposer {
	//@todo proper type check for options: EventListenerOptions | boolean (TS for some reason gives error on passive parameter)
	const resolvedOptions = options || false;
	dom.addEventListener(type, <EventListener>listener, resolvedOptions);
	// The returned Disposer lets the caller detach the listener later.
	return new Disposer(() => {
		dom.removeEventListener(type, <EventListener>listener, resolvedOptions);
	});
}
/**
* @ignore
*/
export function supports(cap: "touchevents" | "pointerevents" | "mouseevents" | "wheelevents" | "keyboardevents"): boolean {
	// Each capability maps to the global event constructor it relies on.
	const eventClassByCap: { [key: string]: string } = {
		touchevents: "TouchEvent",
		pointerevents: "PointerEvent",
		mouseevents: "MouseEvent",
		wheelevents: "WheelEvent",
		keyboardevents: "KeyboardEvent"
	};
	const className = eventClassByCap[cap];
	return className == null ? false : window.hasOwnProperty(className);
}
/**
* @ignore
*/
export function getPointerId(event: IPointerEvent) {
	// Fall back to 0 for events without a pointerId (e.g. mouse/touch fallbacks).
	return (<any>event).pointerId || 0;
}
/**
* Removes focus from any element by shifting focus to body.
*
* @ignore
*/
export function blur(): void {
	// Nothing to do when focus is already on <body> (or nowhere).
	if (document.activeElement && document.activeElement != document.body) {
		if ((<any>document.activeElement).blur) {
			(<any>document.activeElement).blur();
		}
		else {
			// The focused element does not support blur(): steal focus with a
			// temporary off-screen button, then immediately drop it again.
			let input = document.createElement("button");
			input.style.position = "fixed";
			input.style.top = "0px";
			input.style.left = "-10000px";
			document.body.appendChild(input);
			input.focus();
			input.blur();
			document.body.removeChild(input);
		}
	}
}
/**
* Focuses element.
*
* @ignore
*/
export function focus(el: HTMLElement): void {
	// Guard against null/undefined targets.
	if (!el) {
		return;
	}
	el.focus();
}
/**
* @ignore
*/
export function getRendererEvent(key: string): string {
	// Pointer events pass through untranslated when supported natively.
	if (supports("pointerevents")) {
		return key;
	}
	// Otherwise translate pointer event names to the closest available family.
	let translations: { [key: string]: string } | undefined;
	if (supports("touchevents")) {
		translations = {
			pointerover: "touchstart",
			pointerout: "touchend",
			pointerdown: "touchstart",
			pointermove: "touchmove",
			pointerup: "touchend",
			click: "click",
			dblclick: "dblclick"
		};
	}
	else if (supports("mouseevents")) {
		translations = {
			pointerover: "mouseover",
			pointerout: "mouseout",
			pointerdown: "mousedown",
			pointermove: "mousemove",
			pointerup: "mouseup",
			click: "click",
			dblclick: "dblclick"
		};
	}
	if (translations && key in translations) {
		return translations[key];
	}
	// Unknown key (or no supported event family): return as-is.
	return key;
}
/**
* Determines if pointer event originated from a touch pointer or mouse.
*
* @param ev Original event
* @return Touch pointer?
*/
export function isTouchEvent(ev: MouseEvent | Touch): boolean {
	// Genuine Touch instances are always touch.
	if (typeof Touch !== "undefined" && ev instanceof Touch) {
		return true;
	}
	else if (typeof PointerEvent !== "undefined" && ev instanceof PointerEvent && (<any>ev).pointerType != null) {
		switch ((<any>ev).pointerType) {
			case "touch":
			case "pen":
			case 2: // NOTE(review): numeric codes presumably legacy MSPointerEvent values — confirm
				return true;
			case "mouse":
			case 4:
				return false;
			default:
				// Unknown pointerType: assume touch unless it is clearly a MouseEvent.
				return !(ev instanceof MouseEvent);
		}
	}
	else if ((<any>ev).type != null) {
		// No pointer info available — fall back to the event type name.
		if ((<any>ev).type.match(/^mouse/)) {
			return false;
		}
	}
	// Default to "touch" when nothing indicates a mouse.
	return true;
}
/**
* Sets style property on DOM element.
*
* @ignore Exclude from docs
*/
export function setStyle(dom: HTMLElement, property: string, value: string | undefined): void {
	// Index-assign so camelCased property names work without casts at call sites.
	const style = <any>dom.style;
	style[property] = value;
}
export function getStyle(dom: HTMLElement, property: string): string | undefined {
	// Reads an inline style property by (camelCased) name.
	const style = <any>dom.style;
	return style[property];
}
/**
* Checks of element `a` contains element `b`.
*
* @param a Aleged ascendant
* @param b Aleged descendant
* @return Contains?
*/
export function contains(a: Element, b: Element): boolean {
	// Walk up from `b`, crossing shadow DOM boundaries via `host`.
	let cursor: Node = b;
	while (true) {
		if (a === cursor) {
			return true;
		} else if (cursor.parentNode === null) {
			// TODO better ShadowRoot detection
			if ((<ShadowRoot>cursor).host == null) {
				// Reached the document root without meeting `a`.
				return false;
			} else {
				// Jump from a shadow root to its host element and keep climbing.
				cursor = (<ShadowRoot>cursor).host;
			}
		} else {
			cursor = cursor.parentNode;
		}
	}
}
/**
* Disables or enables interactivity of a DOM element.
*
* @param target Target element
* @param interactive Interactive?
*/
export function setInteractive(target: HTMLElement, interactive: boolean): void {
	// "none" blocks all pointer interaction; "" restores the default.
	target.style.pointerEvents = interactive ? "" : "none";
}
/**
* Returns the shadow root of the element or null
*
* @param a Node
* @return Root
*/
export function getShadowRoot(a: Node): ShadowRoot | null {
	// Climb the tree until there is no parent; the topmost node is either
	// a shadow root (has a `host`) or the document itself.
	let cursor: Node = a;
	while (true) {
		if (cursor.parentNode === null) {
			// TODO better ShadowRoot detection
			if ((<ShadowRoot>cursor).host != null) {
				return <ShadowRoot>cursor;
			} else {
				// Not inside a shadow tree.
				return null;
			}
		} else {
			cursor = cursor.parentNode;
		}
	}
}
/**
* [rootStylesheet description]
*
* @ignore Exclude from docs
* @todo Description
*/
let rootStylesheet: $type.Optional<CSSStyleSheet>; // lazily created by getStylesheet() and shared by all document-level rules
/**
* [getStylesheet description]
*
* @ignore Exclude from docs
* @todo Description
* @return [description]
*/
/**
 * Returns the stylesheet to attach dynamic rules to: a shared, lazily
 * created document-level sheet when `element` is null, or a fresh sheet
 * appended to the given shadow root otherwise.
 */
function getStylesheet(element: ShadowRoot | null, nonce: string = ""): CSSStyleSheet {
	// Creates a <style> element, optionally tagged with a CSP nonce.
	const makeStyleElement = (): HTMLStyleElement => {
		// TODO use createElementNS ?
		const e = document.createElement("style");
		e.type = "text/css";
		if (nonce != "") {
			e.setAttribute("nonce", nonce)
		}
		return e;
	};
	if (element === null) {
		// NOTE: the cached root sheet keeps whatever nonce it was first created with.
		if (rootStylesheet == null) {
			const e = makeStyleElement();
			document.head.appendChild(e);
			rootStylesheet = e.sheet as CSSStyleSheet;
		}
		return rootStylesheet;
	} else {
		// Each shadow root gets its own stylesheet.
		const e = makeStyleElement();
		element.appendChild(e);
		return e.sheet as CSSStyleSheet;
	}
}
/**
* [makeStylesheet description]
*
* @ignore Exclude from docs
* @todo Description
* @param selector [description]
* @return [description]
*/
/**
 * Appends an empty rule for `selector` to the given stylesheet and
 * returns the created CSSStyleRule.
 */
function appendStylesheet(root: CSSStyleSheet, selector: string): CSSStyleRule {
	// New rule goes at the end of the sheet.
	const ruleIndex = root.cssRules.length;
	root.insertRule(selector + "{}", ruleIndex);
	return <CSSStyleRule>root.cssRules[ruleIndex];
}
/**
* Defines a class for a CSS rule.
*
* Can be used to dynamically add CSS to the document.
*/
export class StyleRule extends Disposer {
	/**
	 * The live CSS rule this instance wraps and mutates.
	 */
	private _rule: CSSStyleRule;
	/**
	 * A CSS selector text.
	 *
	 * E.g.: `.myClass p`
	 *
	 * @param selector  CSS selector
	 */
	public set selector(selector: string) {
		this._rule.selectorText = selector;
	}
	/**
	 * @return CSS selector
	 */
	public get selector(): string {
		return this._rule.selectorText;
	}
	/**
	 * Constructor.
	 *
	 * Appends an empty rule for `selector` to the document-level (or
	 * shadow-root) stylesheet and fills it with `styles`. Disposing the
	 * instance removes the rule again.
	 *
	 * @param selector  CSS selector
	 * @param styles    An object of style attribute - value pairs
	 */
	constructor(element: ShadowRoot | null, selector: string, styles: { [name: string]: string }, nonce: string = "") {
		const root = getStylesheet(element, nonce);
		// TODO test this
		super(() => {
			// TODO a bit hacky
			// Locate the rule by identity — its index may have shifted since creation.
			const index = $array.indexOf(root.cssRules, this._rule);
			if (index === -1) {
				throw new Error("Could not dispose StyleRule");
			} else {
				// TODO if it's empty remove it from the DOM ?
				root.deleteRule(index);
			}
		});
		this._rule = appendStylesheet(root, selector);
		$object.each(styles, (key, value) => {
			this.setStyle(<string>key, value);
		});
	}
	/**
	 * Sets the same style properties with browser-specific prefixes.
	 *
	 * @param name   Attribute name
	 * @param value  Attribute value
	 */
	private _setVendorPrefixName(name: string, value: string): void {
		const style = this._rule.style;
		style.setProperty("-webkit-" + name, value, "");
		style.setProperty("-moz-" + name, value, "");
		style.setProperty("-ms-" + name, value, "");
		style.setProperty("-o-" + name, value, "");
		style.setProperty(name, value, "");
	}
	/**
	 * Sets a value for specific style attribute.
	 *
	 * @param name   Attribute
	 * @param value  Value
	 */
	public setStyle(name: string, value: string): void {
		// "transition" historically needs vendor prefixes; everything else is set as-is.
		if (name === "transition") {
			this._setVendorPrefixName(name, value);
		} else {
			this._rule.style.setProperty(name, value, "");
		}
	}
}
// /**
// * Applies a set of styles to an element. Stores the original styles so they
// * can be restored later.
// *
// * @ignore
// * @param io Element
// */
// export function prepElementForDrag(dom: HTMLElement): void {
// // @todo: save current values
// // Define possible props
// let props = [
// "touchAction", "webkitTouchAction", "MozTouchAction", "MSTouchAction", "msTouchAction", "oTouchAction",
// "userSelect", "webkitUserSelect", "MozUserSelect", "MSUserSelect", "msUserSelect", "oUserSelect",
// "touchSelect", "webkitTouchSelect", "MozTouchSelect", "MSTouchSelect", "msTouchSelect", "oTouchSelect",
// "touchCallout", "webkitTouchCallout", "MozTouchCallout", "MSTouchCallout", "msTouchCallout", "oTouchCallout",
// "contentZooming", "webkitContentZooming", "MozContentZooming", "MSContentZooming", "msContentZooming", "oContentZooming",
// "userDrag", "webkitUserDrag", "MozUserDrag", "MSUserDrag", "msUserDrag", "oUserDrag"
// ];
// for (let i = 0; i < props.length; i++) {
// if (props[i] in dom.style) {
// setStyle(dom, props[i], "none");
// }
// }
// // Remove iOS-specific selection;
// setStyle(dom, "tapHighlightColor", "rgba(0, 0, 0, 0)");
// }
// /**
// * Restores replaced styles
// *
// * @ignore
// * @param io Element
// */
// export function unprepElementForDrag(dom: HTMLElement): void {
// // Define possible props
// let props = [
// "touchAction", "webkitTouchAction", "MozTouchAction", "MSTouchAction", "msTouchAction", "oTouchAction",
// "userSelect", "webkitUserSelect", "MozUserSelect", "MSUserSelect", "msUserSelect", "oUserSelect",
// "touchSelect", "webkitTouchSelect", "MozTouchSelect", "MSTouchSelect", "msTouchSelect", "oTouchSelect",
// "touchCallout", "webkitTouchCallout", "MozTouchCallout", "MSTouchCallout", "msTouchCallout", "oTouchCallout",
// "contentZooming", "webkitContentZooming", "MozContentZooming", "MSContentZooming", "msContentZooming", "oContentZooming",
// "userDrag", "webkitUserDrag", "MozUserDrag", "MSUserDrag", "msUserDrag", "oUserDrag"
// ];
// for (let i = 0; i < props.length; i++) {
// if (props[i] in dom.style) {
// setStyle(dom, props[i], "");
// }
// }
// // Remove iOS-specific selection;
// setStyle(dom, "tapHighlightColor", "");
// }
/**
 * Resolves a value that may be absolute (number) or relative (Percent)
 * against `full`. Returns 0 when nothing usable is passed in.
 */
export function relativeToValue(percent: number | Percent | undefined | null, full: number): number {
	// Plain numbers are already absolute.
	if ($type.isNumber(percent)) {
		return percent;
	} else if (percent != null && $type.isNumber(percent.value) && $type.isNumber(full)) {
		// A Percent instance: scale its (0-1) value against the full amount.
		return full * percent.value;
	} else {
		// Neither a number nor a usable Percent.
		return 0;
	}
}
/**
* Returns number of decimals
*
* @ignore Exclude from docs
* @param number Input number
* @return Number of decimals
*/
export function decimalPlaces(number: number): number {
	// Inspect the string form: fraction digits add places, a positive
	// exponent removes them (e.g. 1e-7 has 7 decimal places).
	const parts = ('' + number).match(/(?:\.(\d+))?(?:[eE]([+-]?\d+))?$/);
	if (!parts) {
		return 0;
	}
	const fractionLength = parts[1] ? parts[1].length : 0;
	const exponent = parts[2] ? +parts[2] : 0;
	return Math.max(0, fractionLength - exponent);
}
/**
* ============================================================================
* STRING FORMATTING FUNCTIONS
* ============================================================================
* @hidden
*/
/**
* Pads a string with additional characters to certain length.
*
* @param value A numeric value
* @param len Result string length in characters
* @param char A character to use for padding
* @return Padded value as string
*/
export function padString(value: any, len: number = 0, char: string = "0"): string {
	// Coerce non-strings first so .length is meaningful.
	const text: string = typeof value === "string" ? value : value.toString();
	if (len <= text.length) {
		return text;
	}
	// Prepend as many pad characters as needed to reach `len`.
	return new Array(len - text.length + 1).join(char) + text;
}
export function trimLeft(text: string): string {
	// Strip the leading run of newlines, spaces and tabs (if any).
	const leadingWhitespace = /^[\n \t]+/;
	return text.replace(leadingWhitespace, "");
}
/**
 * Removes trailing newlines, spaces and tabs from a string.
 *
 * BUGFIX: the previous pattern `/^[\n \t]+$/` was anchored at both ends,
 * so it only matched strings made up entirely of whitespace and real
 * trailing whitespace was never trimmed.
 */
export function trimRight(text: string): string {
	return text.replace(/[\n \t]+$/, "");
}
export function trim(text: string): string {
	// Trim both ends by composing the two one-sided trims.
	const withoutLeading = trimLeft(text);
	return trimRight(withoutLeading);
}
/**
* Tries to determine format type.
*
* @ignore Exclude from docs
* @param format Format string
* @return Format type ("string" | "number" | "date" | "duration")
*/
export function getFormat(format: string): string {
	// No format at all defaults to plain string.
	if (typeof format === "undefined") {
		return "string";
	}
	// Normalize: lowercase, drop a leading style block and one inline
	// style block, then trim.
	let cleaned = format.toLowerCase().replace(/^\[[^\]]*\]/, "");
	cleaned = cleaned.replace(/\[[^\]]+\]/, "");
	cleaned = cleaned.trim();
	// Explicit "/date", "/number" or "/duration" suffix wins outright.
	const hint = cleaned.match(/\/(date|number|duration)$/);
	if (hint) {
		return hint[1];
	}
	// The bare keywords are also explicit hints.
	switch (cleaned) {
		case "number":
		case "date":
		case "duration":
			return cleaned;
	}
	// "#" or "0" are number-formatting placeholders.
	if (/[#0]/.test(cleaned)) {
		return "number";
	}
	// Any date-formatting letter implies a date format.
	if (/[ymwdhnsqaxkzgtei]/.test(cleaned)) {
		return "date";
	}
	// Nothing recognizable: treat as a plain string format.
	return "string";
}
/**
* Cleans up format:
* * Strips out formatter hints
*
* @ignore Exclude from docs
* @param format Format
* @return Cleaned format
*/
export function cleanFormat(format: string): string {
	// Drop a trailing "/date", "/number" or "/duration" hint (any case).
	const hintSuffix = /\/(date|number|duration)$/i;
	return format.replace(hintSuffix, "");
}
/**
* Strips all tags from the string.
*
* @param text Source string
* @return String without tags
*/
export function stripTags(text: string): string {
	// Falsy input (empty string, null, undefined) is returned untouched.
	if (!text) {
		return text;
	}
	return text.replace(/<[^>]*>/g, "");
}
/**
* Removes new lines and tags from a string.
*
* @param text String to conver
* @return Converted string
*/
export function plainText(text: string): string {
	// Falsy input is returned untouched.
	if (!text) {
		return text;
	}
	// Collapse newline runs into sentence breaks, then drop tags.
	const collapsed = ("" + text).replace(/[\n\r]+/g, ". ");
	return stripTags(collapsed);
}
/**
* Escapes string so it can safely be used in a Regex.
*
* @param value Unsescaped string
* @return Escaped string
*/
export function escapeForRgex(value: string): string {
	// Backslash-prefix every character that is special inside a RegExp.
	const specialChars = /[-[\]{}()*+?.,\\^$|#\s]/g;
	return value.replace(specialChars, '\\$&');
}
/**
* ============================================================================
* DATE-RELATED FUNCTIONS
* ============================================================================
* @hidden
*/
/**
* Returns a year day.
*
* @param date Date
* @param utc Assume UTC dates?
* @return Year day
* @todo Account for UTC
*/
export function getYearDay(date: Date, utc: boolean = false): number {
	// TODO: utc needed?
	utc;
	const MS_PER_DAY = 1000 * 60 * 60 * 24;
	// "Day zero" of the year (Dec 31 of the previous year).
	const yearStart = new Date(date.getFullYear(), 0, 0);
	// Compensate for DST shifts between year start and the given date.
	const tzCorrection = (yearStart.getTimezoneOffset() - date.getTimezoneOffset()) * 60 * 1000;
	const elapsed = (date.getTime() - yearStart.getTime()) + tzCorrection;
	return Math.floor(elapsed / MS_PER_DAY);
}
/**
* Returns week number for a given date.
*
* @param date Date
* @param utc Assume UTC dates?
* @return Week number
* @todo Account for UTC
*/
export function getWeek(date: Date, utc: boolean = false): number {
	// Zero-based day of year; Sunday counts as 7 (ISO-style weekday).
	const dayOfYear = getYearDay(date, utc) - 1;
	const weekday = date.getDay() || 7;
	let week = Math.floor((dayOfYear - weekday + 10) / 7);
	// Wrap the boundary weeks into the adjacent year's numbering.
	if (week === 0) {
		week = 53;
	}
	else if (week === 53) {
		week = 1;
	}
	return week;
}
/**
* Returns a week number in the month.
*
* @param date Source Date
* @param utc Assume UTC dates?
* @return Week number in month
*/
export function getMonthWeek(date: Date, utc: boolean = false): number {
	// Week number of the first day of the month vs. of the date itself.
	const monthStart = new Date(date.getFullYear(), date.getMonth(), 1);
	const firstWeek = getWeek(monthStart, utc);
	let currentWeek = getWeek(date, utc);
	// Week 1 late in December belongs to the next year; map it past 52.
	if (currentWeek == 1) {
		currentWeek = 53;
	}
	return currentWeek - firstWeek + 1;
}
/**
* Returns a year day out of the given week number.
*
* @param week Week
* @param year Year
* @param weekday Weekday
* @param utc Assume UTC dates
* @return Day in a year
*/
export function getDayFromWeek(week: number, year: number, weekday: number = 1, utc: boolean = false): number {
	// Jan 4 is always inside ISO week 1; anchor the calculation on it.
	const jan4 = new Date(year, 0, 4, 0, 0, 0, 0);
	if (utc) {
		jan4.setUTCFullYear(year);
	}
	const jan4Weekday = jan4.getDay() || 7;
	return week * 7 + weekday - (jan4Weekday + 3);
}
/**
* Returns 12-hour representation out of the 24-hour hours.
*
* @param hours 24-hour number
* @return 12-hour number
*/
export function get12Hours(hours: number, base?: number): number {
	// Map 24-hour values onto the 1-12 clock face.
	let result = hours;
	if (result > 12) {
		result -= 12;
	}
	else if (result === 0) {
		result = 12;
	}
	// An optional base shifts the result (e.g. base 0 for zero-based clocks).
	return base != null ? result + (base - 1) : result;
}
/**
* Returns a string name of the tome zone.
*
* @param date Date object
* @param long Should return long ("Pacific Standard Time") or short abbreviation ("PST")
* @param savings Include information if it's in daylight savings mode
* @param utc Assume UTC dates
* @return Time zone name
*/
export function getTimeZone(date: Date, long: boolean = false, savings: boolean = false, utc: boolean = false): string {
	if (utc) {
		return long ? "Coordinated Universal Time" : "UTC";
	}
	// NOTE(review): "UTC" is passed as the *locale* argument here, not a time
	// zone; it parses as a (meaningless) language tag so the runtime falls
	// back to the default locale. Presumably intentional — confirm.
	let wotz = date.toLocaleString("UTC");
	// Format the same date with the zone name appended, then slice off the
	// plain date/time prefix, leaving just the zone-name suffix.
	let wtz = date.toLocaleString("UTC", { timeZoneName: long ? "long" : "short" }).substr(wotz.length);
	//wtz = wtz.replace(/[+-]+[0-9]+$/, "");
	if (savings === false) {
		// Strip the "Standard"/"Daylight"/etc. qualifier when not requested.
		wtz = wtz.replace(/ (standard|daylight|summer|winter) /i, " ");
	}
	return wtz;
}
export function capitalizeFirst(text: string): string {
	// Upper-case only the first character; empty strings pass through.
	const head = text.charAt(0);
	return head.toUpperCase() + text.substring(1);
}
/**
* ============================================================================
* COLOR UTILS
* ============================================================================
*/
/**
* Represents an interface for an object that represents an RGB color.
*/
export interface iRGB {
	r: number;  // red channel, 0-255 (per hslToRgb/rgbToHsl contract)
	g: number;  // green channel, 0-255
	b: number;  // blue channel, 0-255
	a?: number; // optional alpha component (range not enforced here)
}
/**
* Represents an interface for an object that represents an HSL color.
*/
export interface iHSL {
	h: number;  // hue, 0-1 (per hslToRgb/rgbToHsl contract)
	s: number;  // saturation, 0-1
	l: number;  // lightness, 0-1
	a?: number; // optional alpha component (range not enforced here)
}
/**
* The functions below are taken and adapted from Garry Tan's blog post:
* http://axonflux.com/handy-rgb-to-hsl-and-rgb-to-hsv-color-model-c
*
* The further attributions go mjijackson.com, which now seems to be defunct.
*/
/**
* Converts an HSL color value to RGB. Conversion formula
* adapted from http://en.wikipedia.org/wiki/HSL_color_space.
* Assumes h, s, and l are contained in the set [0, 1] and
* returns r, g, and b in the set [0, 255].
*
* Function adapted from:
* http://axonflux.com/handy-rgb-to-hsl-and-rgb-to-hsv-color-model-c
*
* @ignore Exclude from docs
* @param h The hue
* @param s The saturation
* @param l The lightness
* @return The RGB representation
*/
export function hslToRgb(color: iHSL): iRGB {
	const h = color.h;
	const s = color.s;
	const l = color.l;
	let r: number;
	let g: number;
	let b: number;
	if (s == 0) {
		// Achromatic: every channel equals the lightness.
		r = g = b = l;
	} else {
		// Standard HSL-to-RGB helper (see Wikipedia's HSL color space article).
		const hue2rgb = (p: number, q: number, t: number): number => {
			if (t < 0) { t += 1; }
			if (t > 1) { t -= 1; }
			if (t < 1 / 6) { return p + (q - p) * 6 * t; }
			if (t < 1 / 2) { return q; }
			if (t < 2 / 3) { return p + (q - p) * (2 / 3 - t) * 6; }
			return p;
		};
		const q = l < 0.5 ? l * (1 + s) : l + s - l * s;
		const p = 2 * l - q;
		r = hue2rgb(p, q, h + 1 / 3);
		g = hue2rgb(p, q, h);
		b = hue2rgb(p, q, h - 1 / 3);
	}
	// Scale from [0, 1] to [0, 255].
	return {
		r: Math.round(r * 255),
		g: Math.round(g * 255),
		b: Math.round(b * 255)
	};
}
/**
* Converts an RGB color value to HSL. Conversion formula
* adapted from http://en.wikipedia.org/wiki/HSL_color_space.
* Assumes r, g, and b are contained in the set [0, 255] and
* returns h, s, and l in the set [0, 1].
*
* Function adapted from:
* http://axonflux.com/handy-rgb-to-hsl-and-rgb-to-hsv-color-model-c
*
* @ignore Exclude from docs
* @param r The red color value
* @param g The green color value
* @param b The blue color value
* @return The HSL representation
*/
export function rgbToHsl(color: iRGB): iHSL {
	// Normalize channels from [0, 255] to [0, 1].
	const r = color.r / 255;
	const g = color.g / 255;
	const b = color.b / 255;
	const max = Math.max(r, g, b);
	const min = Math.min(r, g, b);
	const lightness = (max + min) / 2;
	let hue = 0;
	let saturation = 0;
	if (max !== min) {
		const d = max - min;
		saturation = lightness > 0.5 ? d / (2 - max - min) : d / (max + min);
		// Hue depends on which channel dominates.
		switch (max) {
			case r:
				hue = (g - b) / d + (g < b ? 6 : 0);
				break;
			case g:
				hue = (b - r) / d + 2;
				break;
			case b:
				hue = (r - g) / d + 4;
				break;
		}
		hue /= 6;
	}
	return {
		h: hue,
		s: saturation,
		l: lightness
	};
}
/**
* Returns a color that is `percent` brighter than the reference color.
*
* @ignore Exclude from docs
* @param color Reference color
* @param percent Brightness percent
* @return Hex code of the new color
*/
export function lighten(rgb: $type.Optional<iRGB>, percent: number): $type.Optional<iRGB> {
	if (!rgb) {
		// TODO is this correct ?
		return rgb;
	}
	// Each channel moves by its own lightness step, clamped to [0, 255].
	const clamp = (value: number): number => Math.max(0, Math.min(255, value));
	return {
		r: clamp(rgb.r + getLightnessStep(rgb.r, percent)),
		g: clamp(rgb.g + getLightnessStep(rgb.g, percent)),
		b: clamp(rgb.b + getLightnessStep(rgb.b, percent)),
		a: rgb.a
	};
}
/**
* Gets lightness step.
*
* @ignore Exclude from docs
* @param value Value
* @param percent Percent
* @return Step
*/
export function getLightnessStep(value: number, percent: number): number {
	// Brightening scales the headroom to 255; darkening scales the value itself.
	const headroom = percent > 0 ? 255 - value : value;
	return Math.round(headroom * percent);
}
/**
* Returns a color that is `percent` brighter than the source `color`.
*
* @ignore Exclude from docs
* @param color Source color
* @param percent Brightness percent
* @return New color
*/
export function brighten(rgb: $type.Optional<iRGB>, percent: number): $type.Optional<iRGB> {
	if (!rgb) {
		// TODO is this correct ?
		return rgb;
	}
	// One shared step, derived from the dominant channel (capped at 230
	// so near-white colors can still brighten a little).
	const base = Math.min(Math.max(rgb.r, rgb.g, rgb.b), 230);
	const step = getLightnessStep(base, percent);
	const clamp = (value: number): number => Math.max(0, Math.min(255, Math.round(value)));
	return {
		r: clamp(rgb.r + step),
		g: clamp(rgb.g + step),
		b: clamp(rgb.b + step),
		a: rgb.a
	};
}
/**
* Returns brightness step.
*
* @ignore Exclude from docs
* @param value Value
* @param percent Percent
* @return Step
*/
export function getBrightnessStep(_value: number, percent: number): number {
	// Unlike getLightnessStep, this scales a fixed base of 255.
	const base = 255; //percent > 0 ? 255 - value : value;
	return Math.round(base * percent);
}
/**
* Returns `true` if color is "light". Useful indetermining which contrasting
* color to use for elements over this color. E.g.: you would want to use
* black text over light background, and vice versa.
*
* @ignore Exclude from docs
* @param color Source color
* @return Light?
*/
export function isLight(color: iRGB): boolean {
	// Perceived luma with the classic 299/587/114 channel weights.
	const luma = ((color.r * 299) + (color.g * 587) + (color.b * 114)) / 1000;
	return luma >= 128;
}
/**
* Returns a new [[iRGB]] object based on `rgb` parameter with specific
* saturation applied.
*
* `saturation` can be in the range of 0 (fully desaturated) to 1 (fully
* saturated).
*
* @ignore Exclude from docs
* @param color Base color
* @param saturation Saturation (0-1)
* @return New color
*/
export function saturate(rgb: $type.Optional<iRGB>, saturation: number): $type.Optional<iRGB> {
	// Nothing to do for missing colors or a no-op saturation of 1.
	if (rgb === undefined || saturation == 1) {
		return rgb;
	}
	// Round-trip through HSL, overriding only the saturation component.
	const hsl = rgbToHsl(rgb);
	hsl.s = saturation;
	return hslToRgb(hsl);
}
export function alternativeColor(color: iRGB, lightAlternative: iRGB = { r: 255, g: 255, b: 255 }, darkAlternative: iRGB = { r: 255, g: 255, b: 255 }): iRGB {
	// If the "dark" alternative is actually light, the caller passed the
	// alternatives swapped — normalize before choosing.
	const swapped = isLight(darkAlternative);
	const light = swapped ? darkAlternative : lightAlternative;
	const dark = swapped ? lightAlternative : darkAlternative;
	// Light colors get the dark alternative and vice versa.
	return isLight(color) ? dark : light;
}
/**
* @ignore
* @deprecated
*/
// export function unshiftThemeClass(settings: any, themeClass: string) {
// let themeClasses = settings.themeClasses;
// if (!themeClasses) {
// themeClasses = [];
// }
// themeClasses.unshift(themeClass);
// settings.themeClasses = themeClasses;
// }
/**
* @ignore
* @deprecated
*/
// export function pushThemeClass(settings: any, themeClass: string) {
// let themeClasses = settings.themeClasses;
// if (!themeClasses) {
// themeClasses = [];
// }
// themeClasses.push(themeClass);
// settings.themeClasses = themeClasses;
// }
/**
 * Concatenates two tag lists, dropping duplicates while preserving
 * first-occurrence order.
 *
 * @ignore
 */
export function mergeTags(tags1: string[] | undefined, tags2: string[]): string[] {
  // A Set keeps insertion order, which matches the original indexOf-based
  // de-duplication exactly.
  return Array.from(new Set([...(tags1 || []), ...tags2]));
}
/**
* @ignore
*/
export function sameBounds(a: IBounds, b?: IBounds): boolean {
if (!b) {
return false;
}
if (a.left != b.left) {
return false
}
if (a.right != b.right) {
return false
}
if (a.top != b.top) {
return false
}
if (a.bottom != b.bottom) {
return false
}
return true;
} | the_stack |
import { PageConfig, URLExt } from '@jupyterlab/coreutils';
import log from 'electron-log';
// Built-in JupyterLab extensions bundled with the desktop app, keyed by the
// module path they are required from. Consumed by loadExtensions() below.
const extensions: any = {
  './electron-extension': require('./electron-extension'),
  './desktop-extension': require('./desktop-extension'),
  // turn off menu customization for now
  // './utils-extension': require('./utils-extension'),
  /**
   * Following extensions are defined under `extensions` in `package.json` and
   * are loaded eagerly by webpack module federation, which makes it possible
   * to share them with pre-built user extensions.
   *
   * For the user prebuilt extensions to work, in addition to the list below,
   * all core packages need to be listed with an appropriate version under
   * `resolutions` in `package.json`.
   */
  '@jupyterlab/apputils-extension': require('@jupyterlab/apputils-extension'),
  '@jupyterlab/celltags-extension': require('@jupyterlab/celltags-extension'),
  '@jupyterlab/codemirror-extension': require('@jupyterlab/codemirror-extension'),
  '@jupyterlab/completer-extension': require('@jupyterlab/completer-extension'),
  '@jupyterlab/console-extension': require('@jupyterlab/console-extension'),
  '@jupyterlab/csvviewer-extension': require('@jupyterlab/csvviewer-extension'),
  '@jupyterlab/debugger-extension': require('@jupyterlab/debugger-extension'),
  '@jupyterlab/docmanager-extension': require('@jupyterlab/docmanager-extension'),
  '@jupyterlab/docprovider-extension': require('@jupyterlab/docprovider-extension'),
  '@jupyterlab/documentsearch-extension': require('@jupyterlab/documentsearch-extension'),
  '@jupyterlab/filebrowser-extension': require('@jupyterlab/filebrowser-extension'),
  '@jupyterlab/fileeditor-extension': require('@jupyterlab/fileeditor-extension'),
  '@jupyterlab/help-extension': require('@jupyterlab/help-extension'),
  '@jupyterlab/htmlviewer-extension': require('@jupyterlab/htmlviewer-extension'),
  '@jupyterlab/imageviewer-extension': require('@jupyterlab/imageviewer-extension'),
  '@jupyterlab/inspector-extension': require('@jupyterlab/inspector-extension'),
  '@jupyterlab/launcher-extension': require('@jupyterlab/launcher-extension'),
  '@jupyterlab/logconsole-extension': require('@jupyterlab/logconsole-extension'),
  '@jupyterlab/mainmenu-extension': require('@jupyterlab/mainmenu-extension'),
  '@jupyterlab/markdownviewer-extension': require('@jupyterlab/markdownviewer-extension'),
  '@jupyterlab/mathjax2-extension': require('@jupyterlab/mathjax2-extension'),
  '@jupyterlab/notebook-extension': require('@jupyterlab/notebook-extension'),
  '@jupyterlab/rendermime-extension': require('@jupyterlab/rendermime-extension'),
  '@jupyterlab/running-extension': require('@jupyterlab/running-extension'),
  '@jupyterlab/settingeditor-extension': require('@jupyterlab/settingeditor-extension'),
  '@jupyterlab/shortcuts-extension': require('@jupyterlab/shortcuts-extension'),
  '@jupyterlab/statusbar-extension': require('@jupyterlab/statusbar-extension'),
  '@jupyterlab/terminal-extension': require('@jupyterlab/terminal-extension'),
  '@jupyterlab/theme-dark-extension': require('@jupyterlab/theme-dark-extension'),
  '@jupyterlab/theme-light-extension': require('@jupyterlab/theme-light-extension'),
  '@jupyterlab/toc-extension': require('@jupyterlab/toc-extension'),
  '@jupyterlab/tooltip-extension': require('@jupyterlab/tooltip-extension'),
  '@jupyterlab/translation-extension': require('@jupyterlab/translation-extension'),
  '@jupyterlab/ui-components-extension': require('@jupyterlab/ui-components-extension'),
  '@jupyterlab/vdom-extension': require('@jupyterlab/vdom-extension')
} as { [key: string]: any };
// Bundled mime-renderer extensions, keyed by module path; loaded through the
// same loadExtensions() machinery as the regular extensions above.
const mimeExtensions: any = {
  '@jupyterlab/javascript-extension': require('@jupyterlab/javascript-extension'),
  '@jupyterlab/json-extension': require('@jupyterlab/json-extension'),
  '@jupyterlab/pdf-extension': require('@jupyterlab/pdf-extension'),
  '@jupyterlab/vega5-extension': require('@jupyterlab/vega5-extension')
} as { [key: string]: any };
// Bookkeeping for plugin filtering, populated by loadExtensions() and
// activePlugins() and returned from main():
// - disabled: extension/plugin ids turned off via the page config
const disabled = { patterns: [] as string[], matches: [] as string[] };
// - deferred: ids whose activation is deferred via the page config
const deferred = { patterns: [] as string[], matches: [] as string[] };
// - ignored: ids skipped during activation (the deferred ones)
const ignored: string[] = [];
/**
 * Collect the enabled plugins from an extension map, honoring the
 * deferred/disabled settings from PageConfig. Deferred ids are recorded in
 * `deferred`/`ignored`; disabled ids are recorded in `disabled` and skipped.
 */
function loadExtensions(extensions: { [key: string]: any }): any[] {
  const enabled: any[] = [];
  // Record and filter a single plugin object.
  const register = (plugin: any) => {
    if (PageConfig.Extension.isDeferred(plugin.id)) {
      deferred.matches.push(plugin.id);
      ignored.push(plugin.id);
    }
    if (PageConfig.Extension.isDisabled(plugin.id)) {
      disabled.matches.push(plugin.id);
      return;
    }
    enabled.push(plugin);
  };
  for (const extensionName of Object.keys(extensions)) {
    try {
      if (PageConfig.Extension.isDeferred(extensionName)) {
        deferred.matches.push(extensionName);
        ignored.push(extensionName);
      }
      if (PageConfig.Extension.isDisabled(extensionName)) {
        disabled.matches.push(extensionName);
        continue;
      }
      const extension = extensions[extensionName];
      // An extension module may export a single plugin or an array of them.
      if (Array.isArray(extension)) {
        extension.forEach(register);
      } else {
        enabled.push(extension);
      }
    } catch (e) {
      log.error(e);
    }
  }
  return enabled;
}
/**
 * Iterate over active plugins in an extension.
 *
 * #### Notes
 * This also populates the module-level `disabled` list as a side effect.
 */
function* activePlugins(extension: any) {
  // Handle commonjs or es2015 modules: ES modules are marked with
  // `__esModule` and export their plugin(s) via `default`; CommonJS modules
  // export them directly.
  const exported = Object.prototype.hasOwnProperty.call(extension, '__esModule')
    ? extension.default
    : extension;
  const plugins = Array.isArray(exported) ? exported : [exported];
  for (const plugin of plugins) {
    if (PageConfig.Extension.isDisabled(plugin.id)) {
      disabled.matches.push(plugin.id);
      continue;
    }
    yield plugin;
  }
}
/**
 * Inject a <script> tag into the document head and resolve once it has
 * loaded (reject on load error).
 */
function loadScript(url: string) {
  return new Promise((resolve, reject) => {
    const script = document.createElement('script');
    script.onerror = reject;
    script.onload = resolve;
    script.async = true;
    document.head.appendChild(script);
    // Assigning `src` after insertion kicks off the fetch.
    script.src = url;
  });
}
/**
 * Load a federated extension's remote-entry script and initialize its
 * webpack module-federation container.
 *
 * @param url URL of the remoteEntry script to inject
 * @param scope Name the container registers under on window._JUPYTERLAB
 */
async function loadComponent(url: any, scope: any) {
  await loadScript(url);
  // From https://webpack.js.org/concepts/module-federation/#dynamic-remote-containers
  // eslint-disable-next-line no-undef
  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
  // @ts-ignore
  // eslint-disable-next-line no-undef
  await __webpack_init_sharing__('default');
  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
  // @ts-ignore
  const container = window._JUPYTERLAB[scope];
  // Initialize the container, it may provide shared modules and may need ours
  // eslint-disable-next-line no-undef
  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
  // @ts-ignore
  // eslint-disable-next-line no-undef
  await container.init(__webpack_share_scopes__.default);
}
/**
 * Resolve a module factory from an already-initialized federation container
 * and instantiate it. Logs a warning (and rethrows) on failure.
 */
async function createModule(scope: any, module: any) {
  try {
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-ignore
    const container = window._JUPYTERLAB[scope];
    const factory = await container.get(module);
    return factory();
  } catch (e) {
    console.warn(
      `Failed to create module: package: ${scope}; module: ${module}`
    );
    throw e;
  }
}
/**
 * Assemble the full set of JupyterLab plugins to activate: the bundled
 * extensions above plus any federated (prebuilt, user-installed) extensions
 * advertised via the `federated_extensions` page-config option.
 *
 * @returns the enabled jupyterlab plugins, mime plugins, and the
 *          disabled/deferred/ignored bookkeeping objects.
 */
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
export async function main() {
  const jupyterlab = loadExtensions(extensions);
  const mime = loadExtensions(mimeExtensions);
  // based on https://github.com/jupyterlab/retrolab/blob/main/app/index.js
  // NOTE(review): loadExtensions returns plain plugin objects, so these two
  // awaits are effectively no-ops kept from the reference implementation.
  await Promise.all(jupyterlab);
  await Promise.all(mime);
  const extension_data = JSON.parse(
    PageConfig.getOption('federated_extensions')
  );
  const federatedExtensionPromises: any[] = [];
  const federatedMimeExtensionPromises: any[] = [];
  const federatedStylePromises: any[] = [];
  let labExtensionUrl = PageConfig.getOption('fullLabextensionsUrl');
  const baseUrl = PageConfig.getOption('baseUrl');
  // Load every federated remote-entry script; failures are collected via
  // allSettled so one broken extension cannot block the rest.
  const allFederatedExtensions = await Promise.allSettled(
    extension_data.map(async (data: any) => {
      await loadComponent(
        URLExt.join(baseUrl, labExtensionUrl, data.name, data.load),
        data.name
      );
      return data;
    })
  );
  // Queue module creation for each successfully loaded container.
  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
  // @ts-ignore
  allFederatedExtensions.forEach(p => {
    if (p.status === 'rejected') {
      // There was an error loading the component
      console.error(p.reason);
      return;
    }
    const data = p.value;
    if (data.extension) {
      federatedExtensionPromises.push(createModule(data.name, data.extension));
    }
    if (data.mimeExtension) {
      federatedMimeExtensionPromises.push(
        createModule(data.name, data.mimeExtension)
      );
    }
    if (data.style) {
      federatedStylePromises.push(createModule(data.name, data.style));
    }
  });
  // Add the federated extensions.
  const federatedExtensions = await Promise.allSettled(
    federatedExtensionPromises
  );
  federatedExtensions.forEach(p => {
    if (p.status === 'fulfilled') {
      for (let plugin of activePlugins(p.value)) {
        jupyterlab.push(plugin);
      }
    } else {
      console.error(p.reason);
    }
  });
  // Add the federated mime extensions.
  const federatedMimeExtensions = await Promise.allSettled(
    federatedMimeExtensionPromises
  );
  federatedMimeExtensions.forEach(p => {
    if (p.status === 'fulfilled') {
      for (let plugin of activePlugins(p.value)) {
        mime.push(plugin);
      }
    } else {
      console.error(p.reason);
    }
  });
  // Load all federated component styles and log errors for any that do not
  (await Promise.allSettled(federatedStylePromises))
    .filter(({ status }) => status === 'rejected')
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-ignore
    .forEach(({ reason }) => {
      console.error(reason);
    });
  return {
    jupyterlab,
    mime,
    disabled,
    deferred,
    ignored
  };
}
import toArray from 'lodash.toarray'
import { Editor, Node, Element, Path, Point, Range, Ancestor, Text } from 'slate'
import type { IDomEditor } from './interface'
import { Key } from '../utils/key'
import TextArea from '../text-area/TextArea'
import Toolbar from '../menus/bar/Toolbar'
import HoverBar from '../menus/bar/HoverBar'
import {
EDITOR_TO_ELEMENT,
ELEMENT_TO_NODE,
KEY_TO_ELEMENT,
NODE_TO_INDEX,
NODE_TO_KEY,
NODE_TO_PARENT,
EDITOR_TO_TEXTAREA,
EDITOR_TO_TOOLBAR,
EDITOR_TO_HOVER_BAR,
EDITOR_TO_WINDOW,
} from '../utils/weak-maps'
import $, {
DOMElement,
DOMNode,
DOMPoint,
DOMRange,
DOMSelection,
DOMStaticRange,
isDOMElement,
normalizeDOMPoint,
isDOMSelection,
hasShadowRoot,
walkTextNodes,
} from '../utils/dom'
import { IS_CHROME, IS_FIREFOX } from '../utils/ua'
/**
 * Custom global DOM commands/helpers for the editor.
 */
export const DomEditor = {
  /**
   * Return the host window of the current editor.
   */
  getWindow(editor: IDomEditor): Window {
    const window = EDITOR_TO_WINDOW.get(editor)
    if (!window) {
      throw new Error('Unable to find a host window element for this editor')
    }
    return window
  },
  /**
   * Find a key for a Slate node.
   * A key is a unique, monotonically increasing id; every slate node is bound
   * to one key, so it effectively acts as node.id.
   */
  findKey(editor: IDomEditor | null, node: Node): Key {
    let key = NODE_TO_KEY.get(node)
    // If the node has no key bound yet, create and bind one right away
    if (!key) {
      key = new Key()
      NODE_TO_KEY.set(node, key)
    }
    return key
  },
  // Bind a brand-new key to the node (replacing any previous binding).
  setNewKey(node: Node) {
    const key = new Key()
    NODE_TO_KEY.set(node, key)
  },
  /**
   * Find the path of Slate node.
   * A path is an array describing the node's location in the document tree —
   * https://docs.slatejs.org/concepts/03-locations#path
   */
  findPath(editor: IDomEditor | null, node: Node): Path {
    const path: Path = []
    let child = node
    // eslint-disable-next-line
    while (true) {
      const parent = NODE_TO_PARENT.get(child)
      if (parent == null) {
        if (Editor.isEditor(child)) {
          // Reached the editor (top level); return the accumulated path
          return path
        } else {
          break
        }
      }
      // Index of this node within its parent
      const i = NODE_TO_INDEX.get(child)
      if (i == null) {
        break
      }
      // Prepend the index to the path
      path.unshift(i)
      // Continue walking up the tree
      child = parent
    }
    throw new Error(`Unable to find the path for Slate node: ${JSON.stringify(node)}`)
  },
  /**
   * Find the DOM node that implements DocumentOrShadowRoot for the editor.
   */
  findDocumentOrShadowRoot(editor: IDomEditor): Document | ShadowRoot {
    if (editor.isDestroyed) {
      return window.document
    }
    const el = DomEditor.toDOMNode(editor, editor)
    const root = el.getRootNode()
    if ((root instanceof Document || root instanceof ShadowRoot) && root.getSelection != null) {
      return root
    }
    return el.ownerDocument
  },
  /**
   * Get the parent node.
   * @param editor editor
   * @param node cur node
   */
  getParentNode(editor: IDomEditor | null, node: Node): Ancestor | null {
    return NODE_TO_PARENT.get(node) || null
  },
  /**
   * Get all ancestor nodes of the given node, nearest first.
   * @param editor editor
   * @param node cur node
   */
  getParentsNodes(editor: IDomEditor, node: Node): Ancestor[] {
    const nodes: Ancestor[] = []
    let curNode = node
    while (curNode !== editor && curNode != null) {
      const parentNode = DomEditor.getParentNode(editor, curNode)
      if (parentNode == null) {
        break
      } else {
        nodes.push(parentNode)
        curNode = parentNode
      }
    }
    return nodes
  },
  /**
   * Get the top-level node that contains the given node.
   * @param editor editor
   * @param curNode cur node
   */
  getTopNode(editor: IDomEditor, curNode: Node): Node {
    const path = DomEditor.findPath(editor, curNode)
    const topPath = [path[0]]
    return Node.get(editor, topPath)
  },
  /**
   * Find the native DOM element from a Slate node or editor.
   */
  toDOMNode(editor: IDomEditor, node: Node): HTMLElement {
    let domNode
    const isEditor = Editor.isEditor(node)
    if (isEditor) {
      domNode = EDITOR_TO_ELEMENT.get(editor)
    } else {
      const key = DomEditor.findKey(editor, node)
      domNode = KEY_TO_ELEMENT.get(key)
    }
    if (!domNode) {
      throw new Error(`Cannot resolve a DOM node from Slate node: ${JSON.stringify(node)}`)
    }
    return domNode
  },
  /**
   * Check if a DOM node is within the editor.
   */
  hasDOMNode(editor: IDomEditor, target: DOMNode, options: { editable?: boolean } = {}): boolean {
    const { editable = false } = options
    const editorEl = DomEditor.toDOMNode(editor, editor)
    let targetEl
    // COMPAT: In Firefox, reading `target.nodeType` will throw an error if
    // target is originating from an internal "restricted" element (e.g. a
    // stepper arrow on a number input). (2018/05/04)
    // https://github.com/ianstormtaylor/slate/issues/1819
    try {
      targetEl = (isDOMElement(target) ? target : target.parentElement) as HTMLElement
    } catch (err) {
      if (!err.message.includes('Permission denied to access property "nodeType"')) {
        throw err
      }
    }
    if (!targetEl) {
      return false
    }
    return (
      // An ancestor must carry the data-slate-editor attribute, i.e. the textarea
      targetEl.closest(`[data-slate-editor]`) === editorEl &&
      // The `editable` flag additionally requires a content-editable element or a zero-width placeholder
      (!editable || targetEl.isContentEditable || !!targetEl.getAttribute('data-slate-zero-width'))
    )
  },
  /**
   * Find a native DOM range from a Slate `range`.
   *
   * Notice: the returned range will always be ordinal regardless of the direction of Slate `range` due to DOM API limit.
   *
   * there is no way to create a reverse DOM Range using Range.setStart/setEnd
   * according to https://dom.spec.whatwg.org/#concept-range-bp-set.
   */
  toDOMRange(editor: IDomEditor, range: Range): DOMRange {
    const { anchor, focus } = range
    const isBackward = Range.isBackward(range)
    const domAnchor = DomEditor.toDOMPoint(editor, anchor)
    const domFocus = Range.isCollapsed(range) ? domAnchor : DomEditor.toDOMPoint(editor, focus)
    const window = DomEditor.getWindow(editor)
    const domRange = window.document.createRange()
    const [startNode, startOffset] = isBackward ? domFocus : domAnchor
    const [endNode, endOffset] = isBackward ? domAnchor : domFocus
    // A slate Point at zero-width Leaf always has an offset of 0 but a native DOM selection at
    // zero-width node has an offset of 1 so we have to check if we are in a zero-width node and
    // adjust the offset accordingly.
    const startEl = (isDOMElement(startNode) ? startNode : startNode.parentElement) as HTMLElement
    const isStartAtZeroWidth = !!startEl.getAttribute('data-slate-zero-width')
    const endEl = (isDOMElement(endNode) ? endNode : endNode.parentElement) as HTMLElement
    const isEndAtZeroWidth = !!endEl.getAttribute('data-slate-zero-width')
    domRange.setStart(startNode, isStartAtZeroWidth ? 1 : startOffset)
    domRange.setEnd(endNode, isEndAtZeroWidth ? 1 : endOffset)
    return domRange
  },
  /**
   * Find a native DOM selection point from a Slate point.
   */
  toDOMPoint(editor: IDomEditor, point: Point): DOMPoint {
    const [node] = Editor.node(editor, point.path)
    const el = DomEditor.toDOMNode(editor, node)
    let domPoint: DOMPoint | undefined
    // If we're inside a void node, force the offset to 0, otherwise the zero
    // width spacing character will result in an incorrect offset of 1
    if (Editor.void(editor, { at: point })) {
      // Inside a void node the offset must be 0
      point = { path: point.path, offset: 0 }
    }
    // For each leaf, we need to isolate its content, which means filtering
    // to its direct text and zero-width spans. (We have to filter out any
    // other siblings that may have been rendered alongside them.)
    const selector = `[data-slate-string], [data-slate-zero-width]`
    const texts = Array.from(el.querySelectorAll(selector))
    let start = 0
    for (const text of texts) {
      const domNode = text.childNodes[0] as HTMLElement
      if (domNode == null || domNode.textContent == null) {
        continue
      }
      const { length } = domNode.textContent
      const attr = text.getAttribute('data-slate-length')
      const trueLength = attr == null ? length : parseInt(attr, 10)
      const end = start + trueLength
      if (point.offset <= end) {
        const offset = Math.min(length, Math.max(0, point.offset - start))
        domPoint = [domNode, offset]
        break
      }
      start = end
    }
    if (!domPoint) {
      throw new Error(`Cannot resolve a DOM point from Slate point: ${JSON.stringify(point)}`)
    }
    return domPoint
  },
  /**
   * Find a Slate node from a native DOM `element`.
   */
  toSlateNode(editor: IDomEditor | null, domNode: DOMNode): Node {
    let domEl = isDOMElement(domNode) ? domNode : domNode.parentElement
    if (domEl && !domEl.hasAttribute('data-slate-node')) {
      domEl = domEl.closest(`[data-slate-node]`)
    }
    const node = domEl ? ELEMENT_TO_NODE.get(domEl as HTMLElement) : null
    if (!node) {
      throw new Error(`Cannot resolve a Slate node from DOM node: ${domEl}`)
    }
    return node
  },
  /**
   * Get the target range from a DOM `event`.
   */
  findEventRange(editor: IDomEditor, event: any): Range {
    if ('nativeEvent' in event) {
      // Compatibility with React synthetic events; not needed for plain DOM events
      event = event.nativeEvent
    }
    const { clientX: x, clientY: y, target } = event
    if (x == null || y == null) {
      throw new Error(`Cannot resolve a Slate range from a DOM event: ${event}`)
    }
    const node = DomEditor.toSlateNode(editor, event.target)
    const path = DomEditor.findPath(editor, node)
    // If the drop target is inside a void node, move it into either the
    // next or previous node, depending on which side the `x` and `y`
    // coordinates are closest to.
    if (Editor.isVoid(editor, node)) {
      const rect = target.getBoundingClientRect()
      const isPrev = editor.isInline(node)
        ? x - rect.left < rect.left + rect.width - x
        : y - rect.top < rect.top + rect.height - y
      const edge = Editor.point(editor, path, {
        edge: isPrev ? 'start' : 'end',
      })
      const point = isPrev ? Editor.before(editor, edge) : Editor.after(editor, edge)
      if (point) {
        const range = Editor.range(editor, point)
        return range
      }
    }
    // Else resolve a range from the caret position where the drop occured.
    let domRange
    const { document } = this.getWindow(editor)
    // COMPAT: In Firefox, `caretRangeFromPoint` doesn't exist. (2016/07/25)
    if (document.caretRangeFromPoint) {
      domRange = document.caretRangeFromPoint(x, y)
    } else {
      const position = document.caretPositionFromPoint(x, y)
      if (position) {
        domRange = document.createRange()
        domRange.setStart(position.offsetNode, position.offset)
        domRange.setEnd(position.offsetNode, position.offset)
      }
    }
    if (!domRange) {
      throw new Error(`Cannot resolve a Slate range from a DOM event: ${event}`)
    }
    // Resolve a Slate range from the DOM range.
    const range = DomEditor.toSlateRange(editor, domRange, {
      exactMatch: false,
      suppressThrow: false,
    })
    return range
  },
  /**
   * Find a Slate range from a DOM range or selection.
   */
  toSlateRange<T extends boolean>(
    editor: IDomEditor,
    domRange: DOMRange | DOMStaticRange | DOMSelection,
    options: {
      exactMatch: T
      suppressThrow: T
    }
  ): T extends true ? Range | null : Range {
    const { exactMatch, suppressThrow } = options
    const el = isDOMSelection(domRange) ? domRange.anchorNode : domRange.startContainer
    let anchorNode
    let anchorOffset
    let focusNode
    let focusOffset
    let isCollapsed
    if (el) {
      if (isDOMSelection(domRange)) {
        anchorNode = domRange.anchorNode
        anchorOffset = domRange.anchorOffset
        focusNode = domRange.focusNode
        focusOffset = domRange.focusOffset
        // COMPAT: There's a bug in chrome that always returns `true` for
        // `isCollapsed` for a Selection that comes from a ShadowRoot.
        // (2020/08/08)
        // https://bugs.chromium.org/p/chromium/issues/detail?id=447523
        if (IS_CHROME && hasShadowRoot()) {
          isCollapsed =
            domRange.anchorNode === domRange.focusNode &&
            domRange.anchorOffset === domRange.focusOffset
        } else {
          isCollapsed = domRange.isCollapsed
        }
      } else {
        anchorNode = domRange.startContainer
        anchorOffset = domRange.startOffset
        focusNode = domRange.endContainer
        focusOffset = domRange.endOffset
        isCollapsed = domRange.collapsed
      }
    }
    if (anchorNode == null || focusNode == null || anchorOffset == null || focusOffset == null) {
      throw new Error(`Cannot resolve a Slate range from DOM range: ${domRange}`)
    }
    const anchor = DomEditor.toSlatePoint(editor, [anchorNode, anchorOffset], {
      exactMatch,
      suppressThrow,
    })
    if (!anchor) {
      return null as T extends true ? Range | null : Range
    }
    const focus = isCollapsed
      ? anchor
      : DomEditor.toSlatePoint(editor, [focusNode, focusOffset], { exactMatch, suppressThrow })
    if (!focus) {
      return null as T extends true ? Range | null : Range
    }
    // return { anchor, focus } as unknown as T extends true ? Range | null : Range
    let range: Range = { anchor: anchor as Point, focus: focus as Point }
    // if the selection is a hanging range that ends in a void
    // and the DOM focus is an Element
    // (meaning that the selection ends before the element)
    // unhang the range to avoid mistakenly including the void
    if (
      Range.isExpanded(range) &&
      Range.isForward(range) &&
      isDOMElement(focusNode) &&
      Editor.void(editor, { at: range.focus, mode: 'highest' })
    ) {
      range = Editor.unhangRange(editor, range, { voids: true })
    }
    return range as unknown as T extends true ? Range | null : Range
  },
  /**
   * Find a Slate point from a DOM selection's `domNode` and `domOffset`.
   */
  toSlatePoint<T extends boolean>(
    editor: IDomEditor,
    domPoint: DOMPoint,
    options: {
      exactMatch: T
      suppressThrow: T
    }
  ): T extends true ? Point | null : Point {
    const { exactMatch, suppressThrow } = options
    const [nearestNode, nearestOffset] = exactMatch ? domPoint : normalizeDOMPoint(domPoint)
    const parentNode = nearestNode.parentNode as DOMElement
    let textNode: DOMElement | null = null
    let offset = 0
    if (parentNode) {
      const voidNode = parentNode.closest('[data-slate-void="true"]')
      let leafNode = parentNode.closest('[data-slate-leaf]')
      let domNode: DOMElement | null = null
      // Calculate how far into the text node the `nearestNode` is, so that we
      // can determine what the offset relative to the text node is.
      if (leafNode) {
        textNode = leafNode.closest('[data-slate-node="text"]')!
        const window = DomEditor.getWindow(editor)
        const range = window.document.createRange()
        range.setStart(textNode, 0)
        range.setEnd(nearestNode, nearestOffset)
        const contents = range.cloneContents()
        const removals = [
          ...toArray(contents.querySelectorAll('[data-slate-zero-width]')),
          ...toArray(contents.querySelectorAll('[contenteditable=false]')),
        ]
        removals.forEach(el => {
          el!.parentNode!.removeChild(el)
        })
        // COMPAT: Edge has a bug where Range.prototype.toString() will
        // convert \n into \r\n. The bug causes a loop when slate-react
        // attempts to reposition its cursor to match the native position. Use
        // textContent.length instead.
        // https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/10291116/
        offset = contents.textContent!.length
        domNode = textNode
      } else if (voidNode) {
        // For void nodes, the element with the offset key will be a cousin, not an
        // ancestor, so find it by going down from the nearest void parent.
        leafNode = voidNode.querySelector('[data-slate-leaf]')!
        // COMPAT: In read-only editors the leaf is not rendered.
        if (!leafNode) {
          offset = 1
        } else {
          textNode = leafNode.closest('[data-slate-node="text"]')!
          domNode = leafNode
          offset = domNode.textContent!.length
          domNode.querySelectorAll('[data-slate-zero-width]').forEach(el => {
            offset -= el.textContent!.length
          })
        }
      }
      if (
        domNode &&
        offset === domNode.textContent!.length &&
        // COMPAT: If the parent node is a Slate zero-width space, this is
        // because the text node should have no characters. However, during IME
        // composition the ASCII characters will be prepended to the zero-width
        // space, so subtract 1 from the offset to account for the zero-width
        // space character.
        (parentNode.hasAttribute('data-slate-zero-width') ||
          // COMPAT: In Firefox, `range.cloneContents()` returns an extra trailing '\n'
          // when the document ends with a new-line character. This results in the offset
          // length being off by one, so we need to subtract one to account for this.
          (IS_FIREFOX && domNode.textContent?.endsWith('\n')))
      ) {
        offset--
      }
    }
    if (!textNode) {
      if (suppressThrow) {
        return null as T extends true ? Point | null : Point
      }
      throw new Error(`Cannot resolve a Slate point from DOM point: ${domPoint}`)
    }
    // COMPAT: If someone is clicking from one Slate editor into another,
    // the select event fires twice, once for the old editor's `element`
    // first, and then afterwards for the correct `element`. (2017/03/03)
    const slateNode = DomEditor.toSlateNode(editor, textNode!)
    const path = DomEditor.findPath(editor, slateNode)
    return { path, offset } as T extends true ? Point | null : Point
  },
  // Whether both endpoints of the range exist in the document.
  hasRange(editor: IDomEditor, range: Range): boolean {
    const { anchor, focus } = range
    return Editor.hasPath(editor, anchor.path) && Editor.hasPath(editor, focus.path)
  },
  // Element nodes have a `type`; all other nodes yield the empty string.
  getNodeType(node: Node): string {
    if (Element.isElement(node)) {
      return node.type
    }
    return ''
  },
  checkNodeType(node: Node, type: string) {
    return this.getNodeType(node) === type
  },
  // Concatenated plain text of the given nodes.
  getNodesStr(nodes: Node[]): string {
    return nodes.map(node => Node.string(node)).join('')
  },
  // All Element nodes covered by the current selection.
  getSelectedElems(editor: IDomEditor): Element[] {
    const elems: Element[] = []
    const nodeEntries = Editor.nodes(editor, { universal: true })
    for (let nodeEntry of nodeEntries) {
      const [node] = nodeEntry
      if (Element.isElement(node)) elems.push(node)
    }
    return elems
  },
  // First selected node of the given type, or null.
  getSelectedNodeByType(editor: IDomEditor, type: string): Node | null {
    const [nodeEntry] = Editor.nodes(editor, {
      match: n => this.checkNodeType(n, type),
      universal: true,
    })
    if (nodeEntry == null) return null
    return nodeEntry[0]
  },
  // First selected text node, or null.
  getSelectedTextNode(editor: IDomEditor): Node | null {
    const [nodeEntry] = Editor.nodes(editor, {
      match: n => Text.isText(n),
      universal: true,
    })
    if (nodeEntry == null) return null
    return nodeEntry[0]
  },
  // Whether the given node is (entirely) within the current selection.
  isNodeSelected(editor: IDomEditor, node: Node): boolean {
    const [nodeEntry] = Editor.nodes(editor, {
      match: n => n === node,
      universal: true,
    })
    if (nodeEntry == null) return false
    const [n] = nodeEntry
    if (n === node) return true
    return false
  },
  // Whether either end of the selection sits at the end of the given path.
  isSelectionAtLineEnd(editor: IDomEditor, path: Path): boolean {
    const { selection } = editor
    if (!selection) return false
    const isAtLineEnd =
      Editor.isEnd(editor, selection.anchor, path) || Editor.isEnd(editor, selection.focus, path)
    return isAtLineEnd
  },
  // Get the TextArea instance bound to the editor.
  getTextarea(editor: IDomEditor): TextArea {
    const textarea = EDITOR_TO_TEXTAREA.get(editor)
    if (textarea == null) throw new Error('Cannot find textarea instance by editor')
    return textarea
  },
  // Get the Toolbar instance bound to the editor, if any.
  getToolbar(editor: IDomEditor): Toolbar | null {
    return EDITOR_TO_TOOLBAR.get(editor) || null
  },
  // Get the HoverBar instance bound to the editor, if any.
  getHoverbar(editor: IDomEditor): HoverBar | null {
    return EDITOR_TO_HOVER_BAR.get(editor) || null
  },
  // Normalize editor content by running normalizeNode over every top-level node.
  normalizeContent(editor: IDomEditor) {
    editor.children.forEach((node, index) => {
      editor.normalizeNode([node, [index]])
    })
  },
  /**
   * How many more characters can be inserted before hitting maxLength.
   * Fires onMaxLength once the limit has been reached.
   * @param editor editor
   */
  getLeftLengthOfMaxLength(editor: IDomEditor): number {
    const { maxLength, onMaxLength } = editor.getConfig()
    // No (valid) maxLength configured: effectively unlimited
    if (typeof maxLength !== 'number' || maxLength <= 0) return Infinity
    const editorText = editor.getText().replace(/\r|\n|(\r\n)/g, '') // strip newlines
    const curLength = editorText.length
    const leftLength = maxLength - curLength
    if (leftLength <= 0) {
      // maxLength reached — notify so no more text gets inserted
      if (onMaxLength) onMaxLength(editor)
    }
    return leftLength
  },
  // Clean up "exposed" text nodes (frequently produced by IME/pinyin input).
  cleanExposedTexNodeInSelectionBlock(editor: IDomEditor) {
    // After select-all + delete, newly added text nodes may sit outside any
    // paragraph, so walk the textArea's direct children and remove them
    const { $textArea } = DomEditor.getTextarea(editor)
    const childNodes = $textArea?.[0].childNodes
    if (childNodes) {
      for (const node of Array.from(childNodes)) {
        if (node.nodeType === 3) {
          node.remove()
        } else {
          break
        }
      }
    }
    const nodeEntries = Editor.nodes(editor, {
      match: n => {
        if (Element.isElement(n)) {
          if (!editor.isInline(n)) {
            // match block elements
            return true
          }
        }
        return false
      },
      universal: true,
    })
    for (let nodeEntry of nodeEntries) {
      if (nodeEntry != null) {
        const n = nodeEntry[0]
        const elem = DomEditor.toDOMNode(editor, n)
        // Only walk within elem, for performance
        walkTextNodes(elem, (textNode, parent) => {
          const $parent = $(parent)
          if ($parent.attr('data-slate-string')) {
            return // normal text
          }
          if ($parent.attr('data-slate-zero-width')) {
            return // normal (zero-width placeholder) text
          }
          // Exposed text node — remove it
          parent.removeChild(textNode)
        })
      }
    }
  },
}
import {Injectable, NgZone} from '@angular/core';
import {BehaviorSubject} from 'rxjs/BehaviorSubject';
import {Observable} from 'rxjs/Observable';
import {Observer} from 'rxjs/Observer';
import {fromByteArray} from 'base64-js';
/**
 * Lifts `fn` into an Observable operator: for every value emitted by the
 * source, runs the Observable returned by `fn(value)` to completion (for its
 * side effects only) and then re-emits the original value.
 *
 * Fix: the callback was previously typed `(T) => Observable<any>`, which
 * declares a parameter *named* `T` of implicit `any` rather than a parameter
 * of type `T`.
 */
export function doAsync<T>(fn: (value: T) => Observable<any>): any {
  return (obs: Observable<T>) => obs
    .concatMap(value => fn(value)
      // reduce discards fn's emissions and completes with the original value.
      .reduce(() => value, value));
}
import 'rxjs/add/observable/concat';
import 'rxjs/add/observable/defer';
import 'rxjs/add/observable/empty';
import 'rxjs/add/observable/from';
import 'rxjs/add/observable/fromEvent';
import 'rxjs/add/observable/merge';
import 'rxjs/add/observable/of';
import 'rxjs/add/observable/timer';
import 'rxjs/add/operator/concatMap';
import 'rxjs/add/operator/do';
import 'rxjs/add/operator/expand';
import 'rxjs/add/operator/filter';
import 'rxjs/add/operator/first';
import 'rxjs/add/operator/let';
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/mergeMap';
import 'rxjs/add/operator/publishReplay';
import 'rxjs/add/operator/reduce';
import 'rxjs/add/operator/share';
import 'rxjs/add/operator/switchMap';
import 'rxjs/add/operator/take';
import 'rxjs/add/operator/takeWhile';
// Options for requesting a push-notification subscription.
export interface PushOptions {
  // Application server public key — presumably the VAPID key forwarded to
  // PushManager.subscribe(); TODO confirm expected encoding at call sites.
  applicationServerKey?: string;
}
function fromPromise<T>(promiseFn: (() => Promise<T>)): Observable<T> {
return Observable.create(observer => {
promiseFn()
.then(v => observer.next(v))
.then(() => observer.complete())
.catch(err => observer.error(err));
});
}
// Service-worker update notification: either an update is pending
// installation ("pending") or a new version is being activated ("activation").
export interface UpdateEvent {
  type: "pending" | "activation";
  // Version identifier of the update, when known.
  version?: string;
}
// A push notification registration, including the endpoint URL and encryption keys.
export class NgPushRegistration {
  private ps: PushSubscription;

  constructor(ps: any) {
    this.ps = ps;
  }

  // Base64-encoded key material for the given method ('p256dh' by default).
  key(method: string = 'p256dh'): string {
    return fromByteArray(new Uint8Array(this.ps.getKey(method)));
  }

  // Get the authentication key
  auth(): string {
    return this.key('auth');
  }

  // Push endpoint URL of the underlying subscription.
  get url(): string {
    return this.ps.endpoint;
  }

  toJSON(): Object {
    return this.ps.toJSON();
  }

  unsubscribe(): Observable<boolean> {
    // TODO: switch to Observable.fromPromise when it's not broken.
    return fromPromise(() => this.ps.unsubscribe());
  }
}
@Injectable()
export class NgServiceWorker {
// Typed reference to navigator.serviceWorker.
private container: ServiceWorkerContainer;
// Always returns the current controlling worker, or undefined if there isn't one.
private controllingWorker = new BehaviorSubject<ServiceWorker>(undefined);
// Always returns the current controlling worker, and waits for one to exist
// if it does not.
private awaitSingleControllingWorker: Observable<ServiceWorker>;
push: Observable<any>;
updates: Observable<UpdateEvent>;
constructor(private zone: NgZone) {
// Extract a typed version of navigator.serviceWorker.
this.container = (typeof navigator === 'object') && navigator['serviceWorker'] as ServiceWorkerContainer;
if (!!this.container) {
// Final Observable that will always give back the current controlling worker,
// and follow changes over time.
Observable
// Combine current and future controllers.
.concat(
// Current controlling worker (if any).
Observable.of(this.container.controller),
// Future changes of the controlling worker.
Observable
// Track changes of the controlling worker via the controllerchange event.
.fromEvent(this.container, 'controllerchange')
// Read the new controller when it changes.
.map(_ => this.container.controller)
)
// Cache the latest controller for immediate delivery.
.subscribe(
worker => this.controllingWorker.next(worker),
err => this.controllingWorker.error(err),
() => this.controllingWorker.complete(),
);
}
// To make one-off calls to the worker, awaitSingleControllingWorker waits for
// a controlling worker to exist.
this.awaitSingleControllingWorker = this
.controllingWorker
.filter(worker => !!worker)
.take(1);
// Setup the push Observable as a broadcast mechanism for push notifications.
this.push = Observable
.defer(() => this.send({cmd: 'push'}))
.share();
// Setup the updates Observable as a broadcast mechanism for update notifications.
this.updates = Observable
.defer(() => this.send({cmd: 'update'}))
.share();
}
private registrationForWorker(): ((obs: Observable<ServiceWorker>) => Observable<ServiceWorkerRegistration>) {
return (obs: Observable<ServiceWorker>) => obs
.switchMap(worker => fromPromise(() => this.container.getRegistrations() as Promise<ServiceWorkerRegistration[]>)
.expand(regs => Observable.from(regs))
.filter(reg => reg.active === worker)
.take(1)
);
}
// Sends a single message to the worker, and awaits one (or more) responses.
private sendToWorker(worker: ServiceWorker, message: Object): Observable<any> {
// A MessageChannel is sent with the message so responses can be correlated.
const channel = new MessageChannel()
// Observe replies.
const result = new Observable<any>(observer => {
let cancelId = null;
const listener = (event: MessageEvent) => {
const data = event.data;
if (!!data && typeof data === "object" && data.hasOwnProperty('$ngsw') && data.hasOwnProperty('id')) {
cancelId = data['id'];
} else if (data === null) {
observer.complete();
channel.port1.removeEventListener('message', listener);
return;
} else {
observer.next(data);
}
};
channel.port1.addEventListener('message', listener);
return () => {
channel.port1.removeEventListener('message', listener);
this.sendToWorker(worker, {cmd: 'cancel', id: cancelId});
};
})
// Instead of complicating this with 'close' events, complete on a null value.
.takeWhile(v => !!v)
// The message will be sent before the consumer has a chance to subscribe to
// the response Observable, so publishReplay() records any responses and ensures
// they arrive properly.
.publishReplay();
// Connecting actually creates the event subscription and starts recording
// for replay.
result.connect();
// Start receiving message(s).
channel.port1.start();
// Set a magic value in the message.
message['$ngsw'] = true;
worker.postMessage(message, [channel.port2]);
return result;
}
// Send a message to the current controlling worker, waiting for one if needed.
private send(message: Object): Observable<any> {
return this
// Wait for a controlling worker to exist.
.awaitSingleControllingWorker
// Send the message and await any replies. switchMap is chosen so if a new
// controlling worker arrives somehow, the message will still get through.
.switchMap(worker => this.sendToWorker(worker, message));
}
// Send a 'ping' to the worker. The returned Observable will complete when the worker
// acknowledges the message. This provides a test that the worker is alive and listening.
ping(): Observable<any> {
return this.send({
cmd: 'ping'
});
}
log(): Observable<string> {
return this.send({
cmd: 'log'
});
}
activateUpdate(version: string): Observable<boolean> {
return this.send({
cmd: 'activateUpdate',
version,
});
}
registerForPush(pushOptions: PushOptions = {}): Observable<NgPushRegistration> {
return this
// Wait for a controlling worker to exist.
.awaitSingleControllingWorker
// Get the ServiceWorkerRegistration for the worker.
.let(this.registrationForWorker())
// Access the PushManager used to control push notifications.
.map((worker: ServiceWorkerRegistration) => worker.pushManager)
.switchMap(pushManager => {
// Create an Observable to wrap the Promises of the PushManager API.
// TODO: switch to Observable.fromPromise when it's not broken.
// This is extracted as a variable so Typescript infers types correctly.
let reg: Observable<NgPushRegistration> = Observable.create(observer => {
// Function that maps subscriptions to an Angular-specific representation.
let regFromSub = (sub: PushSubscription) => new NgPushRegistration(sub);
pushManager
// First, check for an existing subscription.
.getSubscription()
.then(sub => {
// If there is one, we don't need to register, just return it.
if (!!sub) {
return regFromSub(sub);
}
// No existing subscription, register (with userVisibleOnly: true).
let options = {
userVisibleOnly: true,
} as Object;
if (pushOptions.applicationServerKey) {
let key = atob(pushOptions
.applicationServerKey
.replace(/_/g, '/')
.replace(/-/g, '+'));
let applicationServerKey = new Uint8Array(new ArrayBuffer(key.length));
for (let i = 0; i < key.length; i++) {
applicationServerKey[i] = key.charCodeAt(i);
}
options['applicationServerKey'] = applicationServerKey;
}
return pushManager
.subscribe(options)
.then(regFromSub);
})
// Map from promises to the Observable being returned.
.then(sub => this.zone.run(() => observer.next(sub)))
.then(() => this.zone.run(() => observer.complete()))
.catch(err => this.zone.run(() => observer.error(err)));
});
return reg;
});
}
checkForUpdate(): Observable<boolean> {
return this.send({cmd: 'checkUpdate'});
}
} | the_stack |
import { TextEncoder, TextDecoder } from 'util';
import TCPSocket from './TCPSocket';
/**
 * A packet class with utilities for reading and writing from a stream.
 * All read*() methods consume bytes from the front of `buffer`; all write*()
 * methods append to the end of it.
 * @class
 */
class Packet {
  /** The buffered data in the packet. */
  public buffer: Buffer = Buffer.alloc(0);
  private encoder: TextEncoder = new TextEncoder();
  private decoder: TextDecoder = new TextDecoder('utf-8');

  /**
   * Automatically read a packet from a stream using the Minecraft 1.7+ format.
   * @param {TCPSocket} socket The TCP socket to read data from
   * @returns {Promise<Packet>} The buffered packet with the data
   * @async
   */
  static async from(socket: TCPSocket): Promise<Packet> {
    const length = await socket.readVarInt();
    if (length < 1) return new Packet();
    const data = await socket.readBytes(length);
    const packet = new Packet();
    packet.buffer = data;
    return packet;
  }

  /**
   * Throws unless at least `length` bytes remain in the buffer.
   * Centralizes the bounds check (and its error message, which was previously
   * copy-pasted with the wrong method name in several readers).
   * @param {number} length The number of bytes the caller is about to read
   * @param {string} method The calling method's name, used in the error message
   */
  private ensureReadable(length: number, method: string): void {
    if (this.buffer.byteLength < length) {
      throw new Error(`Cannot ${method}() as buffer is empty or too small for type`);
    }
  }

  /**
   * Reads a byte from the packet data
   * @returns {number} The byte read from the packet
   */
  readByte(): number {
    this.ensureReadable(1, 'readByte');
    const value = this.buffer[0];
    this.buffer = this.buffer.slice(1);
    return value;
  }

  /**
   * Reads bytes from the packet data
   * Fix: the bounds check previously only required a non-empty buffer (and
   * reported 'readByte'); it now requires `length` bytes to be present.
   * @param {number} length The number of bytes to read
   * @returns {Buffer} The bytes read from the packet
   */
  readBytes(length: number): Buffer {
    this.ensureReadable(length, 'readBytes');
    const value = this.buffer.slice(0, length);
    this.buffer = this.buffer.slice(length);
    return value;
  }

  /**
   * Write bytes to the packet data
   * @param {...number} values The bytes to write to the packet
   */
  writeByte(...values: number[]): void {
    this.buffer = Buffer.concat([this.buffer, Uint8Array.from(values)]);
  }

  /**
   * Write bytes to the packet data
   * @param {Buffer} data The bytes to write to the packet
   */
  writeBuffer(data: Buffer | Uint8Array): void {
    this.buffer = Buffer.concat([this.buffer, data]);
  }

  /**
   * Reads a short (int16, big-endian) from the packet data
   * @returns {number} The int16 read from the packet
   */
  readShortBE(): number {
    this.ensureReadable(2, 'readShortBE');
    const value = this.buffer.readInt16BE();
    this.buffer = this.buffer.slice(2);
    return value;
  }

  /**
   * Writes a short (int16, big-endian) to the packet data
   * @param {number} value The int16 written to the packet
   */
  writeShortBE(value: number): void {
    const buf = Buffer.alloc(2);
    buf.writeInt16BE(value);
    this.writeBuffer(buf);
  }

  /**
   * Reads a short (int16, little-endian) from the packet data
   * @returns {number} The int16 read from the packet
   */
  readShortLE(): number {
    this.ensureReadable(2, 'readShortLE');
    const value = this.buffer.readInt16LE();
    this.buffer = this.buffer.slice(2);
    return value;
  }

  /**
   * Writes a short (int16, little-endian) to the packet data
   * @param {number} value The int16 written to the packet
   */
  writeShortLE(value: number): void {
    const buf = Buffer.alloc(2);
    buf.writeInt16LE(value);
    this.writeBuffer(buf);
  }

  /**
   * Reads a short (uint16, big-endian) from the packet data
   * @returns {number} The uint16 read from the packet
   */
  readUShortBE(): number {
    this.ensureReadable(2, 'readUShortBE');
    const value = this.buffer.readUInt16BE();
    this.buffer = this.buffer.slice(2);
    return value;
  }

  /**
   * Writes a short (uint16, big-endian) to the packet data
   * @param {number} value The uint16 written to the packet
   */
  writeUShortBE(value: number): void {
    const buf = Buffer.alloc(2);
    buf.writeUInt16BE(value);
    this.writeBuffer(buf);
  }

  /**
   * Reads a short (uint16, little-endian) from the packet data
   * @returns {number} The uint16 read from the packet
   */
  readUShortLE(): number {
    this.ensureReadable(2, 'readUShortLE');
    const value = this.buffer.readUInt16LE();
    this.buffer = this.buffer.slice(2);
    return value;
  }

  /**
   * Writes a short (uint16, little-endian) to the packet data
   * @param {number} value The uint16 written to the packet
   */
  writeUShortLE(value: number): void {
    const buf = Buffer.alloc(2);
    buf.writeUInt16LE(value);
    this.writeBuffer(buf);
  }

  /**
   * Reads an int (int32, big-endian) from the packet data
   * @returns {number} The int32 read from the packet
   */
  readIntBE(): number {
    this.ensureReadable(4, 'readIntBE');
    const value = this.buffer.readInt32BE();
    this.buffer = this.buffer.slice(4);
    return value;
  }

  /**
   * Writes an int (int32, big-endian) to the packet data
   * @param {number} value The int32 written to the packet
   */
  writeIntBE(value: number): void {
    const buf = Buffer.alloc(4);
    buf.writeInt32BE(value);
    this.writeBuffer(buf);
  }

  /**
   * Reads an int (int32, little-endian) from the packet data
   * @returns {number} The int32 read from the packet
   */
  readIntLE(): number {
    this.ensureReadable(4, 'readIntLE');
    const value = this.buffer.readInt32LE();
    this.buffer = this.buffer.slice(4);
    return value;
  }

  /**
   * Writes an int (int32, little-endian) to the packet data
   * @param {number} value The int32 written to the packet
   */
  writeIntLE(value: number): void {
    const buf = Buffer.alloc(4);
    buf.writeInt32LE(value);
    this.writeBuffer(buf);
  }

  /**
   * Reads an int (uint32, big-endian) from the packet data
   * @returns {number} The uint32 read from the packet
   */
  readUIntBE(): number {
    this.ensureReadable(4, 'readUIntBE');
    const value = this.buffer.readUInt32BE();
    this.buffer = this.buffer.slice(4);
    return value;
  }

  /**
   * Writes an int (uint32, big-endian) to the packet data
   * @param {number} value The uint32 written to the packet
   */
  writeUIntBE(value: number): void {
    const buf = Buffer.alloc(4);
    buf.writeUInt32BE(value);
    this.writeBuffer(buf);
  }

  /**
   * Reads an int (uint32, little-endian) from the packet data
   * @returns {number} The uint32 read from the packet
   */
  readUIntLE(): number {
    this.ensureReadable(4, 'readUIntLE');
    const value = this.buffer.readUInt32LE();
    this.buffer = this.buffer.slice(4);
    return value;
  }

  /**
   * Writes an int (uint32, little-endian) to the packet data
   * @param {number} value The uint32 written to the packet
   */
  writeUIntLE(value: number): void {
    const buf = Buffer.alloc(4);
    buf.writeUInt32LE(value);
    this.writeBuffer(buf);
  }

  /**
   * Reads a long (int64, big-endian) from the packet data
   * @returns {bigint} The int64 read from the packet
   */
  readLongBE(): bigint {
    this.ensureReadable(8, 'readLongBE');
    const value = this.buffer.readBigInt64BE();
    this.buffer = this.buffer.slice(8);
    return value;
  }

  /**
   * Writes a long (int64, big-endian) to the packet data
   * @param {bigint} value The int64 written to the packet
   */
  writeLongBE(value: bigint): void {
    const buf = Buffer.alloc(8);
    buf.writeBigInt64BE(value);
    this.writeBuffer(buf);
  }

  /**
   * Reads a long (int64, little-endian) from the packet data
   * @returns {bigint} The int64 read from the packet
   */
  readLongLE(): bigint {
    this.ensureReadable(8, 'readLongLE');
    const value = this.buffer.readBigInt64LE();
    this.buffer = this.buffer.slice(8);
    return value;
  }

  /**
   * Writes a long (int64, little-endian) to the packet data
   * @param {bigint} value The int64 written to the packet
   */
  writeLongLE(value: bigint): void {
    const buf = Buffer.alloc(8);
    buf.writeBigInt64LE(value);
    this.writeBuffer(buf);
  }

  /**
   * Reads a long (uint64, big-endian) from the packet data
   * @returns {bigint} The uint64 read from the packet
   */
  readULongBE(): bigint {
    this.ensureReadable(8, 'readULongBE');
    const value = this.buffer.readBigUInt64BE();
    this.buffer = this.buffer.slice(8);
    return value;
  }

  /**
   * Writes a long (uint64, big-endian) to the packet data
   * @param {bigint} value The uint64 written to the packet
   */
  writeULongBE(value: bigint): void {
    const buf = Buffer.alloc(8);
    buf.writeBigUInt64BE(value);
    this.writeBuffer(buf);
  }

  /**
   * Reads a long (uint64, little-endian) from the packet data
   * @returns {bigint} The uint64 read from the packet
   */
  readULongLE(): bigint {
    this.ensureReadable(8, 'readULongLE');
    const value = this.buffer.readBigUInt64LE();
    this.buffer = this.buffer.slice(8);
    return value;
  }

  /**
   * Writes a long (uint64, little-endian) to the packet data
   * @param {bigint} value The uint64 written to the packet
   */
  writeULongLE(value: bigint): void {
    const buf = Buffer.alloc(8);
    buf.writeBigUInt64LE(value);
    this.writeBuffer(buf);
  }

  /**
   * Reads a varint from the packet data
   * @returns {number} The varint read from the packet
   * @throws {Error} If the varint runs past its 5-byte maximum
   */
  readVarInt(): number {
    let numRead = 0;
    let result = 0;
    let read: number;
    do {
      // A 32-bit varint is at most 5 bytes; refuse a 6th continuation byte.
      // (The original also had an unreachable `numRead > 5` check after the
      // increment — removed as dead code.)
      if (numRead > 4) throw new Error('VarInt exceeds data bounds');
      read = this.readByte();
      // Low 7 bits are payload, shifted into place by read order.
      result |= (read & 0b01111111) << (7 * numRead);
      numRead++;
    } while ((read & 0b10000000) != 0);
    return result;
  }

  /**
   * Writes a varint to the packet data
   * @param {number} value The varint written to the packet
   */
  writeVarInt(value: number): void {
    do {
      let temp = value & 0b01111111;
      // Unsigned shift so negative values terminate.
      value >>>= 7;
      if (value != 0) {
        // High bit marks a continuation byte.
        temp |= 0b10000000;
      }
      this.writeByte(temp);
    } while (value != 0);
  }

  /**
   * Reads a varint-prefixed UTF-8 string from the packet data
   * @returns {string} The string read from the packet
   */
  readString(): string {
    const length = this.readVarInt();
    const value = this.readBytes(length);
    return this.decoder.decode(value);
  }

  /**
   * Writes a varint-prefixed UTF-8 string to the packet data
   * Fix: the length prefix is now the encoded UTF-8 byte count; previously
   * `value.length` (UTF-16 code units) was written, which corrupted any
   * string containing multi-byte characters on readString().
   * @param {string} value The string written to the packet
   * @param {boolean} [writeLength=true] Write the length to the packet
   */
  writeString(value: string, writeLength = true): void {
    const bytes = this.encoder.encode(value);
    if (writeLength) this.writeVarInt(bytes.byteLength);
    this.writeBuffer(bytes);
  }

  /**
   * Reads a null terminated UTF-8 string from the packet data
   * @returns {string} The string read from the packet
   */
  readStringNT(): string {
    // Accumulate in a plain array (O(n)) instead of rebuilding a Uint8Array
    // per byte (O(n^2)) as before.
    const bytes: number[] = [];
    let read: number;
    while ((read = this.readByte()) !== 0) {
      bytes.push(read);
    }
    return this.decoder.decode(Uint8Array.from(bytes));
  }

  /**
   * Writes a null terminated string to the packet
   * @param {string} value The string to write to the packet
   */
  writeStringNT(value: string): void {
    this.writeBuffer(this.encoder.encode(value));
    this.writeByte(0);
  }
}
export default Packet; | the_stack |
import DataSet from '@antv/data-set';
import { Chart } from '../../../src';
import { createDiv } from '../../util/dom';
// Animation smoke tests: each case builds a G2 chart in a detached div,
// renders it, and (in most cases) swaps in new data via changeData() after a
// delay to exercise update animations.
// NOTE(review): no expect() assertions are made and each it() callback
// returns synchronously, so the setTimeout'd changeData() calls fire after
// the test has already passed — TODO: confirm whether these are intended as
// visual/manual checks or should use `done`/async to verify the update path.
describe('Test charts animate', () => {
  it('Column Chart', () => {
    const div = createDiv();
    div.style.display = 'inline-block';
    const chart = new Chart({
      container: div,
      width: 250,
      height: 200,
    });
    chart.data([
      { year: '1951 年', sales: 38 },
      { year: '1952 年', sales: 52 },
      { year: '1956 年', sales: 61 },
      { year: '1957 年', sales: 145 },
      { year: '1958 年', sales: 48 },
      { year: '1959 年', sales: 38 },
      { year: '1960 年', sales: 38 },
      { year: '1962 年', sales: 38 },
    ]);
    chart.scale('sales', {
      max: 200,
      nice: false,
    });
    chart.interval().position('year*sales').label('sales');
    chart.render();
    // Delayed data swap exercises the update animation (runs post-test; see note above).
    setTimeout(() => {
      chart.changeData([
        { year: '1951 年', sales: 138 },
        { year: '1952 年', sales: 52 },
        { year: '1956 年', sales: 11 },
        { year: '1957 年', sales: 45 },
        { year: '1970 年', sales: 145 },
      ]);
    }, 1000);
  });
  it('Rose Chart', () => {
    const div = createDiv();
    div.style.display = 'inline-block';
    const chart = new Chart({
      container: div,
      width: 250,
      height: 200,
    });
    chart.data([
      { year: '1951 年', sales: 38 },
      { year: '1952 年', sales: 52 },
      { year: '1956 年', sales: 61 },
      { year: '1957 年', sales: 145 },
      { year: '1958 年', sales: 48 },
      { year: '1959 年', sales: 38 },
      { year: '1960 年', sales: 38 },
      { year: '1962 年', sales: 38 },
    ]);
    chart.coordinate('polar');
    chart
      .interval()
      .position('year*sales')
      .label('sales', {
        offset: -20,
        layout: {
          type: 'limit-in-shape',
        },
      });
    chart.render();
    setTimeout(() => {
      chart.changeData([
        { year: '1951 年', sales: 138 },
        { year: '1952 年', sales: 52 },
        { year: '1956 年', sales: 11 },
        { year: '1957 年', sales: 45 },
        { year: '1970 年', sales: 145 },
      ]);
    }, 1000);
  });
  it('Bar Chart', () => {
    const data = [
      { country: '巴西', population: 18203 },
      { country: '印尼', population: 23489 },
      { country: '美国', population: 29034 },
      { country: '印度', population: 104970 },
      { country: '中国', population: 131744 },
    ];
    const div = createDiv();
    div.style.display = 'inline-block';
    const chart = new Chart({
      container: div,
      width: 250,
      height: 200,
    });
    chart.data(data);
    chart.coordinate().transpose();
    // chart.coordinate('polar').transpose();
    chart.interval().position('country*population');
    chart.render();
  });
  it('Stack Column Chart', () => {
    const data = [
      { name: 'London', 月份: 'Jan.', 月均降雨量: 18.9 },
      { name: 'London', 月份: 'Feb.', 月均降雨量: 28.8 },
      { name: 'London', 月份: 'Mar.', 月均降雨量: 39.3 },
      { name: 'London', 月份: 'Apr.', 月均降雨量: 81.4 },
      { name: 'London', 月份: 'May', 月均降雨量: 47 },
      { name: 'London', 月份: 'Jun.', 月均降雨量: 20.3 },
      { name: 'London', 月份: 'Jul.', 月均降雨量: 24 },
      { name: 'London', 月份: 'Aug.', 月均降雨量: 35.6 },
      { name: 'Berlin', 月份: 'Jan.', 月均降雨量: 12.4 },
      { name: 'Berlin', 月份: 'Feb.', 月均降雨量: 23.2 },
      { name: 'Berlin', 月份: 'Mar.', 月均降雨量: 34.5 },
      { name: 'Berlin', 月份: 'Apr.', 月均降雨量: 99.7 },
      { name: 'Berlin', 月份: 'May', 月均降雨量: 52.6 },
      { name: 'Berlin', 月份: 'Jun.', 月均降雨量: 35.5 },
      { name: 'Berlin', 月份: 'Jul.', 月均降雨量: 37.4 },
      { name: 'Berlin', 月份: 'Aug.', 月均降雨量: 42.4 },
    ];
    const div = createDiv();
    div.style.display = 'inline-block';
    const chart = new Chart({
      container: div,
      width: 250,
      height: 200,
    });
    chart.data(data);
    chart.interval().position('月份*月均降雨量').color('name').adjust('stack');
    chart.render();
    setTimeout(() => {
      chart.changeData([
        { name: 'London', 月份: 'Jan.', 月均降雨量: 48.9 },
        { name: 'London', 月份: 'Feb.', 月均降雨量: 28.8 },
        { name: 'Berlin', 月份: 'Jan.', 月均降雨量: 2.4 },
        { name: 'Berlin', 月份: 'Feb.', 月均降雨量: 20.2 },
        { name: 'Berlin', 月份: 'Mar.', 月均降雨量: 34.5 },
      ]);
    }, 3000);
  });
  it('Pie Chart', () => {
    const data = [
      { item: '事例一', count: 40, percent: 0.4 },
      { item: '事例二', count: 21, percent: 0.21 },
      { item: '事例三', count: 17, percent: 0.17 },
      { item: '事例四', count: 13, percent: 0.13 },
      { item: '事例五', count: 9, percent: 0.09 },
    ];
    const div = createDiv();
    div.style.display = 'inline-block';
    const chart = new Chart({
      container: div,
      width: 250,
      height: 200,
    });
    chart.data(data);
    chart.coordinate('theta', {
      radius: 0.75,
    });
    chart.tooltip({
      showTitle: false,
      showMarkers: false,
    });
    chart.axis(false); // 关闭坐标轴
    chart
      .interval()
      .position('1*percent')
      .color('item')
      .label('percent', {
        content: (d) => {
          return `${d.item}: ${d.percent * 100}%`;
        },
      })
      .style({
        lineWidth: 1,
        stroke: '#fff',
      })
      .adjust('stack');
    chart.render();
    setTimeout(() => {
      chart.changeData([
        { item: '事例二', count: 21, percent: 0.21 },
        { item: '事例三', count: 17, percent: 0.57 },
        { item: '事例四', count: 13, percent: 0.22 },
      ]);
    }, 1000);
  });
  it('Line Chart', () => {
    const data = [
      { year: '1991', value: 3 },
      { year: '1992', value: 4 },
      { year: '1993', value: 3.5 },
      { year: '1994', value: 5 },
      { year: '1995', value: 4.9 },
      { year: '1996', value: 6 },
      { year: '1997', value: 7 },
      { year: '1998', value: 9 },
      { year: '1999', value: 13 },
    ];
    const div = createDiv();
    div.style.display = 'inline-block';
    const chart = new Chart({
      container: div,
      width: 250,
      height: 200,
    });
    chart.data(data);
    chart.scale('value', {
      min: 0,
    });
    chart.scale('year', {
      range: [0, 1],
    });
    chart.tooltip({
      showCrosshairs: true, // 展示 Tooltip 辅助线
      shared: true,
    });
    chart.line().position('year*value');
    chart.point().position('year*value').size(4).shape('circle').style({
      stroke: '#fff',
      lineWidth: 1,
    });
    chart.render();
    setTimeout(() => {
      chart.changeData([
        { year: '1991', value: 30 },
        { year: '1992', value: 14 },
        { year: '1993', value: 35 },
        { year: '1994', value: 50 },
        { year: '1995', value: 49 },
        { year: '1996', value: 62 },
        { year: '1997', value: 17 },
        { year: '1998', value: 39 },
        { year: '1999', value: 3 },
      ]);
    }, 3000);
  });
  it('Stack Area Chart', () => {
    const data = [
      { country: 'Asia', year: '1750', value: 502 },
      { country: 'Asia', year: '1800', value: 635 },
      { country: 'Asia', year: '1850', value: 809 },
      { country: 'Asia', year: '1900', value: 5268 },
      { country: 'Asia', year: '1950', value: 4400 },
      { country: 'Asia', year: '1999', value: 3634 },
      { country: 'Asia', year: '2050', value: 947 },
      { country: 'Africa', year: '1750', value: 106 },
      { country: 'Africa', year: '1800', value: 107 },
      { country: 'Africa', year: '1850', value: 111 },
      { country: 'Africa', year: '1900', value: 1766 },
      { country: 'Africa', year: '1950', value: 221 },
      { country: 'Africa', year: '1999', value: 767 },
      { country: 'Africa', year: '2050', value: 133 },
      { country: 'Europe', year: '1750', value: 163 },
      { country: 'Europe', year: '1800', value: 203 },
      { country: 'Europe', year: '1850', value: 276 },
      { country: 'Europe', year: '1900', value: 628 },
      { country: 'Europe', year: '1950', value: 547 },
      { country: 'Europe', year: '1999', value: 729 },
      { country: 'Europe', year: '2050', value: 408 },
      { country: 'Oceania', year: '1750', value: 200 },
      { country: 'Oceania', year: '1800', value: 200 },
      { country: 'Oceania', year: '1850', value: 200 },
      { country: 'Oceania', year: '1900', value: 460 },
      { country: 'Oceania', year: '1950', value: 230 },
      { country: 'Oceania', year: '1999', value: 300 },
      { country: 'Oceania', year: '2050', value: 300 },
    ];
    const div = createDiv();
    div.style.display = 'inline-block';
    const chart = new Chart({
      container: div,
      width: 250,
      height: 200,
    });
    chart.data(data);
    chart.scale({
      year: {
        type: 'linear',
        tickInterval: 50,
      },
    });
    chart.area().adjust('stack').position('year*value').color('country');
    chart.line().adjust('stack').position('year*value').color('country').size(2);
    chart.render();
    // The update drops two series entirely (Africa, Oceania) as well as changing values.
    setTimeout(() => {
      chart.changeData([
        { country: 'Asia', year: '1750', value: 52 },
        { country: 'Asia', year: '1800', value: 65 },
        { country: 'Asia', year: '1850', value: 89 },
        { country: 'Asia', year: '1900', value: 526 },
        { country: 'Asia', year: '1950', value: 400 },
        { country: 'Asia', year: '1999', value: 364 },
        { country: 'Asia', year: '2050', value: 947 },
        { country: 'Europe', year: '1750', value: 63 },
        { country: 'Europe', year: '1800', value: 203 },
        { country: 'Europe', year: '1850', value: 26 },
        { country: 'Europe', year: '1900', value: 28 },
        { country: 'Europe', year: '1950', value: 547 },
        { country: 'Europe', year: '1999', value: 29 },
        { country: 'Europe', year: '2050', value: 408 },
      ]);
    }, 1000);
  });
  it('Radar Chart', () => {
    const data = [
      { item: 'Design', user: 'a', score: 70 },
      { item: 'Design', user: 'b', score: 30 },
      { item: 'Development', user: 'a', score: 60 },
      { item: 'Development', user: 'b', score: 70 },
      { item: 'Marketing', user: 'a', score: 50 },
      { item: 'Marketing', user: 'b', score: 60 },
      { item: 'Users', user: 'a', score: 40 },
      { item: 'Users', user: 'b', score: 50 },
      { item: 'Test', user: 'a', score: 60 },
      { item: 'Test', user: 'b', score: 70 },
      { item: 'Language', user: 'a', score: 70 },
      { item: 'Language', user: 'b', score: 50 },
      { item: 'Technology', user: 'a', score: 50 },
      { item: 'Technology', user: 'b', score: 40 },
      { item: 'Support', user: 'a', score: 30 },
      { item: 'Support', user: 'b', score: 40 },
      { item: 'Sales', user: 'a', score: 60 },
      { item: 'Sales', user: 'b', score: 40 },
      { item: 'UX', user: 'a', score: 50 },
      { item: 'UX', user: 'b', score: 60 },
    ];
    const div = createDiv();
    div.style.display = 'inline-block';
    const chart = new Chart({
      container: div,
      width: 250,
      height: 200,
    });
    chart.data(data);
    chart.scale('score', {
      min: 0,
      max: 80,
    });
    chart.coordinate('polar', {
      radius: 0.8,
    });
    chart.tooltip({
      shared: true,
    });
    chart.axis('item', {
      line: null,
      tickLine: null,
      grid: {
        line: {
          style: {
            lineDash: null,
          },
        },
      },
    });
    chart.axis('score', {
      line: null,
      tickLine: null,
      grid: {
        line: {
          style: {
            lineDash: null,
          },
        },
      },
    });
    chart.line().position('item*score').color('user').size(2);
    chart.point().position('item*score').color('user').shape('circle').size(4).style({
      stroke: '#fff',
      lineWidth: 1,
      fillOpacity: 1,
    });
    chart.area().position('item*score').color('user');
    chart.render();
    // The update keeps only user 'b' — series 'a' animates out.
    setTimeout(() => {
      chart.changeData([
        { item: 'Design', user: 'b', score: 40 },
        { item: 'Development', user: 'b', score: 70 },
        { item: 'Marketing', user: 'b', score: 60 },
        { item: 'Users', user: 'b', score: 50 },
        { item: 'Test', user: 'b', score: 70 },
        { item: 'Language', user: 'b', score: 50 },
        { item: 'Technology', user: 'b', score: 40 },
        { item: 'Support', user: 'b', score: 40 },
        { item: 'Sales', user: 'b', score: 40 },
        { item: 'UX', user: 'b', score: 60 },
      ]);
    }, 1000);
  });
  it('Polygon', () => {
    // Raw triples: [name index, day index, sales], mapped to objects below.
    const data = [
      [0, 0, 10],
      [0, 1, 19],
      [0, 2, 8],
      [0, 3, 24],
      [0, 4, 67],
      [1, 0, 92],
      [1, 1, 58],
      [1, 2, 78],
      [1, 3, 117],
      [1, 4, 48],
      [2, 0, 35],
      [2, 1, 15],
      [2, 2, 123],
      [2, 3, 64],
      [2, 4, 52],
      [3, 0, 72],
      [3, 1, 132],
      [3, 2, 114],
      [3, 3, 19],
      [3, 4, 16],
      [4, 0, 38],
      [4, 1, 5],
      [4, 2, 8],
      [4, 3, 117],
      [4, 4, 115],
      [5, 0, 88],
      [5, 1, 32],
      [5, 2, 12],
      [5, 3, 6],
      [5, 4, 120],
      [6, 0, 13],
      [6, 1, 44],
      [6, 2, 88],
      [6, 3, 98],
      [6, 4, 96],
      [7, 0, 31],
      [7, 1, 1],
      [7, 2, 82],
      [7, 3, 32],
      [7, 4, 30],
      [8, 0, 85],
      [8, 1, 97],
      [8, 2, 123],
      [8, 3, 64],
      [8, 4, 84],
      [9, 0, 47],
      [9, 1, 114],
      [9, 2, 31],
      [9, 3, 48],
      [9, 4, 91],
    ];
    const source = [];
    for (const item of data) {
      source.push({
        name: item[0],
        day: item[1],
        sales: item[2],
      });
    }
    const div = createDiv();
    div.style.display = 'inline-block';
    const chart = new Chart({
      container: div,
      width: 250,
      height: 200,
    });
    chart.data(source);
    // 'cat' scales map the numeric indices above to these display values.
    chart.scale('name', {
      type: 'cat',
      values: ['Alexander', 'Marie', 'Maximilian', 'Sophia', 'Lukas', 'Maria', 'Leon', 'Anna', 'Tim', 'Laura'],
    });
    chart.scale('day', {
      type: 'cat',
      values: ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday'],
    });
    chart.coordinate('polar');
    chart
      .polygon()
      .position('name*day')
      .color('sales', '#BAE7FF-#1890FF-#0050B3')
      .label('sales', {
        offset: -2,
        style: {
          fill: '#fff',
          shadowBlur: 2,
          shadowColor: 'rgba(0, 0, 0, .45)',
        },
      })
      .style({
        lineWidth: 1,
        stroke: '#fff',
      });
    chart.render();
    // setTimeout(() => {
    //   const newData = [
    //     [0, 0, 102],
    //     [0, 1, 139],
    //     [0, 2, 83],
    //     [0, 3, 34],
    //     [0, 4, 67],
    //   ];
    //   const newSource = [];
    //   for (const item of newData) {
    //     newSource.push({
    //       name: item[0],
    //       day: item[1],
    //       sales: item[2],
    //     });
    //   }
    //   chart.scale('name', {
    //     type: 'cat',
    //     values: ['Alexander'],
    //   });
    //   chart.changeData(newSource);
    // }, 2000);
  });
  it('Schema', () => {
    const data = [
      { x: 'Oceania', low: 1, q1: 9, median: 16, q3: 22, high: 24 },
      { x: 'East Europe', low: 1, q1: 5, median: 8, q3: 12, high: 16 },
      { x: 'Australia', low: 1, q1: 8, median: 12, q3: 19, high: 26 },
      { x: 'South America', low: 2, q1: 8, median: 12, q3: 21, high: 28 },
      { x: 'North Africa', low: 1, q1: 8, median: 14, q3: 18, high: 24 },
      { x: 'North America', low: 3, q1: 10, median: 17, q3: 28, high: 30 },
      { x: 'West Europe', low: 1, q1: 7, median: 10, q3: 17, high: 22 },
      { x: 'West Africa', low: 1, q1: 6, median: 8, q3: 13, high: 16 },
    ];
    // Fold the five quantile fields into the 'range' array the box shape expects.
    const dv = new DataSet.DataView().source(data);
    dv.transform({
      type: 'map',
      callback: (obj) => {
        obj.range = [obj.low, obj.q1, obj.median, obj.q3, obj.high];
        return obj;
      },
    });
    const div = createDiv();
    div.style.display = 'inline-block';
    const chart = new Chart({
      container: div,
      width: 250,
      height: 200,
      localRefresh: false,
    });
    chart.data(dv.rows);
    chart.scale('range', {
      max: 35,
    });
    chart.coordinate('polar');
    chart.schema().position('x*range').shape('box').style({
      stroke: '#545454',
      fill: '#1890FF',
      fillOpacity: 0.3,
    });
    chart.render();
    setTimeout(() => {
      const newData = [
        { x: 'Oceania', low: 1, q1: 9, median: 16, q3: 22, high: 24 },
        { x: 'East Europe', low: 1, q1: 5, median: 8, q3: 12, high: 16 },
        { x: 'Australia', low: 1, q1: 8, median: 12, q3: 19, high: 26 },
        { x: 'South America', low: 2, q1: 8, median: 12, q3: 21, high: 28 },
        { x: 'West Europe', low: 1, q1: 7, median: 10, q3: 17, high: 22 },
        { x: 'West Africa', low: 1, q1: 6, median: 8, q3: 13, high: 16 },
      ];
      const dv1 = new DataSet.DataView().source(newData);
      dv1.transform({
        type: 'map',
        callback: (obj) => {
          obj.range = [obj.low, obj.q1, obj.median, obj.q3, obj.high];
          return obj;
        },
      });
      chart.changeData(dv1.rows);
    }, 1000);
  });
  it('Point chart', () => {
    const data = [
      { gender: 'female', height: 161.2, weight: 51.6 },
      { gender: 'female', height: 167.5, weight: 59 },
      { gender: 'female', height: 159.5, weight: 49.2 },
      { gender: 'female', height: 157, weight: 63 },
      { gender: 'female', height: 155.8, weight: 53.6 },
    ];
    const div = createDiv();
    div.style.display = 'inline-block';
    const chart = new Chart({
      container: div,
      width: 250,
      height: 200,
    });
    chart.data(data);
    chart.coordinate('polar');
    chart.point().position('height*weight').size(4).shape('circle');
    chart.render();
    setTimeout(() => {
      chart.changeData([
        { gender: 'female', height: 161.2, weight: 516 },
        { gender: 'female', height: 159.5, weight: 49.2 },
        { gender: 'female', height: 174, weight: 75.7 },
        { gender: 'female', height: 172.7, weight: 61.1 },
      ]);
    }, 2000);
  });
  it('Path Chart', () => {
    const data = [
      { consumption: 0.65, price: 1, year: 1965 },
      { consumption: 0.66, price: 1.05, year: 1966 },
      { consumption: 0.64, price: 1.1, year: 1967 },
      { consumption: 0.63, price: 1.12, year: 1968 },
      { consumption: 0.55, price: 1.15, year: 1969 },
      { consumption: 0.57, price: 1.19, year: 1970 },
      { consumption: 0.58, price: 1.14, year: 1971 },
      { consumption: 0.59, price: 1, year: 1972 },
      { consumption: 0.57, price: 0.96, year: 1973 },
      { consumption: 0.55, price: 0.92, year: 1974 },
      { consumption: 0.54, price: 0.88, year: 1975 },
      { consumption: 0.55, price: 0.87, year: 1976 },
      { consumption: 0.42, price: 0.89, year: 1977 },
      { consumption: 0.28, price: 1, year: 1978 },
      { consumption: 0.15, price: 1.1, year: 1979 },
    ];
    const div = createDiv();
    div.style.display = 'inline-block';
    const chart = new Chart({
      container: div,
      width: 250,
      height: 200,
    });
    chart.data(data);
    // NOTE(review): 'path' is only referenced by the commented-out hide()
    // call in the setTimeout below.
    const path = chart
      .path()
      .animate({
        appear: {
          animation: 'path-in',
        },
      })
      .position('price*consumption')
      .label('year')
      .size(2);
    chart.point().position('price*consumption').shape('triangle');
    chart.render();
    setTimeout(() => {
      // path.hide();
      chart.changeData([
        { consumption: 0.58, price: 1.14, year: 1971 },
        { consumption: 0.59, price: 1, year: 1972 },
        { consumption: 0.57, price: 9.6, year: 1973 },
        { consumption: 0.55, price: 0.92, year: 1974 },
        { consumption: 0.54, price: 0.8, year: 1975 },
        { consumption: 0.55, price: 0.87, year: 1976 },
        { consumption: 0.42, price: 0.89, year: 1977 },
        { consumption: 0.28, price: 1, year: 1978 },
      ]);
    }, 1000);
  });
});
import { PagedAsyncIterableIterator } from "@azure/core-paging";
import { ProductApi } from "../operationsInterfaces";
import * as coreClient from "@azure/core-client";
import * as Mappers from "../models/mappers";
import * as Parameters from "../models/parameters";
import { ApiManagementClient } from "../apiManagementClient";
import {
ApiContract,
ProductApiListByProductNextOptionalParams,
ProductApiListByProductOptionalParams,
ProductApiListByProductResponse,
ProductApiCheckEntityExistsOptionalParams,
ProductApiCheckEntityExistsResponse,
ProductApiCreateOrUpdateOptionalParams,
ProductApiCreateOrUpdateResponse,
ProductApiDeleteOptionalParams,
ProductApiListByProductNextResponse
} from "../models";
/// <reference lib="esnext.asynciterable" />
/** Class containing ProductApi operations. */
export class ProductApiImpl implements ProductApi {
  // Service client through which every request of this operation group is sent.
  private readonly client: ApiManagementClient;
  /**
   * Initialize a new instance of the class ProductApi class.
   * @param client Reference to the service client
   */
  constructor(client: ApiManagementClient) {
    this.client = client;
  }
  /**
   * Lists a collection of the APIs associated with a product.
   * @param resourceGroupName The name of the resource group.
   * @param serviceName The name of the API Management service.
   * @param productId Product identifier. Must be unique in the current API Management service instance.
   * @param options The options parameters.
   */
  public listByProduct(
    resourceGroupName: string,
    serviceName: string,
    productId: string,
    options?: ProductApiListByProductOptionalParams
  ): PagedAsyncIterableIterator<ApiContract> {
    const iter = this.listByProductPagingAll(
      resourceGroupName,
      serviceName,
      productId,
      options
    );
    // Adapter that supports both per-item iteration (next / asyncIterator)
    // and per-page iteration (byPage) over the same underlying generators.
    return {
      next() {
        return iter.next();
      },
      [Symbol.asyncIterator]() {
        return this;
      },
      byPage: () => {
        return this.listByProductPagingPage(
          resourceGroupName,
          serviceName,
          productId,
          options
        );
      }
    };
  }
  /**
   * Yields one page (array) of results at a time, following the
   * service-provided nextLink until the collection is exhausted.
   */
  private async *listByProductPagingPage(
    resourceGroupName: string,
    serviceName: string,
    productId: string,
    options?: ProductApiListByProductOptionalParams
  ): AsyncIterableIterator<ApiContract[]> {
    let result = await this._listByProduct(
      resourceGroupName,
      serviceName,
      productId,
      options
    );
    yield result.value || [];
    let continuationToken = result.nextLink;
    // Keep requesting pages while the service returns a continuation link.
    while (continuationToken) {
      result = await this._listByProductNext(
        resourceGroupName,
        serviceName,
        productId,
        continuationToken,
        options
      );
      continuationToken = result.nextLink;
      yield result.value || [];
    }
  }
  /**
   * Flattens the paged iteration into a stream of individual ApiContract items.
   */
  private async *listByProductPagingAll(
    resourceGroupName: string,
    serviceName: string,
    productId: string,
    options?: ProductApiListByProductOptionalParams
  ): AsyncIterableIterator<ApiContract> {
    for await (const page of this.listByProductPagingPage(
      resourceGroupName,
      serviceName,
      productId,
      options
    )) {
      yield* page;
    }
  }
  /**
   * Lists a collection of the APIs associated with a product.
   * @param resourceGroupName The name of the resource group.
   * @param serviceName The name of the API Management service.
   * @param productId Product identifier. Must be unique in the current API Management service instance.
   * @param options The options parameters.
   */
  private _listByProduct(
    resourceGroupName: string,
    serviceName: string,
    productId: string,
    options?: ProductApiListByProductOptionalParams
  ): Promise<ProductApiListByProductResponse> {
    return this.client.sendOperationRequest(
      { resourceGroupName, serviceName, productId, options },
      listByProductOperationSpec
    );
  }
  /**
   * Checks that API entity specified by identifier is associated with the Product entity.
   * @param resourceGroupName The name of the resource group.
   * @param serviceName The name of the API Management service.
   * @param productId Product identifier. Must be unique in the current API Management service instance.
   * @param apiId API revision identifier. Must be unique in the current API Management service instance.
   *              Non-current revision has ;rev=n as a suffix where n is the revision number.
   * @param options The options parameters.
   */
  checkEntityExists(
    resourceGroupName: string,
    serviceName: string,
    productId: string,
    apiId: string,
    options?: ProductApiCheckEntityExistsOptionalParams
  ): Promise<ProductApiCheckEntityExistsResponse> {
    return this.client.sendOperationRequest(
      { resourceGroupName, serviceName, productId, apiId, options },
      checkEntityExistsOperationSpec
    );
  }
  /**
   * Adds an API to the specified product.
   * @param resourceGroupName The name of the resource group.
   * @param serviceName The name of the API Management service.
   * @param productId Product identifier. Must be unique in the current API Management service instance.
   * @param apiId API revision identifier. Must be unique in the current API Management service instance.
   *              Non-current revision has ;rev=n as a suffix where n is the revision number.
   * @param options The options parameters.
   */
  createOrUpdate(
    resourceGroupName: string,
    serviceName: string,
    productId: string,
    apiId: string,
    options?: ProductApiCreateOrUpdateOptionalParams
  ): Promise<ProductApiCreateOrUpdateResponse> {
    return this.client.sendOperationRequest(
      { resourceGroupName, serviceName, productId, apiId, options },
      createOrUpdateOperationSpec
    );
  }
  /**
   * Deletes the specified API from the specified product.
   * @param resourceGroupName The name of the resource group.
   * @param serviceName The name of the API Management service.
   * @param productId Product identifier. Must be unique in the current API Management service instance.
   * @param apiId API revision identifier. Must be unique in the current API Management service instance.
   *              Non-current revision has ;rev=n as a suffix where n is the revision number.
   * @param options The options parameters.
   */
  delete(
    resourceGroupName: string,
    serviceName: string,
    productId: string,
    apiId: string,
    options?: ProductApiDeleteOptionalParams
  ): Promise<void> {
    return this.client.sendOperationRequest(
      { resourceGroupName, serviceName, productId, apiId, options },
      deleteOperationSpec
    );
  }
  /**
   * ListByProductNext
   * @param resourceGroupName The name of the resource group.
   * @param serviceName The name of the API Management service.
   * @param productId Product identifier. Must be unique in the current API Management service instance.
   * @param nextLink The nextLink from the previous successful call to the ListByProduct method.
   * @param options The options parameters.
   */
  private _listByProductNext(
    resourceGroupName: string,
    serviceName: string,
    productId: string,
    nextLink: string,
    options?: ProductApiListByProductNextOptionalParams
  ): Promise<ProductApiListByProductNextResponse> {
    return this.client.sendOperationRequest(
      { resourceGroupName, serviceName, productId, nextLink, options },
      listByProductNextOperationSpec
    );
  }
}
// Operation Specifications
// All payloads are JSON, so a single non-XML serializer is shared below.
const serializer = coreClient.createSerializer(Mappers, /* isXml */ false);
// GET .../products/{productId}/apis -- first page of the ListByProduct operation.
const listByProductOperationSpec: coreClient.OperationSpec = {
  path:
    "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/products/{productId}/apis",
  httpMethod: "GET",
  responses: {
    200: {
      bodyMapper: Mappers.ApiCollection
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  queryParameters: [
    Parameters.filter,
    Parameters.top,
    Parameters.skip,
    Parameters.apiVersion
  ],
  urlParameters: [
    Parameters.$host,
    Parameters.resourceGroupName,
    Parameters.serviceName,
    Parameters.subscriptionId,
    Parameters.productId
  ],
  headerParameters: [Parameters.accept],
  serializer
};
// HEAD .../apis/{apiId} -- 204 (no body) when the API is associated with the product.
const checkEntityExistsOperationSpec: coreClient.OperationSpec = {
  path:
    "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/products/{productId}/apis/{apiId}",
  httpMethod: "HEAD",
  responses: {
    204: {},
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  queryParameters: [Parameters.apiVersion],
  urlParameters: [
    Parameters.$host,
    Parameters.resourceGroupName,
    Parameters.serviceName,
    Parameters.subscriptionId,
    Parameters.apiId,
    Parameters.productId
  ],
  headerParameters: [Parameters.accept],
  serializer
};
// PUT .../apis/{apiId} -- adds the API to the product (200 updated, 201 created).
const createOrUpdateOperationSpec: coreClient.OperationSpec = {
  path:
    "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/products/{productId}/apis/{apiId}",
  httpMethod: "PUT",
  responses: {
    200: {
      bodyMapper: Mappers.ApiContract
    },
    201: {
      bodyMapper: Mappers.ApiContract
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  queryParameters: [Parameters.apiVersion],
  urlParameters: [
    Parameters.$host,
    Parameters.resourceGroupName,
    Parameters.serviceName,
    Parameters.subscriptionId,
    Parameters.apiId,
    Parameters.productId
  ],
  headerParameters: [Parameters.accept],
  serializer
};
// DELETE .../apis/{apiId} -- removes the association (200 or 204, both empty).
const deleteOperationSpec: coreClient.OperationSpec = {
  path:
    "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/products/{productId}/apis/{apiId}",
  httpMethod: "DELETE",
  responses: {
    200: {},
    204: {},
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  queryParameters: [Parameters.apiVersion],
  urlParameters: [
    Parameters.$host,
    Parameters.resourceGroupName,
    Parameters.serviceName,
    Parameters.subscriptionId,
    Parameters.apiId,
    Parameters.productId
  ],
  headerParameters: [Parameters.accept],
  serializer
};
// GET {nextLink} -- subsequent pages of the ListByProduct operation.
const listByProductNextOperationSpec: coreClient.OperationSpec = {
  path: "{nextLink}",
  httpMethod: "GET",
  responses: {
    200: {
      bodyMapper: Mappers.ApiCollection
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  queryParameters: [
    Parameters.filter,
    Parameters.top,
    Parameters.skip,
    Parameters.apiVersion
  ],
  urlParameters: [
    Parameters.$host,
    Parameters.resourceGroupName,
    Parameters.serviceName,
    Parameters.subscriptionId,
    Parameters.nextLink,
    Parameters.productId
  ],
  headerParameters: [Parameters.accept],
  serializer
};
import test from 'japa'
import { ApplicationContract } from '@ioc:Adonis/Core/Application'
import { setupApplication, fs, setupDb, cleanDb, clearDb } from '../test-helpers'
// Shared application instance, rebuilt before every test by the beforeEach hook.
let app: ApplicationContract
test.group('Slugify Decorator', (group) => {
  // Boot a fresh app (with the slugify provider registered) and a clean
  // database schema before each test so tests cannot leak state.
  group.beforeEach(async () => {
    app = await setupApplication(['../../providers/SlugifyProvider'])
    await setupDb(app.container.resolveBinding('Adonis/Lucid/Database'))
  })
  // Truncate tables between tests; drop the schema and temp files at the end.
  group.afterEach(async () => {
    await clearDb(app.container.resolveBinding('Adonis/Lucid/Database'))
  })
  group.after(async () => {
    await cleanDb(app.container.resolveBinding('Adonis/Lucid/Database'))
    await fs.cleanup()
  })
  test('generate slug for a model', async (assert) => {
    const { BaseModel, column } = app.container.resolveBinding('Adonis/Lucid/Orm')
    const { slugify } = app.container.resolveBinding('Adonis/Addons/LucidSlugify')
    class Post extends BaseModel {
      @column({ isPrimary: true })
      public id: number
      @column()
      public title: string
      @column()
      @slugify({ strategy: 'dbIncrement', fields: ['title'] })
      public slug: string
    }
    const post = new Post()
    post.title = 'Hello world'
    await post.save()
    await post.refresh()
    assert.equal(post.slug, 'hello-world')
  })
  test('do not set slug when defined manually', async (assert) => {
    const { BaseModel, column } = app.container.resolveBinding('Adonis/Lucid/Orm')
    const { slugify } = app.container.resolveBinding('Adonis/Addons/LucidSlugify')
    class Post extends BaseModel {
      @column({ isPrimary: true })
      public id: number
      @column()
      public title: string
      @column()
      @slugify({ strategy: 'dbIncrement', fields: ['title'] })
      public slug: string
    }
    const post = new Post()
    post.title = 'Hello world'
    post.slug = 'user-defined-slug'
    await post.save()
    await post.refresh()
    assert.equal(post.slug, 'user-defined-slug')
  })
  test('do not set slug when source is undefined', async (assert) => {
    const { BaseModel, column } = app.container.resolveBinding('Adonis/Lucid/Orm')
    const { slugify } = app.container.resolveBinding('Adonis/Addons/LucidSlugify')
    class Post extends BaseModel {
      @column({ isPrimary: true })
      public id: number
      @column()
      public title: string
      @column()
      @slugify({ strategy: 'dbIncrement', fields: ['title'] })
      public slug: string
    }
    const post = new Post()
    await post.save()
    await post.refresh()
    assert.isNull(post.slug)
  })
  // Also verifies that the strategy works when the "slug" column is mapped to
  // a model property with a different name (columnName option).
  test('generate unique slug when a similar one already exists', async (assert) => {
    const { BaseModel, column } = app.container.resolveBinding('Adonis/Lucid/Orm')
    const { slugify } = app.container.resolveBinding('Adonis/Addons/LucidSlugify')
    class Post extends BaseModel {
      @column({ isPrimary: true })
      public id: number
      @column()
      public title: string
      @column({ columnName: 'slug' })
      @slugify({ strategy: 'dbIncrement', fields: ['title'] })
      public aDifferentPropertyName: string
    }
    await Post.createMany([
      {
        title: 'Hello world',
        aDifferentPropertyName: 'hello-world',
      },
      {
        title: 'Hello world',
        aDifferentPropertyName: 'hello-10-world',
      },
      {
        title: 'Hello world',
        aDifferentPropertyName: 'hello10world',
      },
    ])
    const post = new Post()
    post.title = 'Hello world'
    await post.save()
    await post.refresh()
    assert.equal(post.aDifferentPropertyName, 'hello-world-1')
  })
  test('do not update slug when source changes', async (assert) => {
    const { BaseModel, column } = app.container.resolveBinding('Adonis/Lucid/Orm')
    const { slugify } = app.container.resolveBinding('Adonis/Addons/LucidSlugify')
    class Post extends BaseModel {
      @column({ isPrimary: true })
      public id: number
      @column()
      public title: string
      @column()
      @slugify({ strategy: 'dbIncrement', fields: ['title'] })
      public slug: string
    }
    await Post.createMany([
      {
        title: 'Hello world',
        slug: 'hello-world',
      },
      {
        title: 'Hello world',
        slug: 'hello-10-world',
      },
      {
        title: 'Hello world',
        slug: 'hello10world',
      },
    ])
    const post = await Post.findOrFail(1)
    post.title = 'A new title'
    await post.save()
    await post.refresh()
    assert.equal(post.slug, 'hello-world')
  })
  test('update slug when allowUpdates is set to true', async (assert) => {
    const { BaseModel, column } = app.container.resolveBinding('Adonis/Lucid/Orm')
    const { slugify } = app.container.resolveBinding('Adonis/Addons/LucidSlugify')
    class Post extends BaseModel {
      @column({ isPrimary: true })
      public id: number
      @column()
      public title: string
      @column()
      @slugify({ strategy: 'dbIncrement', fields: ['title'], allowUpdates: true })
      public slug: string
    }
    await Post.createMany([
      {
        title: 'Hello world',
        slug: 'hello-world',
      },
      {
        title: 'Hello world',
        slug: 'hello-10-world',
      },
      {
        title: 'Hello world',
        slug: 'hello10world',
      },
    ])
    const post = await Post.findOrFail(1)
    post.title = 'A new title'
    await post.save()
    await post.refresh()
    assert.equal(post.slug, 'a-new-title')
  })
  // allowUpdates can also be a predicate receiving the model instance.
  test('update slug when allowUpdates function returns true', async (assert) => {
    const { BaseModel, column } = app.container.resolveBinding('Adonis/Lucid/Orm')
    const { slugify } = app.container.resolveBinding('Adonis/Addons/LucidSlugify')
    class Post extends BaseModel {
      @column({ isPrimary: true })
      public id: number
      @column()
      public title: string
      @column()
      @slugify({
        strategy: 'dbIncrement',
        fields: ['title'],
        allowUpdates: (post: Post) => {
          assert.instanceOf(post, Post)
          return true
        },
      })
      public slug: string
    }
    await Post.createMany([
      {
        title: 'Hello world',
        slug: 'hello-world',
      },
      {
        title: 'Hello world',
        slug: 'hello-10-world',
      },
      {
        title: 'Hello world',
        slug: 'hello10world',
      },
    ])
    const post = await Post.findOrFail(1)
    post.title = 'A new title'
    await post.save()
    await post.refresh()
    assert.equal(post.slug, 'a-new-title')
  })
  test('do not update slug when allowUpdates function returns false', async (assert) => {
    const { BaseModel, column } = app.container.resolveBinding('Adonis/Lucid/Orm')
    const { slugify } = app.container.resolveBinding('Adonis/Addons/LucidSlugify')
    class Post extends BaseModel {
      @column({ isPrimary: true })
      public id: number
      @column()
      public title: string
      @column()
      @slugify({
        strategy: 'dbIncrement',
        fields: ['title'],
        allowUpdates: (post: Post) => {
          assert.instanceOf(post, Post)
          return false
        },
      })
      public slug: string
    }
    await Post.createMany([
      {
        title: 'Hello world',
        slug: 'hello-world',
      },
      {
        title: 'Hello world',
        slug: 'hello-10-world',
      },
      {
        title: 'Hello world',
        slug: 'hello10world',
      },
    ])
    const post = await Post.findOrFail(1)
    post.title = 'A new title'
    await post.save()
    await post.refresh()
    assert.equal(post.slug, 'hello-world')
  })
  test('do not update slug when defined manually', async (assert) => {
    const { BaseModel, column } = app.container.resolveBinding('Adonis/Lucid/Orm')
    const { slugify } = app.container.resolveBinding('Adonis/Addons/LucidSlugify')
    class Post extends BaseModel {
      @column({ isPrimary: true })
      public id: number
      @column()
      public title: string
      @column()
      @slugify({ strategy: 'dbIncrement', fields: ['title'], allowUpdates: true })
      public slug: string
    }
    await Post.createMany([
      {
        title: 'Hello world',
        slug: 'hello-world',
      },
      {
        title: 'Hello world',
        slug: 'hello-10-world',
      },
      {
        title: 'Hello world',
        slug: 'hello10world',
      },
    ])
    const post = await Post.findOrFail(1)
    post.title = 'A new title'
    post.slug = 'the-old-slug'
    await post.save()
    await post.refresh()
    assert.equal(post.slug, 'the-old-slug')
  })
  test('do not update slug when source is untouched', async (assert) => {
    const { BaseModel, column } = app.container.resolveBinding('Adonis/Lucid/Orm')
    const { slugify } = app.container.resolveBinding('Adonis/Addons/LucidSlugify')
    class Post extends BaseModel {
      @column({ isPrimary: true })
      public id: number
      @column()
      public title: string
      @column()
      @slugify({ strategy: 'dbIncrement', fields: ['title'], allowUpdates: true })
      public slug: string
    }
    await Post.createMany([
      {
        title: 'Hello world',
        slug: 'hello-world',
      },
      {
        title: 'Hello world',
        slug: 'hello-10-world',
      },
      {
        title: 'Hello world',
        slug: 'hello10world',
      },
    ])
    const post = await Post.findOrFail(1)
    await post.save()
    await post.refresh()
    assert.equal(post.slug, 'hello-world')
  })
  test('do not update slug when source is set to null', async (assert) => {
    const { BaseModel, column } = app.container.resolveBinding('Adonis/Lucid/Orm')
    const { slugify } = app.container.resolveBinding('Adonis/Addons/LucidSlugify')
    class Post extends BaseModel {
      @column({ isPrimary: true })
      public id: number
      @column()
      public title: string | null
      @column()
      @slugify({ strategy: 'dbIncrement', fields: ['title'], allowUpdates: true })
      public slug: string
    }
    await Post.createMany([
      {
        title: 'Hello world',
        slug: 'hello-world',
      },
      {
        title: 'Hello world',
        slug: 'hello-10-world',
      },
      {
        title: 'Hello world',
        slug: 'hello10world',
      },
    ])
    const post = await Post.findOrFail(1)
    post.title = null
    await post.save()
    await post.refresh()
    assert.equal(post.slug, 'hello-world')
  })
})
import * as debug from 'debug'
import {
fromJS as ImmutableFromJS,
List as ImmutableList,
Map as ImmutableMap,
} from 'immutable'
import { BranchId, StateHash, StateId } from './interfaces'
// Namespaced debug logger; enable with DEBUG=redux-dag-history:DagGraph
const log = debug('redux-dag-history:DagGraph')
/**
* A convenient wrapper around the ImmutableJS-based Dag-History data structure
*/
export default class DagGraph<T> {
/**
* Constructs a new instance
* @param graph The immutableJS instance
*/
constructor(public graph: ImmutableMap<any, any>) {
if (!graph) {
throw new Error("'graph' parameter must be defined")
}
if (!graph.getIn) {
throw new Error("'graph' must be an immutablejs instance")
}
}
/**
* Print out the state graph in the console, for debug purposes
*/
public print(): string {
const graph = this.graph.toJS()
let root: any = null
const states: { [key: string]: any } = {}
const getOrCreateState = (stateId: StateId) => {
let result = states[stateId]
if (!result) {
result = {
id: stateId,
name: this.stateName(stateId),
children: [] as StateId[],
}
states[stateId] = result
}
return result
}
Object.keys(graph.states || {}).forEach(stateId => {
const parentId = graph.states[stateId].parent
const state = getOrCreateState(stateId)
if (!parentId) {
root = state
}
getOrCreateState(parentId).children.push(state)
states[stateId] = state
})
const tree = {
current: graph.current,
chronologicalStates: graph.chronologicalStates,
branches: graph.branches,
states: graph.states,
dag: root,
}
return JSON.stringify(tree, null, 4)
}
  /**
   * Gets the current state ID
   */
  public get currentStateId(): StateId {
    return this.graph.getIn(['current', 'state'])
  }
  /**
   * Gets the start depth of the current branch
   * (i.e. how many commits it took from the dag root to reach the start of the current branch)
   * @param branch The BranchId to get the starting depth of
   */
  public branchStartDepth(branch: BranchId): number {
    return this.stateDepth(this.firstOn(branch))
  }
  /**
   * Gets the ending depth of the current branch
   * (i.e. how many commits it took from the dag root to reach the end of the current branch)
   * @param branch The BranchId to get the starting depth of
   */
  public branchEndDepth(branch: BranchId): number {
    return this.stateDepth(this.latestOn(branch))
  }
  /**
   * Gets the depth of a specific state
   * (i.e. how many commits it took from the root to reach the state)
   * @param branch The BranchId to get the starting depth of
   */
  public stateDepth(commit: StateId): number {
    // Depth counts edges, not nodes, hence path length minus one.
    return this.commitPath(commit).length - 1
  }
  /**
   * Gets the depth of a commit on a branch
   * (i.e. how many commits it took from the root to reach the commit on the branch)
   * @param branch The branch to qualify the commit
   * @param commit The commit to get the depth of
   * @returns the absolute depth, or undefined when the commit is not on the branch
   */
  public depthIndexOf(branch: BranchId, commit: StateId): number | undefined {
    const commits = this.branchCommitPath(branch)
    const foundIndex = commits.indexOf(commit)
    if (foundIndex === -1) {
      return undefined
    } else {
      // Offset by where the branch itself starts relative to the root.
      const start = this.branchStartDepth(branch)
      return start + foundIndex
    }
  }
  /**
   * Gets the maximum depth of the graph (-1 when there are no branches)
   */
  public get maxDepth(): number {
    const branches = this.branches
    const branchDepths = branches.map(b => this.branchEndDepth(b))
    let max: number = -1
    branchDepths.forEach(d => {
      if (d > max) {
        max = d
      }
    })
    return max
  }
  /**
   * Mutate the current state id
   * @param stateId The new state id
   */
  public setCurrentStateId(stateId: StateId) {
    this.graph = this.graph.setIn(['current', 'state'], stateId)
    // Every navigation is recorded in the chronological visit log.
    this.logVisit(stateId)
    return this
  }
  /**
   * Get the current branch id
   */
  public get currentBranch(): BranchId {
    return this.graph.getIn(['current', 'branch'])
  }
  /**
   * Get the last-generated state id
   */
  public get lastStateId(): StateId {
    return this.graph.get('lastStateId')
  }
  /**
   * Set the last-generated state id
   * @param value The new last-generated state id
   */
  public setLastStateId(value: StateId) {
    this.graph = this.graph.set('lastStateId', value)
    return this
  }
/**
* Get the last-generated branch id
*/
public get lastBranchId(): StateId {
return this.graph.get('lastBranchId')
}
/**
* Set the last-generated branch id
*/
public setLastBranchId(value: BranchId) {
this.graph = this.graph.set('lastBranchId', value)
return this
}
  /**
   * Mutate the current branch id
   * @param branchId The new branch id
   */
  public setCurrentBranch(branchId: BranchId) {
    this.graph = this.graph.setIn(['current', 'branch'], branchId)
    return this
  }
  /**
   * Gets the latest state id on a given branch
   * @param branch The branch to search on
   */
  public latestOn(branch: BranchId): StateId {
    // Ids are stringified via template literals because map keys are strings.
    return this.graph.getIn(['branches', `${branch}`, 'latest'])
  }
  /**
   * Gets the 'committed' state id on a given branch.
   * Pushing new states bumps the committed state id. The 'committed' state is the most recently visited in a branch.
   * @param branch The branch to search on
   */
  public committedOn(branch: BranchId): StateId {
    return this.graph.getIn(['branches', `${branch}`, 'committed'])
  }
  /**
   * Updates the latest state on the branch
   * @param branch The branch id
   * @param commit The new latest commit on the branchd
   */
  public setLatest(branch: BranchId, commit: StateId) {
    this.graph = this.graph.setIn(['branches', `${branch}`, 'latest'], commit)
    return this
  }
  /**
   * Updates the committed state on the branch
   * @param branch The branch id
   * @param commit The committed state id
   */
  public setCommitted(branch: BranchId, commit: StateId) {
    this.graph = this.graph.setIn(
      ['branches', `${branch}`, 'committed'],
      commit,
    )
    return this
  }
  /**
   * Each state is mapped to a single branch. This updates the branch for a state.
   * @param commit The state id
   * @param branch The branch that is now associated with the state.
   */
  public markStateForBranch(commit: StateId, branch: BranchId) {
    this.graph = this.graph.setIn(['states', `${commit}`, 'branch'], branch)
    return this
  }
  /**
   * Sets the first state id of a branch
   * @param branch The branch id
   * @param commit The first state id on the branch
   */
  public setFirst(branch: BranchId, commit: StateId) {
    this.graph = this.graph.setIn(['branches', `${branch}`, 'first'], commit)
    return this
  }
  /**
   * Gets the first state id on a branch
   * @param branch The branch to search on
   */
  public firstOn(branch: BranchId): StateId {
    return this.graph.getIn(['branches', `${branch}`, 'first'])
  }
  /**
   * Update the name of a state
   * @param commit The id of the state to rename
   * @param name The new name of the state
   */
  public renameState(commit: StateId, name: string) {
    this.graph = this.graph.setIn(['states', `${commit}`, 'name'], name)
    return this
  }
  /**
   * Gets the name of a given state
   * @param commit The state id to get the name of
   */
  public stateName(commit: StateId) {
    return this.graph.getIn(['states', `${commit}`, 'name'])
  }
  /**
   * Gets the name of a branch
   * @param branch The branch to get the name of
   */
  public getBranchName(branch: BranchId): string {
    return this.graph.getIn(['branches', `${branch}`, 'name'])
  }
  /**
   * Sets the name of a branch
   * @param branch The branch to set the name of
   * @param name The new branch name
   */
  public setBranchName(branch: BranchId, name: string) {
    this.graph = this.graph.setIn(['branches', `${branch}`, 'name'], name)
    return this
  }
  /**
   * Retrieve the physical state of a state id
   * @param commit The state id to get the physical state of
   */
  public getState(commit: StateId): T {
    return this.graph.getIn(['physicalStates', `${commit}`])
  }
  /**
   * Inserts a new state
   * @param commit The new state id
   * @param parent The parent state id
   * @param state The physical state
   * @param name The name of the state
   */
  public insertState(commit: StateId, parent: StateId, state: T, name: string) {
    log('Inserting new commit', commit)
    // A duplicate insert silently overwrites; log it for diagnosis.
    if (this.graph.getIn(['states', `${commit}`])) {
      log('Commit %s is already present', this.getState(commit))
    }
    this.graph = this.graph
      .setIn(['states', `${commit}`], ImmutableFromJS({ name, parent }))
      .setIn(['physicalStates', `${commit}`], state)
    return this
  }
  /**
   * Logs a state visitation into the chronological state array
   * @param state The visited state id
   */
  public logVisit(state: StateId) {
    const chronologicalStates = this.graph.get(
      'chronologicalStates',
    ) as ImmutableList<any>
    this.graph = this.graph.setIn(
      ['chronologicalStates'],
      chronologicalStates.push(state),
    )
    return this
  }
  /**
   * Get child state ids of a given state id.
   * @param commit The parent state id
   */
  public childrenOf(commit: StateId): StateId[] {
    // Linear scan over all states; fine for the modest graphs history produces.
    const states = this.graph.get('states')
    return states
      .toSeq()
      .filter((state: ImmutableMap<any, any>) => state.get('parent') === commit)
      .map((state: ImmutableMap<any, any>, key: string) => key)
      .toList()
      .toJS()
  }
  /**
   * Gets the parent state id of a given state id
   * @param commit The state id to get the parent state id of
   */
  public parentOf(commit: StateId): StateId {
    return this.graph.getIn(['states', `${commit}`, 'parent'])
  }
  /**
   * Gets 'alternate parents' for a given state. As the graph is expanded,
   * and if state equality is enabled, then we can determine optimal paths to a given state as
   * it is re-discovered through alternative means.
   *
   * i.e. When this happens
   *
   * Original Visitiation
   *
   * a -> b -> c -> D // orginal path
   *  \--> D' // branched path
   *
   * And D and D' are considered logically equivalent, then the parent state of D remains
   * 'c', but 'b' is added as an 'alternate parent'
   *
   * @param commit The state id to find "alternate parents" of
   */
  public alternateParentsOf(commit: StateId): StateId[] {
    const result = this.graph.getIn(['states', `${commit}`, 'alternateParents'])
    return result ? result.toJS() : []
  }
  /**
   * Gets the shallowest parent of a commit. Compares the graph-depth of the parent and
   * alternate parents.
   * @param commit The state id to search on
   */
  public shallowestParentOf(commit: StateId): StateId {
    const depthOf = (t: BranchId) => this.depthIndexOf(this.branchOf(t), t)
    let result: StateId = this.parentOf(commit)
    // NOTE(review): the cast assumes the primary parent is always reachable on
    // its own branch (depthIndexOf not undefined) -- TODO confirm.
    let minDepth: number = depthOf(result) as number
    this.alternateParentsOf(commit).forEach(t => {
      const depth = depthOf(t)
      if (depth !== undefined && depth < minDepth) {
        minDepth = depth
        result = t
      }
    })
    return result
  }
  /**
   * Replace the physical state of a stateId
   * @param commit The state ID to replace
   * @param state The new state
   */
  public replaceState(commit: StateId, state: T) {
    this.graph = this.graph.setIn(['physicalStates', `${commit}`], state)
    return this
  }
  /**
   * Gets the state-id path of a state id, from the root down to (and
   * including) the given commit
   * @param commit The state id to get the commit path of
   */
  public commitPath(commit: StateId): StateId[] {
    if (commit === undefined) {
      return []
    }
    const path: StateId[] = [commit]
    let current = commit
    // Walk primary-parent links upward, prepending so the root ends up first.
    do {
      const parent = this.parentOf(current)
      if (parent) {
        path.unshift(parent)
      }
      current = parent
    } while (current)
    return path
  }
  /**
   * Gets the shortest path of a state id, considering alternate parentage
   * @param commit The state id to get the commit path of
   */
  public shortestCommitPath(commit: StateId): StateId[] {
    if (commit === undefined) {
      return []
    }
    const path: StateId[] = [commit]
    let current = commit
    // Same walk as commitPath, but at each step take the shallowest of the
    // primary and alternate parents.
    do {
      const parent = this.shallowestParentOf(current)
      if (parent) {
        path.unshift(parent)
      }
      current = parent
    } while (current)
    return path
  }
  /**
   * Gets a path of all state ids on a branch, from the branch's first commit
   * to its latest
   * @param branch The branch to get the commit path on
   */
  public branchCommitPath(branch: BranchId): StateId[] {
    if (branch === undefined) {
      return []
    }
    const latest = this.latestOn(branch)
    const path = this.commitPath(latest)
    // Trim ancestors that belong to branches above this one.
    const firstCommitOnBranch = this.firstOn(branch)
    return path.slice(path.indexOf(firstCommitOnBranch))
  }
/**
* Sets the parent state id of a child state
* @param commit The child state id
* @param parent The parent state id
*/
public setParent(commit: StateId, parent: StateId) {
this.graph = this.graph.setIn(['states', `${commit}`, 'parent'], parent)
}
/**
* Adds an alternate parent to a state id
* @param commit The state id
* @param parent The new alternate parent
*/
public setAlternateParent(commit: StateId, parent: StateId) {
if (this.parentOf(commit) !== parent) {
return
}
const path = ['states', `${commit}`, 'alternateParents']
const parentList: ImmutableList<StateId> =
(this.graph.getIn(path) as ImmutableList<StateId>) ||
ImmutableList<StateId>()
if (!parentList.contains(parent)) {
this.graph = this.graph.setIn(path, parentList.push(parent))
}
}
/**
* Gets the list of branch ids available
*/
public get branches(): BranchId[] {
const branches = this.graph.get('branches')
return Array.from(branches.keys()) as BranchId[]
}
/**
* Gets the branch a state is associated with
* @param commit The state id
*/
public branchOf(commit: StateId): BranchId {
return this.graph.getIn(['states', `${commit}`, 'branch'])
}
/**
* Gets all of the branches that a given state id is ancestral for.
*
* i.e. search all children and aggregate branch ids
*
* @param commit The state id to search on
*/
public branchesOf(commit: StateId): BranchId[] {
if (!commit) {
throw new Error('commit must be defined')
}
const children = this.childrenOf(commit)
if (children.length === 0) {
const branches: BranchId[] = []
for (const branch of this.branches) {
if (this.latestOn(branch) === commit) {
branches.push(branch)
}
}
return branches
} else {
let result: BranchId[] = []
const childrenBranches = children.map(child => this.branchesOf(child))
childrenBranches.forEach(cb => (result = result.concat(...cb)))
return result
}
}
/**
* Squash a branch down to one commit
*/
public squashCurrentBranch() {
const toSquash: StateId[] = []
const branch = this.branchOf(this.currentStateId)
let current = this.parentOf(this.currentStateId)
let keepSquashing = true
do {
if (current && this.branchOf(current) === branch) {
toSquash.push(current)
current = this.parentOf(current)
} else {
keepSquashing = false
}
} while (keepSquashing)
log(
'squashing %s states on branch %s => ',
toSquash.length,
branch,
current,
toSquash,
)
if (toSquash.length > 0) {
toSquash.forEach(c => this.remove(c))
this.setParent(this.currentStateId, current)
}
return this
}
/**
* Search for a state by hash code
* @param hash The hash code to search for
*/
public getStateForHash(hash: StateHash): StateId {
return this.graph.getIn(['stateHash', hash])
}
/**
* Registers a state with a hash code. Existing hashes are removed
* @param hash The hash code to register.
*/
public setHashForState(hash: StateHash, state: StateId): void {
const stateHashPath = ['states', state, 'hash']
const existingHash = this.graph.getIn(stateHashPath)
// Remove the hash from the state description and the hash->state map
if (existingHash) {
this.graph = this.graph
.removeIn(stateHashPath)
.deleteIn(['stateHash', existingHash])
}
this.graph = this.graph
.setIn(stateHashPath, hash)
.setIn(['stateHash', hash], state)
}
/**
* Remove a state from the graph
* @param commit The state to remove
*/
private remove(commit: StateId) {
// TODO: we should remove this from the branch list and other metadata as well.
// This will be how we keep the DAG pruned to a fixed size.
this.graph = this.graph
.deleteIn(['states', `${commit}`])
.deleteIn(['physicalStates', `${commit}`])
}
} | the_stack |
import React, { useContext } from 'react'
import { AnimatePresence } from 'framer-motion'
import { useRouter } from 'next/router'
import {
Accordion,
AccordionItem,
Box,
Button,
Select,
Tag,
Text,
Tooltip,
} from '@island.is/island-ui/core'
import {
BlueBox,
CaseFileList,
CourtRecordAccordionItem,
FormContentContainer,
InfoCard,
PdfButton,
PoliceRequestAccordionItem,
RulingAccordionItem,
} from '@island.is/judicial-system-web/src/shared-components'
import * as Constants from '@island.is/judicial-system-web/src/utils/constants'
import {
CaseAppealDecision,
CaseCustodyRestrictions,
CaseDecision,
CaseState,
CaseType,
InstitutionType,
isRestrictionCase,
isInvestigationCase,
UserRole,
} from '@island.is/judicial-system/types'
import type { Case } from '@island.is/judicial-system/types'
import { getRestrictionTagVariant } from '@island.is/judicial-system-web/src/utils/stepHelper'
import {
capitalize,
caseTypes,
formatDate,
getShortRestrictionByValue,
TIME_FORMAT,
} from '@island.is/judicial-system/formatters'
import { UserContext } from '@island.is/judicial-system-web/src/shared-components/UserProvider/UserProvider'
import AppealSection from './Components/AppealSection/AppealSection'
import { useInstitution } from '@island.is/judicial-system-web/src/utils/hooks'
import { ValueType } from 'react-select/src/types'
import { ReactSelectOption } from '@island.is/judicial-system-web/src/types'
import { signedVerdictOverview } from '@island.is/judicial-system-web/messages/Core/signedVerdictOverview'
import { useIntl } from 'react-intl'
import {
UploadState,
useCourtUpload,
} from '@island.is/judicial-system-web/src/utils/hooks/useCourtUpload'
import { UploadStateMessage } from './Components/UploadStateMessage'
import InfoBox from '@island.is/judicial-system-web/src/shared-components/InfoBox/InfoBox'
import { core } from '@island.is/judicial-system-web/messages'
interface Props {
workingCase: Case
setWorkingCase: React.Dispatch<React.SetStateAction<Case | undefined>>
setAccusedAppealDate: () => void
setProsecutorAppealDate: () => void
withdrawAccusedAppealDate: () => void
withdrawProsecutorAppealDate: () => void
shareCaseWithAnotherInstitution: (
selectedInstitution?: ValueType<ReactSelectOption>,
) => void
selectedSharingInstitutionId: ValueType<ReactSelectOption>
setSelectedSharingInstitutionId: React.Dispatch<
React.SetStateAction<ValueType<ReactSelectOption>>
>
}
const SignedVerdictOverviewForm: React.FC<Props> = (props) => {
const {
workingCase,
setWorkingCase,
setAccusedAppealDate,
setProsecutorAppealDate,
withdrawAccusedAppealDate,
withdrawProsecutorAppealDate,
shareCaseWithAnotherInstitution,
selectedSharingInstitutionId,
setSelectedSharingInstitutionId,
} = props
const router = useRouter()
const { user } = useContext(UserContext)
const { formatMessage } = useIntl()
const { prosecutorsOffices } = useInstitution()
const { uploadFilesToCourt, uploadState } = useCourtUpload(
workingCase,
setWorkingCase,
)
/**
* If the case is not rejected it must be accepted because
* this screen is only rendered if the case is either accepted
* or rejected. Here we are first handling the case where a case
* is rejected, then the case where a case is accepted and the
* custody end date is in the past and then we assume that
* the case is accepted and the custody end date has not come yet.
* For accepted cases, we first handle the case where the judge
* decided only accept an alternative travel ban and finally we
* assume that the actual custody was accepted.
*/
const titleForCase = (theCase: Case) => {
const isTravelBan =
theCase.decision === CaseDecision.ACCEPTING_ALTERNATIVE_TRAVEL_BAN ||
theCase.type === CaseType.TRAVEL_BAN
if (theCase.state === CaseState.REJECTED) {
if (isInvestigationCase(theCase.type)) {
return 'Kröfu um rannsóknarheimild hafnað'
} else {
return 'Kröfu hafnað'
}
}
if (theCase.state === CaseState.DISMISSED) {
return formatMessage(signedVerdictOverview.dismissedTitle)
}
if (theCase.isValidToDateInThePast) {
return isTravelBan ? 'Farbanni lokið' : 'Gæsluvarðhaldi lokið'
}
return isTravelBan
? 'Farbann virkt'
: isInvestigationCase(theCase.type)
? 'Krafa um rannsóknarheimild samþykkt'
: 'Gæsluvarðhald virkt'
}
const subtitleForCase = (theCase: Case) => {
const isTravelBan =
theCase.decision === CaseDecision.ACCEPTING_ALTERNATIVE_TRAVEL_BAN ||
theCase.type === CaseType.TRAVEL_BAN
if (
theCase.decision === CaseDecision.REJECTING ||
theCase.decision === CaseDecision.DISMISSING ||
isInvestigationCase(theCase.type)
) {
return `Úrskurðað ${formatDate(
theCase.courtEndTime,
'PPP',
)} kl. ${formatDate(theCase.courtEndTime, TIME_FORMAT)}`
}
if (theCase.isValidToDateInThePast) {
return `${
isTravelBan ? 'Farbann' : 'Gæsla' // ACCEPTING
} rann út ${formatDate(theCase.validToDate, 'PPP')} kl. ${formatDate(
theCase.validToDate,
TIME_FORMAT,
)}`
}
return `${
isTravelBan ? 'Farbann' : 'Gæsla' // ACCEPTING
} til ${formatDate(theCase.validToDate, 'PPP')} kl. ${formatDate(
theCase.validToDate,
TIME_FORMAT,
)}`
}
const canCaseFilesBeOpened = () => {
const isAppealGracePeriodExpired = workingCase.isAppealGracePeriodExpired
const isProsecutorWithAccess =
user?.role === UserRole.PROSECUTOR &&
user.institution?.id === workingCase.creatingProsecutor?.institution?.id
const isCourtRoleWithAccess =
user?.role === UserRole.JUDGE || user?.role === UserRole.REGISTRAR
if (
!isAppealGracePeriodExpired &&
(isProsecutorWithAccess || isCourtRoleWithAccess)
) {
return true
} else {
return false
}
}
return (
<FormContentContainer>
<Box marginBottom={5}>
<Box marginBottom={3}>
<Button
variant="text"
preTextIcon="arrowBack"
onClick={() => router.push(Constants.REQUEST_LIST_ROUTE)}
>
Til baka
</Button>
</Box>
<Box display="flex" justifyContent="spaceBetween">
<Box>
<Box marginBottom={1}>
<Text as="h1" variant="h1">
{titleForCase(workingCase)}
</Text>
</Box>
<Text as="h5" variant="h5">
{subtitleForCase(workingCase)}
</Text>
</Box>
<Box display="flex" flexDirection="column">
{
// Custody restrictions
workingCase.decision === CaseDecision.ACCEPTING &&
workingCase.type === CaseType.CUSTODY &&
workingCase.custodyRestrictions
?.filter((restriction) =>
[
CaseCustodyRestrictions.ISOLATION,
CaseCustodyRestrictions.VISITAION,
CaseCustodyRestrictions.COMMUNICATION,
CaseCustodyRestrictions.MEDIA,
].includes(restriction),
)
?.map((custodyRestriction, index) => (
<Box marginTop={index > 0 ? 1 : 0} key={index}>
<Tag
variant={getRestrictionTagVariant(custodyRestriction)}
outlined
disabled
>
{getShortRestrictionByValue(custodyRestriction)}
</Tag>
</Box>
))
}
{
// Alternative travel ban restrictions
(workingCase.decision ===
CaseDecision.ACCEPTING_ALTERNATIVE_TRAVEL_BAN ||
(workingCase.type === CaseType.TRAVEL_BAN &&
workingCase.decision === CaseDecision.ACCEPTING)) &&
workingCase.custodyRestrictions
?.filter((restriction) =>
[
CaseCustodyRestrictions.ALTERNATIVE_TRAVEL_BAN_REQUIRE_NOTIFICATION,
CaseCustodyRestrictions.ALTERNATIVE_TRAVEL_BAN_CONFISCATE_PASSPORT,
].includes(restriction),
)
?.map((custodyRestriction, index) => (
<Box marginTop={index > 0 ? 1 : 0} key={index}>
<Tag
variant={getRestrictionTagVariant(custodyRestriction)}
outlined
disabled
>
{getShortRestrictionByValue(custodyRestriction)}
</Tag>
</Box>
))
}
</Box>
</Box>
</Box>
<Box marginBottom={workingCase.isMasked ? 15 : 6}>
<InfoCard
data={[
{
title: 'LÖKE málsnúmer',
value: workingCase.policeCaseNumber,
},
{
title: 'Málsnúmer héraðsdóms',
value: workingCase.courtCaseNumber,
},
{
title: 'Embætti',
value: `${
workingCase.creatingProsecutor?.institution?.name ??
'Ekki skráð'
}`,
},
{ title: 'Dómstóll', value: workingCase.court?.name },
{ title: 'Ákærandi', value: workingCase.prosecutor?.name },
{ title: 'Dómari', value: workingCase.judge?.name },
{ title: 'Dómritari', value: workingCase.registrar?.name },
// Conditionally add this field based on case type
...(isInvestigationCase(workingCase.type)
? [
{
title: 'Tegund kröfu',
value: capitalize(caseTypes[workingCase.type]),
},
]
: []),
]}
accusedName={workingCase.accusedName}
accusedNationalId={workingCase.accusedNationalId}
accusedAddress={workingCase.accusedAddress}
defender={{
name: workingCase.defenderName ?? '',
email: workingCase.defenderEmail,
phoneNumber: workingCase.defenderPhoneNumber,
defenderIsSpokesperson: workingCase.defenderIsSpokesperson,
}}
/>
</Box>
{(workingCase.accusedAppealDecision === CaseAppealDecision.POSTPONE ||
workingCase.accusedAppealDecision === CaseAppealDecision.APPEAL ||
workingCase.prosecutorAppealDecision === CaseAppealDecision.POSTPONE ||
workingCase.prosecutorAppealDecision === CaseAppealDecision.APPEAL) &&
(user?.role === UserRole.JUDGE || user?.role === UserRole.REGISTRAR) &&
user?.institution?.type !== InstitutionType.HIGH_COURT &&
!workingCase.isMasked && (
<Box marginBottom={7}>
<AppealSection
workingCase={workingCase}
setAccusedAppealDate={setAccusedAppealDate}
setProsecutorAppealDate={setProsecutorAppealDate}
withdrawAccusedAppealDate={withdrawAccusedAppealDate}
withdrawProsecutorAppealDate={withdrawProsecutorAppealDate}
/>
</Box>
)}
{user?.role !== UserRole.STAFF && !workingCase.isMasked && (
<Box marginBottom={5} data-testid="accordionItems">
<Accordion>
<PoliceRequestAccordionItem workingCase={workingCase} />
<CourtRecordAccordionItem workingCase={workingCase} />
<RulingAccordionItem workingCase={workingCase} />
<AccordionItem
id="caseFilesAccordionItem"
label={
<Box display="flex" alignItems="center" overflow="hidden">
{`Rannsóknargögn (${
workingCase.caseFiles ? workingCase.caseFiles.length : 0
})`}
{user &&
[UserRole.JUDGE, UserRole.REGISTRAR].includes(
user.role,
) && (
<AnimatePresence>
{uploadState === UploadState.UPLOAD_ERROR && (
<UploadStateMessage
icon="warning"
iconColor="red600"
message={formatMessage(
signedVerdictOverview.someFilesUploadedToCourtText,
)}
/>
)}
{uploadState === UploadState.ALL_UPLOADED && (
<UploadStateMessage
icon="checkmark"
iconColor="blue400"
message={formatMessage(
signedVerdictOverview.allFilesUploadedToCourtText,
)}
/>
)}
</AnimatePresence>
)}
</Box>
}
labelVariant="h3"
>
<CaseFileList
caseId={workingCase.id}
files={workingCase.caseFiles ?? []}
canOpenFiles={canCaseFilesBeOpened()}
hideIcons={user?.role === UserRole.PROSECUTOR}
handleRetryClick={(id: string) =>
workingCase.caseFiles &&
uploadFilesToCourt([
workingCase.caseFiles[
workingCase.caseFiles.findIndex((file) => file.id === id)
],
])
}
/>
{user &&
[UserRole.JUDGE, UserRole.REGISTRAR].includes(user?.role) && (
<Box display="flex" justifyContent="flexEnd">
{(workingCase.caseFiles || []).length ===
0 ? null : uploadState ===
UploadState.NONE_CAN_BE_UPLOADED ? (
<InfoBox
text={formatMessage(
signedVerdictOverview.uploadToCourtAllBrokenText,
)}
/>
) : (
<Button
size="small"
onClick={() =>
uploadFilesToCourt(workingCase.caseFiles)
}
loading={uploadState === UploadState.UPLOADING}
disabled={
uploadState === UploadState.UPLOADING ||
uploadState === UploadState.ALL_UPLOADED
}
>
{formatMessage(
uploadState === UploadState.UPLOAD_ERROR
? signedVerdictOverview.retryUploadToCourtButtonText
: signedVerdictOverview.uploadToCourtButtonText,
)}
</Button>
)}
</Box>
)}
</AccordionItem>
</Accordion>
</Box>
)}
{!workingCase.isMasked && (
<Box marginBottom={user?.role === UserRole.PROSECUTOR ? 7 : 15}>
{user?.role !== UserRole.STAFF && (
<>
<Box marginBottom={3}>
<PdfButton
caseId={workingCase.id}
title={formatMessage(core.pdfButtonRequest)}
pdfType="request"
/>
</Box>
<Box marginBottom={3}>
<PdfButton
caseId={workingCase.id}
title={formatMessage(core.pdfButtonRuling)}
pdfType="ruling?shortVersion=false"
/>
</Box>
</>
)}
<Box marginBottom={3}>
<PdfButton
caseId={workingCase.id}
title={formatMessage(core.pdfButtonRulingShortVersion)}
pdfType="ruling?shortVersion=true"
/>
</Box>
{workingCase.type === CaseType.CUSTODY &&
workingCase.state === CaseState.ACCEPTED &&
workingCase.decision === CaseDecision.ACCEPTING && (
<PdfButton
caseId={workingCase.id}
title={formatMessage(core.pdfButtonCustodyNotice)}
pdfType="custodyNotice"
/>
)}
</Box>
)}
{user?.role === UserRole.PROSECUTOR &&
user.institution?.id === workingCase.prosecutor?.institution?.id &&
isRestrictionCase(workingCase.type) && (
<Box marginBottom={9}>
<Box marginBottom={3}>
<Text variant="h3">
Opna mál fyrir öðru embætti{' '}
<Tooltip text="Hægt er að gefa öðru embætti aðgang að málinu. Viðkomandi embætti getur skoðað málið og farið fram á framlengingu." />
</Text>
</Box>
<BlueBox>
<Box display="flex">
<Box flexGrow={1} marginRight={2}>
<Select
name="sharedWithProsecutorsOfficeId"
label="Veldu embætti"
placeholder="Velja embætti sem tekur við málinu"
size="sm"
icon={
workingCase.sharedWithProsecutorsOffice
? 'checkmark'
: undefined
}
options={prosecutorsOffices
.map((prosecutorsOffice) => ({
label: prosecutorsOffice.name,
value: prosecutorsOffice.id,
}))
.filter((t) => t.value !== user?.institution?.id)}
value={
workingCase.sharedWithProsecutorsOffice
? {
label: workingCase.sharedWithProsecutorsOffice.name,
value: workingCase.sharedWithProsecutorsOffice.id,
}
: selectedSharingInstitutionId
? {
label: (selectedSharingInstitutionId as ReactSelectOption)
.label,
value: (selectedSharingInstitutionId as ReactSelectOption)
.value as string,
}
: null
}
onChange={(so: ValueType<ReactSelectOption>) =>
setSelectedSharingInstitutionId(so)
}
disabled={Boolean(workingCase.sharedWithProsecutorsOffice)}
/>
</Box>
<Button
size="small"
disabled={
!selectedSharingInstitutionId &&
!workingCase.sharedWithProsecutorsOffice
}
onClick={() =>
shareCaseWithAnotherInstitution(
selectedSharingInstitutionId,
)
}
>
{workingCase.sharedWithProsecutorsOffice
? 'Loka aðgangi'
: 'Opna mál'}
</Button>
</Box>
</BlueBox>
</Box>
)}
</FormContentContainer>
)
}
export default SignedVerdictOverviewForm | the_stack |
'use strict';
var assert = require('proclaim');
var rawCookie = require('@segment/cookie');
var sinon = require('sinon');
var analytics = require('../build');
var Analytics = require('../build').constructor;
var cookie = Analytics.cookie;
var store = Analytics.store;
var memory = Analytics.memory;
var user = analytics.user();
var User = user.User;
describe('user', function() {
var cookieKey = user._options.cookie.key;
var localStorageKey = user._options.localStorage.key;
beforeEach(function() {
user = new User();
user.reset();
});
afterEach(function() {
user.reset();
cookie.remove(cookieKey);
store.remove(cookieKey);
store.remove(localStorageKey);
store.remove('_sio');
cookie.remove('_sio');
rawCookie('_sio', null);
});
describe('()', function() {
beforeEach(function() {
cookie.set(cookieKey, 'my id');
store.set(localStorageKey, { trait: true });
});
it('should not reset user id and traits', function() {
var user = new User();
assert(user.id() === 'my id');
assert(user.traits().trait === true);
});
it('id() should fallback to localStorage', function() {
var user = new User();
user.id('id');
// delete the cookie.
cookie.remove(cookieKey);
// verify cookie is deleted.
assert.equal(cookie.get(cookieKey), null);
// verify id() returns the id even when cookie is deleted.
assert.equal(user.id(), 'id');
// verify cookie value is restored from localStorage.
assert.equal(cookie.get(cookieKey), 'id');
});
it('id() should not fallback to localStorage when disabled', function() {
var user = new User();
user.options({
localStorageFallbackDisabled: true
});
user.id('id');
// delete the cookie.
cookie.remove(cookieKey);
// verify cookie is deleted.
assert.equal(cookie.get(cookieKey), null);
// verify id() does not return the id when cookie is deleted.
assert.equal(user.id(), null);
});
it('should pick the old "_sio" anonymousId', function() {
rawCookie('_sio', 'anonymous-id----user-id');
var user = new User();
assert(user.anonymousId() === 'anonymous-id');
});
it('should not pick the old "_sio" if anonymous id is present', function() {
rawCookie('_sio', 'old-anonymous-id----user-id');
cookie.set('ajs_anonymous_id', 'new-anonymous-id');
assert(new User().anonymousId() === 'new-anonymous-id');
});
it('should create anonymous id if missing', function() {
var user = new User();
assert(user.anonymousId().length === 36);
});
it('should not overwrite anonymous id', function() {
cookie.set('ajs_anonymous_id', 'anonymous');
assert(new User().anonymousId() === 'anonymous');
});
});
describe('#id', function() {
describe('when cookies are disabled', function() {
beforeEach(function() {
sinon.stub(cookie, 'get', function() {});
user = new User();
});
afterEach(function() {
cookie.get.restore();
});
it('should get an id from the store', function() {
store.set(cookieKey, 'id');
assert(user.id() === 'id');
});
it('should get an id when not persisting', function() {
user.options({ persist: false });
user._id = 'id';
assert(user.id() === 'id');
});
it('should set an id to the store', function() {
user.id('id');
assert(store.get(cookieKey) === 'id');
});
it('should set the id when not persisting', function() {
user.options({ persist: false });
user.id('id');
assert(user._id === 'id');
});
it('should be null by default', function() {
assert(user.id() === null);
});
it('should not reset anonymousId if the user didnt have previous id', function() {
var prev = user.anonymousId();
user.id('foo');
user.id('foo');
user.id('foo');
assert(user.anonymousId() === prev);
});
it('should reset anonymousId if the user id changed', function() {
var prev = user.anonymousId();
user.id('foo');
user.id('baz');
assert(user.anonymousId() !== prev);
assert(user.anonymousId().length === 36);
});
it('should not reset anonymousId if the user id changed to null', function() {
var prev = user.anonymousId();
user.id('foo');
user.id(null);
assert(user.anonymousId() === prev);
assert(user.anonymousId().length === 36);
});
});
describe('when cookies and localStorage are disabled', function() {
beforeEach(function() {
sinon.stub(cookie, 'get', function() {});
store.enabled = false;
user = new User();
});
afterEach(function() {
store.enabled = true;
cookie.get.restore();
});
it('should get an id from the memory', function() {
memory.set(cookieKey, 'id');
assert(user.id() === 'id');
});
it('should get an id when not persisting', function() {
user.options({ persist: false });
user._id = 'id';
assert(user.id() === 'id');
});
it('should set an id to the memory', function() {
user.id('id');
assert(memory.get(cookieKey) === 'id');
});
it('should set the id when not persisting', function() {
user.options({ persist: false });
user.id('id');
assert(user._id === 'id');
});
it('should be null by default', function() {
assert(user.id() === null);
});
it('should not reset anonymousId if the user didnt have previous id', function() {
var prev = user.anonymousId();
user.id('foo');
user.id('foo');
user.id('foo');
assert(user.anonymousId() === prev);
});
it('should reset anonymousId if the user id changed', function() {
var prev = user.anonymousId();
user.id('foo');
user.id('baz');
assert(user.anonymousId() !== prev);
assert(user.anonymousId().length === 36);
});
it('should not reset anonymousId if the user id changed to null', function() {
var prev = user.anonymousId();
user.id('foo');
user.id(null);
assert(user.anonymousId() === prev);
assert(user.anonymousId().length === 36);
});
});
describe('when cookies are enabled', function() {
it('should get an id from the cookie', function() {
cookie.set(cookieKey, 'id');
assert(user.id() === 'id');
});
it('should get an id when not persisting', function() {
user.options({ persist: false });
user._id = 'id';
assert(user.id() === 'id');
});
it('should set an id to the cookie', function() {
user.id('id');
assert(cookie.get(cookieKey) === 'id');
});
it('should set the id when not persisting', function() {
user.options({ persist: false });
user.id('id');
assert(user._id === 'id');
});
it('should be null by default', function() {
assert(user.id() === null);
});
it('should not reset anonymousId if the user didnt have previous id', function() {
var prev = user.anonymousId();
user.id('foo');
user.id('foo');
user.id('foo');
assert(user.anonymousId() === prev);
});
it('should reset anonymousId if the user id changed', function() {
var prev = user.anonymousId();
user.id('foo');
user.id('baz');
assert(user.anonymousId() !== prev);
assert(user.anonymousId().length === 36);
});
});
});
describe('#anonymousId', function() {
var noop = { set: function() {}, get: function() {} };
var storage = user.storage;
afterEach(function() {
user.storage = storage;
});
describe('when cookies are disabled', function() {
beforeEach(function() {
sinon.stub(cookie, 'get', function() {});
user = new User();
});
afterEach(function() {
cookie.get.restore();
});
it('should get an id from the store', function() {
store.set('ajs_anonymous_id', 'anon-id');
assert(user.anonymousId() === 'anon-id');
});
it('should set an id to the store', function() {
user.anonymousId('anon-id');
assert(store.get('ajs_anonymous_id') === 'anon-id');
});
it('should return anonymousId using the store', function() {
user.storage = function() {
return noop;
};
assert(user.anonymousId() === undefined);
});
});
describe('when cookies and localStorage are disabled', function() {
beforeEach(function() {
sinon.stub(cookie, 'get', function() {});
store.enabled = false;
user = new User();
});
afterEach(function() {
store.enabled = true;
cookie.get.restore();
});
it('should get an id from the memory', function() {
memory.set('ajs_anonymous_id', 'anon-id');
assert(user.anonymousId() === 'anon-id');
});
it('should set an id to the memory', function() {
user.anonymousId('anon-id');
assert(memory.get('ajs_anonymous_id') === 'anon-id');
});
it('should return anonymousId using the store', function() {
user.storage = function() {
return noop;
};
assert(user.anonymousId() === undefined);
});
});
describe('when cookies are enabled', function() {
it('should get an id from the cookie', function() {
cookie.set('ajs_anonymous_id', 'anon-id');
assert(user.anonymousId() === 'anon-id');
});
it('should set an id to the cookie', function() {
user.anonymousId('anon-id');
assert(cookie.get('ajs_anonymous_id') === 'anon-id');
});
it('should return anonymousId using the store', function() {
user.storage = function() {
return noop;
};
assert(user.anonymousId() === undefined);
});
it('should set anonymousId in both cookie and localStorage', function() {
var user = new User();
user.anonymousId('anon0');
assert.equal(cookie.get('ajs_anonymous_id'), 'anon0');
assert.equal(store.get('ajs_anonymous_id'), 'anon0');
});
it('should not set anonymousId in localStorage when localStorage fallback is disabled', function() {
var user = new User();
user.options({
localStorageFallbackDisabled: true
});
user.anonymousId('anon0');
assert.equal(cookie.get('ajs_anonymous_id'), 'anon0');
assert.equal(store.get('ajs_anonymous_id'), null);
});
it('should copy value from cookie to localStorage', function() {
var user = new User();
cookie.set('ajs_anonymous_id', 'anon1');
assert.equal(user.anonymousId(), 'anon1');
assert.equal(store.get('ajs_anonymous_id'), 'anon1');
});
it('should not copy value from cookie to localStorage when localStorage fallback is disabled', function() {
var user = new User();
user.options({
localStorageFallbackDisabled: true
});
cookie.set('ajs_anonymous_id', 'anon1');
assert.equal(user.anonymousId(), 'anon1');
assert.equal(store.get('ajs_anonymous_id'), null);
});
it('should fall back to localStorage when cookie is not set', function() {
var user = new User();
user.anonymousId('anon12');
assert.equal(cookie.get('ajs_anonymous_id'), 'anon12');
// delete the cookie
cookie.remove('ajs_anonymous_id');
assert.equal(cookie.get('ajs_anonymous_id'), null);
// verify anonymousId() returns the correct id even when there's no cookie
assert.equal(user.anonymousId(), 'anon12');
// verify cookie value is restored from localStorage
assert.equal(cookie.get('ajs_anonymous_id'), 'anon12');
});
it('should not fall back to localStorage when cookie is not set and localStorage fallback is disabled', function() {
var user = new User();
user.options({
localStorageFallbackDisabled: true
});
user.anonymousId('anon12');
assert.equal(cookie.get('ajs_anonymous_id'), 'anon12');
// delete the cookie
cookie.remove('ajs_anonymous_id');
assert.equal(cookie.get('ajs_anonymous_id'), null);
// verify anonymousId() does not return the id when there's no cookie.
assert.notEqual(user.anonymousId(), 'anon12');
});
it('should write to both cookie and localStorage when generating a new anonymousId', function() {
var user = new User();
var anonId = user.anonymousId();
assert.notEqual(anonId, null);
assert.equal(cookie.get('ajs_anonymous_id'), anonId);
assert.equal(store.get('ajs_anonymous_id'), anonId);
});
it('should not write to both cookie and localStorage when generating a new anonymousId and localStorage fallback is disabled', function() {
var user = new User();
user.options({
localStorageFallbackDisabled: true
});
var anonId = user.anonymousId();
assert.notEqual(anonId, null);
assert.equal(cookie.get('ajs_anonymous_id'), anonId);
assert.equal(store.get('ajs_anonymous_id'), null);
});
});
});
describe('#traits', function() {
it('should get traits', function() {
store.set(localStorageKey, { trait: true });
assert.deepEqual(user.traits(), { trait: true });
});
it('should get a copy of traits', function() {
store.set(localStorageKey, { trait: true });
assert(user.traits() !== user._traits);
});
it('should get traits when not persisting', function() {
user.options({ persist: false });
user._traits = { trait: true };
assert.deepEqual(user.traits(), { trait: true });
});
it('should get a copy of traits when not persisting', function() {
user.options({ persist: false });
user._traits = { trait: true };
assert(user.traits() !== user._traits);
});
it('should set traits', function() {
user.traits({ trait: true });
assert(store.get(localStorageKey), { trait: true });
});
it('should set the id when not persisting', function() {
user.options({ persist: false });
user.traits({ trait: true });
assert.deepEqual(user._traits, { trait: true });
});
it('should default traits to an empty object', function() {
user.traits(null);
assert.deepEqual(store.get(localStorageKey), {});
});
it('should default traits to an empty object when not persisting', function() {
user.options({ persist: false });
user.traits(null);
assert.deepEqual(user._traits, {});
});
it('should be an empty object by default', function() {
assert.deepEqual(user.traits(), {});
});
});
describe('#options', function() {
it('should get options', function() {
assert(user.options() === user._options);
});
it('should set options with defaults', function() {
user.options({ option: true });
assert.deepEqual(user._options, {
option: true,
persist: true,
cookie: {
key: 'ajs_user_id',
oldKey: 'ajs_user'
},
localStorage: {
key: 'ajs_user_traits'
}
});
});
});
describe('#save', function() {
it('should save an id to a cookie', function() {
user.id('id');
user.save();
assert(cookie.get(cookieKey) === 'id');
});
it('should save an id to localStorage', function() {
user.id('id');
user.save();
assert.equal(store.get(cookieKey), 'id');
});
it('should not save an id to localStorage when localStorage fallback is disabled', function() {
user.options({
localStorageFallbackDisabled: true
});
user.id('id');
user.save();
assert.equal(store.get(cookieKey), null);
});
it('should save traits to local storage', function() {
user.traits({ trait: true });
user.save();
assert(store.get(localStorageKey), { trait: true });
});
it('shouldnt save if persist is false', function() {
user.options({ persist: false });
user.id('id');
user.save();
assert(cookie.get(cookieKey) === null);
});
});
describe('#logout', function() {
it('should reset an id and traits', function() {
user.id('id');
user.anonymousId('anon-id');
user.traits({ trait: true });
user.logout();
assert(cookie.get('ajs_anonymous_id') === null);
assert(user.id() === null);
assert(user.traits(), {});
});
it('should clear id in cookie', function() {
user.id('id');
user.save();
user.logout();
assert(cookie.get(cookieKey) === null);
});
it('should clear id in local storage', function() {
user.id('id');
user.save();
user.logout();
assert(store.get(cookieKey) === undefined);
});
it('should clear traits in local storage', function() {
user.traits({ trait: true });
user.save();
user.logout();
assert(store.get(localStorageKey) === undefined);
});
});
describe('#identify', function() {
it('should save an id', function() {
user.identify('id');
assert(user.id() === 'id');
assert(cookie.get(cookieKey) === 'id');
});
it('should save traits', function() {
user.identify(null, { trait: true });
assert.deepEqual(user.traits(), { trait: true });
assert.deepEqual(store.get(localStorageKey), { trait: true });
});
it('should save an id and traits', function() {
user.identify('id', { trait: true });
assert(user.id() === 'id');
assert.deepEqual(user.traits(), { trait: true });
assert(cookie.get(cookieKey) === 'id');
assert.deepEqual(store.get(localStorageKey), { trait: true });
});
it('should extend existing traits', function() {
user.traits({ one: 1 });
user.identify('id', { two: 2 });
assert.deepEqual(user.traits(), { one: 1, two: 2 });
assert.deepEqual(store.get(localStorageKey), { one: 1, two: 2 });
});
it('shouldnt extend existing traits for a new id', function() {
user.id('id');
user.traits({ one: 1 });
user.identify('new', { two: 2 });
assert.deepEqual(user.traits(), { two: 2 });
assert.deepEqual(store.get(localStorageKey), { two: 2 });
});
it('should reset traits for a new id', function() {
user.id('id');
user.traits({ one: 1 });
user.identify('new');
assert.deepEqual(user.traits(), {});
assert.deepEqual(store.get(localStorageKey), {});
});
});
describe('#load', function() {
it('should load an empty user', function() {
user.load();
assert(user.id() === null);
assert.deepEqual(user.traits(), {});
});
it('should load an id from a cookie', function() {
cookie.set(cookieKey, 'id');
user.load();
assert(user.id() === 'id');
});
it('should load traits from local storage', function() {
store.set(localStorageKey, { trait: true });
user.load();
assert.deepEqual(user.traits(), { trait: true });
});
it('should load from an old cookie', function() {
cookie.set(user._options.cookie.oldKey, {
id: 'old',
traits: { trait: true }
});
user.load();
assert(user.id() === 'old');
assert.deepEqual(user.traits(), { trait: true });
});
});
}); | the_stack |
import { XStore, XType, XString } from '../pastore';
import { delay } from './helpers';
// Shape of the test store's state tree: covers primitives, an array of
// objects, and deeply nested objects so every reduce path is exercised.
interface SimpleState extends XType {
    name: string,
    age: number,
    isMale: boolean,
    pets: Array<{
        name: string,
        age: number,
        isDog: boolean
    }>,
    address: {
        province: string,
        city: string,
        homeInfo: {
            // NOTE(review): "isRend" looks like a typo for "isRent" — kept
            // as-is because tests reference this key throughout.
            isRend: {
                value: boolean
            }
        }
    }
}
// Shared store instance used by every suite below; suites snapshot and
// restore state through myStore.preState / myStore.imState.
let myStore = new XStore<SimpleState>({
    name: 'Peter',
    age: 10,
    isMale: true,
    pets: [
        {
            name: 'Puppy',
            age: 1,
            isDog: true
        }
    ],
    address: {
        province: 'GD',
        city: 'GZ',
        homeInfo: {
            isRend: {
                value: true
            }
        }
    }
});
// XStore.getValueByPath resolves a value in the immutable state tree from
// an array of string path segments.
describe('Test: Store.getValueByPath', function () {
    // pending()
    it('root', function () {
        // An empty path resolves to the state root itself.
        expect(XStore.getValueByPath(myStore.imState, [])).toBe(myStore.imState)
    })
    it('root prop', function () {
        expect(XStore.getValueByPath(myStore.imState, ['name'])).toBe(myStore.imState.name)
    })
    it('array prop', function () {
        // Array indices are addressed with string path segments.
        expect(XStore.getValueByPath(myStore.imState, ['pets'])).toBe(myStore.imState.pets)
        expect(XStore.getValueByPath(myStore.imState, ['pets', '0'])).toBe(myStore.imState.pets[0])
    })
    it('nested prop', function () {
        expect(XStore.getValueByPath(myStore.imState, ['pets', '0', 'name'])).toBe(myStore.imState.pets[0].name)
    })
})
// getNewReference must replace object identities along the given path
// (copy-on-write) while preserving values, __xpath__ tags, and the
// identity of everything off the path.
describe('Test: Store.updateReferenceInPath', function () {
    // pending();
    // Freeze the previous state along the target path so any accidental
    // in-place mutation fails loudly instead of passing silently.
    myStore.preState = myStore.imState;
    Object.freeze(myStore.preState);
    Object.freeze(myStore.preState.address);
    Object.freeze(myStore.preState.address.homeInfo);
    Object.freeze(myStore.preState.address.homeInfo.isRend);
    myStore.getNewReference(['address', 'homeInfo'])
    describe('update reference in path', function () {
        it('reference did updated', function () {
            expect(myStore.imState).not.toBe(myStore.preState);
            expect(myStore.imState.address).not.toBe(myStore.preState.address);
            expect(myStore.imState.address.homeInfo).not.toBe(myStore.preState.address.homeInfo);
        })
        it('keep value and __path__ in path', function () {
            // New references must stay value-equal and keep their __xpath__ tags.
            expect(myStore.imState).toEqual(myStore.preState);
            expect((myStore.imState as XType).__xpath__)
                .toEqual((myStore.preState as XType).__xpath__);
            expect((myStore.imState.address as XType).__xpath__)
                .toEqual((myStore.preState.address as XType).__xpath__);
            expect((myStore.imState.address.homeInfo as XType).__xpath__)
                .toEqual((myStore.preState.address.homeInfo as XType).__xpath__);
        })
    })
    describe('keep reference out of path', function () {
        it('keep reference out of path: brother path', function () {
            expect(myStore.imState.pets).toBe(myStore.preState.pets)
        })
        it('keep reference out of path: child path', function () {
            expect(myStore.imState.address.homeInfo.isRend).toBe(myStore.preState.address.homeInfo.isRend)
        })
    })
})
describe('Test: reduce operation "set" ', function () {
// pending();
// Sanity checks on jasmine's matcher semantics: toBe is identity,
// toEqual is structural equality — primitives vs String wrapper objects.
describe('primitive value test', function () {
    // pending();
    it('toBe', function () {
        expect('test').toBe('test')
        expect(new String('test')).not.toBe('test')
        expect(new String('test')).not.toBe(new String('test'))
    })
    it('toEqual', function () {
        expect('test').toEqual('test')
        expect(new String('test')).toEqual('test')
        expect(new String('test')).toEqual(new String('test'))
    })
})
describe('set at root', function () {
describe('simple value', function () {
    afterEach(function () {
        // Undo the effects of each test by restoring the snapshot.
        myStore.imState = myStore.preState;
    })
    // Replacing the entire root state is supported but not recommended.
    it('entire root state', async function () {
        myStore.preState = myStore.imState;
        myStore.set(myStore.imState, {
            name: 'Amy',
            age: 12,
            isMale: false,
            pets: [
                {
                    name: 'Kitty',
                    age: 2,
                    isDog: false
                }
            ],
            address: {
                province: 'ZJ',
                city: 'HZ',
                homeInfo: {
                    isRend: {
                        value: false
                    }
                }
            }
        })
        // Reduction is asynchronous: state is unchanged until the next tick.
        expect(myStore.imState).toEqual(myStore.preState)
        await delay(0)
        expect(myStore.imState).toEqual(myStore.toXType({
            name: 'Amy',
            age: 12,
            isMale: false,
            pets: [
                {
                    name: 'Kitty',
                    age: 2,
                    isDog: false
                }
            ],
            address: {
                province: 'ZJ',
                city: 'HZ',
                homeInfo: {
                    isRend: {
                        value: false
                    }
                }
            }
        }, ''))
    })
    it('bool', async function () {
        myStore.set(myStore.imState.isMale, false)
        expect(myStore.imState.isMale).toEqual(true)
        await delay(0)
        expect(myStore.imState.isMale).toEqual(false)
        // The new node carries its path tag; root reference is replaced.
        expect((myStore.imState.isMale as XType).__xpath__).toEqual('.isMale')
        expect(myStore.imState).not.toBe(myStore.preState)
        expect(myStore.imState.isMale).not.toBe(myStore.preState.isMale)
    })
    it('number', async function () {
        myStore.set(myStore.imState.age, 12)
        expect(myStore.imState.age).toEqual(10)
        await delay(0)
        expect(myStore.imState.age).toEqual(12)
        expect((myStore.imState.age as XType).__xpath__).toEqual('.age')
        expect(myStore.imState).not.toBe(myStore.preState)
        expect(myStore.imState.age).not.toBe(myStore.preState.age)
    })
    it('string', async function () {
        myStore.set(myStore.imState.name, 'Amy')
        expect(myStore.imState.name).toEqual('Peter')
        await delay(0)
        expect(myStore.imState.name).toEqual('Amy')
        expect((myStore.imState.name as XType).__xpath__).toEqual('.name')
        expect(myStore.imState).not.toBe(myStore.preState)
        expect(myStore.imState.name).not.toBe(myStore.preState.name)
    })
})
describe('reference value', function () {
    // pending()
    afterEach(function () {
        // Undo the effects of each test by restoring the snapshot.
        myStore.imState = myStore.preState;
    })
    it('array', async function () {
        myStore.set(myStore.imState.pets, [{
            name: 'Kitty',
            age: 2,
            isDog: false
        }])
        // Unchanged until the async reduction runs.
        expect(myStore.imState.pets).toEqual(myStore.preState.pets)
        await delay(0)
        expect(myStore.imState.pets).toEqual(myStore.toXType([{
            name: 'Kitty',
            age: 2,
            isDog: false
        }], '.pets'))
        expect(myStore.imState).not.toBe(myStore.preState)
        expect(myStore.imState.pets).not.toBe(myStore.preState.pets)
    })
    it('object', async function () {
        myStore.set(myStore.imState.address, {
            province: 'ZJ',
            city: 'HZ',
            homeInfo: {
                isRend: {
                    value: false
                }
            }
        })
        expect(myStore.imState.address).toEqual(myStore.preState.address)
        await delay(0)
        expect(myStore.imState.address).toEqual(myStore.toXType({
            province: 'ZJ',
            city: 'HZ',
            homeInfo: {
                isRend: {
                    value: false
                }
            }
        }, '.address'))
        expect(myStore.imState).not.toBe(myStore.preState)
        expect(myStore.imState.address).not.toBe(myStore.preState.address)
    })
})
describe('batch operations', function () {
    const lifeCycleFunc = [
        'stateWillReduceOperations',
        'stateWillApplyOperation',
        'stateDidAppliedOperation',
        'stateDidReducedOperations'
    ]
    let callSequence: Array<string> = [];
    /** Lifecycle hook test */
    beforeAll(function () {
        // Override the lifecycle hooks so their call order can be recorded.
        lifeCycleFunc.forEach(funcName => {
            myStore[funcName] = function () {
                callSequence.push(funcName)
                return true
            }
        })
    })
    afterEach(function () {
        // Undo the effects of each test by restoring the snapshot.
        myStore.imState = myStore.preState;
    })
    it('batch operations test', async function () {
        /** In real use, setting several sibling values in a row can be replaced by a single merge instead of multiple sets */
        let state = myStore.imState;
        myStore.set(state.isMale, false)
            .set(state.age, 12)
            .set(state.name, 'Amy')
            .set(state.pets, [{
                name: 'Kitty',
                age: 2,
                isDog: false
            }])
            .set(state.address, {
                province: 'ZJ',
                city: 'HZ',
                homeInfo: {
                    isRend: {
                        value: false
                    }
                }
            })
        // Still the original values before the async reduction runs.
        expect(myStore.imState).toEqual(myStore.toXType({
            name: 'Peter',
            age: 10,
            isMale: true,
            pets: [
                {
                    name: 'Puppy',
                    age: 1,
                    isDog: true
                }
            ],
            address: {
                province: 'GD',
                city: 'GZ',
                homeInfo: {
                    isRend: {
                        value: true
                    }
                }
            }
        }, ''))
        await delay(0)
        expect(myStore.imState).toEqual(myStore.toXType({
            name: 'Amy',
            age: 12,
            isMale: false,
            pets: [
                {
                    name: 'Kitty',
                    age: 2,
                    isDog: false
                }
            ],
            address: {
                province: 'ZJ',
                city: 'HZ',
                homeInfo: {
                    isRend: {
                        value: false
                    }
                }
            }
        }, ''))
        // Verify the lifecycle hook call sequence: one reduce wrapping
        // five apply/applied pairs (one pair per queued set operation).
        expect(callSequence).toEqual([
            lifeCycleFunc[0],
            lifeCycleFunc[1], lifeCycleFunc[2],
            lifeCycleFunc[1], lifeCycleFunc[2],
            lifeCycleFunc[1], lifeCycleFunc[2],
            lifeCycleFunc[1], lifeCycleFunc[2],
            lifeCycleFunc[1], lifeCycleFunc[2],
            lifeCycleFunc[3],
        ])
    })
})
})
describe('set at nested prop', function () {
    afterEach(function () {
        // Undo the effects of each test by restoring the snapshot.
        myStore.imState = myStore.preState;
    })
    it('nested object', async function () {
        myStore.set(myStore.imState.address.homeInfo.isRend.value, false);
        expect(myStore.imState.address.homeInfo.isRend.value).toEqual(true);
        await delay(0)
        expect(myStore.imState.address.homeInfo.isRend.value).toEqual(false)
        expect((myStore.imState.address.homeInfo.isRend.value as XType).__xpath__).toEqual('.address.homeInfo.isRend.value')
        // References along the updated path are replaced
        expect(myStore.imState).not.toBe(myStore.preState)
        expect(myStore.imState.address).not.toBe(myStore.preState.address)
        expect(myStore.imState.address.homeInfo).not.toBe(myStore.preState.address.homeInfo)
        expect(myStore.imState.address.homeInfo.isRend).not.toBe(myStore.preState.address.homeInfo.isRend)
        // References off the path keep their identity
        expect(myStore.imState.pets).toBe(myStore.preState.pets)
    })
    it('nested array element', async function () {
        myStore.set(myStore.imState.pets[0], {
            name: 'Kitty',
            age: 2,
            isDog: false
        });
        expect(myStore.imState.pets[0]).toEqual(myStore.preState.pets[0]);
        await delay(0)
        expect(myStore.imState.pets[0]).toEqual(myStore.toXType({
            name: 'Kitty',
            age: 2,
            isDog: false
        }, '.pets.0'));
    })
    it('nested array object prop', async function () {
        myStore.set(myStore.imState.pets[0].name, 'Kitty');
        expect(myStore.imState.pets[0].name).toEqual(myStore.preState.pets[0].name);
        await delay(0)
        expect(myStore.imState.pets[0].name).toEqual(myStore.toXType('Kitty', '.pets.0.name'));
    })
})
})
describe('Test: reduce operation "merge" ', function () {
// pending();
// Spy on the final lifecycle hook so each test can inspect whether the
// reduction completed (args[0].isDone) and how many times it ran.
let spy_stateDidReducedOperations: jasmine.Spy
beforeAll(function () {
    spy_stateDidReducedOperations = spyOn(myStore, 'stateDidReducedOperations').and.callThrough()
    myStore.preState = myStore.imState;
})
afterEach(function () {
    // Reset the spy and undo state changes between tests.
    spy_stateDidReducedOperations.calls.reset();
    myStore.imState = myStore.preState;
})
// NOTE(review): despite the title, these tests assert that the reduction
// reports isDone === false and leaves state untouched — no thrown Error is
// asserted here; confirm the intended failure mode against the library.
describe('throw Error when the partial state to reduce is not raw Object', function () {
    // pending()
    it('boolean', async function () {
        myStore.merge(myStore.imState.isMale, true)
        await delay(0)
        expect(spy_stateDidReducedOperations.calls.count()).toEqual(1)
        expect(spy_stateDidReducedOperations.calls.mostRecent().args[0].isDone).toEqual(false)
        expect(myStore.imState.isMale).toEqual(true)
    })
    it('number', async function () {
        myStore.merge(myStore.imState.age, 12)
        await delay(0)
        expect(spy_stateDidReducedOperations.calls.count()).toEqual(1)
        expect(spy_stateDidReducedOperations.calls.mostRecent().args[0].isDone).toEqual(false)
        expect(myStore.imState.age).toEqual(10)
    })
    it('string', async function () {
        myStore.merge(myStore.imState.name, 'Amy')
        await delay(0)
        expect(spy_stateDidReducedOperations.calls.count()).toEqual(1)
        expect(spy_stateDidReducedOperations.calls.mostRecent().args[0].isDone).toEqual(false)
        expect(myStore.imState.name).toEqual('Peter')
    })
})
describe('can merge value at root', function () {
    it('merge simple value', async function () {
        myStore.merge(myStore.imState, {
            isMale: false,
            age: 12,
            name: 'Amy'
        })
        await delay(0)
        expect(myStore.imState.isMale).toEqual(false)
        expect(myStore.imState.age).toEqual(12)
        expect(myStore.imState.name).toEqual('Amy')
        // Values not covered by the merge keep their original references
        expect(myStore.imState.pets).toBe(myStore.preState.pets)
        expect(myStore.imState.address).toBe(myStore.preState.address)
    })
    it('merge array value', async function () {
        myStore.merge(myStore.imState, {
            pets: [{
                name: 'Kitty',
                age: 2,
                isDog: false
            }]
        })
        await delay(0)
        expect(myStore.imState.pets[0]).toEqual(myStore.toXType({
            name: 'Kitty',
            age: 2,
            isDog: false
        }, '.pets.0'))
    })
    it('can merge array and object, shadow merge', async function () {
        myStore.merge(myStore.imState as any, {
            address: {
                province: 'ZJ'
            }
        })
        await delay(0)
        // Shallow merge: the nested object is replaced wholesale, so
        // city/homeInfo are dropped rather than deep-merged.
        expect(myStore.imState.address).toEqual(myStore.toXType({
            province: 'ZJ'
        }, '.address'))
    })
})
describe('can merge value at nested prop', function () {
    it('merge through nested object', async function () {
        myStore.merge(myStore.imState.address.homeInfo.isRend, {
            value: false
        })
        await delay(0)
        expect(myStore.imState.address.homeInfo.isRend.value).toEqual(false)
        // Reference-update checks: every node along the path is replaced
        expect(myStore.imState.address.homeInfo.isRend).not.toBe(myStore.preState.address.homeInfo.isRend)
        expect(myStore.imState.address.homeInfo).not.toBe(myStore.preState.address.homeInfo)
        expect(myStore.imState.address).not.toBe(myStore.preState.address)
        expect(myStore.imState).not.toBe(myStore.preState)
    })
    it('merge through nested array', async function () {
        myStore.merge(myStore.imState.pets[0], {
            age: 2,
            isDog: false,
        })
        await delay(0)
        expect(myStore.imState.pets[0]).toEqual(myStore.toXType({
            age: 2,
            isDog: false,
            name: 'Puppy' // original value preserved
        }, '.pets.0'))
        // Reference-update checks
        expect(myStore.imState.pets[0]).not.toBe(myStore.preState.pets[0])
        expect(myStore.imState.pets).not.toBe(myStore.preState.pets)
        expect(myStore.imState).not.toBe(myStore.preState)
    })
})
})
describe('Test: reduce operation "update" ', function (){
beforeAll(function () {
    // Snapshot state once for this whole suite.
    myStore.preState = myStore.imState;
})
describe('update simple value', function(){
    // pending()
    afterEach(function () {
        // Undo the effects of each test by restoring the snapshot.
        myStore.imState = myStore.preState;
    })
    // Special case for boolean values: toggled by comparing with false.
    it('boolean', async function(){
        expect(myStore.imState.isMale).toEqual(true)
        myStore.update(myStore.imState.isMale, b => b == false)
        await delay(0)
        expect(myStore.imState.isMale).toEqual(false)
        myStore.update(myStore.imState.isMale, b => b == false)
        await delay(0)
        expect(myStore.imState.isMale).toEqual(true)
    })
    it('number', async function(){
        myStore.update(myStore.imState.age, a => a + 1)
        await delay(0)
        expect(myStore.imState.age).toEqual(11)
        myStore.update(myStore.imState.age, a => a - 1)
        await delay(0)
        expect(myStore.imState.age).toEqual(10)
    })
    it('string', async function(){
        myStore.update(myStore.imState.name, str => 'Mr.' + str)
        await delay(0)
        expect(myStore.imState.name).toEqual('Mr.Peter')
        myStore.update(myStore.imState.name, str => 'Miss.' + str)
        await delay(0)
        expect(myStore.imState.name).toEqual('Miss.Mr.Peter')
    })
})
// NOTE(review): this suite has no afterEach restore — tests build on each
// other's state (pets grows from 1 to 5 elements across the suite), so
// test order matters here.
describe('array value', function(){
    // pending()
    it('array node: via array method', async function(){
        myStore.update(myStore.imState.pets, pets => {
            pets.push({
                name: 'Kitty',
                age: 2,
                isDog: false
            })
            return pets
        })
        await delay(0)
        expect(myStore.imState.pets.length).toBe(2)
        // Untouched elements keep their identity; the new one is tagged.
        expect(myStore.imState.pets[0]).toBe(myStore.preState.pets[0])
        expect(myStore.imState.pets[1]).toEqual(myStore.toXType({
            name: 'Kitty',
            age: 2,
            isDog: false
        }, '.pets.1'))
        expect(myStore.imState.pets).not.toBe(myStore.preState.pets)
        expect(myStore.imState).not.toBe(myStore.preState)
    })
    it('array node: via array spread operator', async function(){
        myStore.update(myStore.imState.pets, pets => [
            ... pets,
            {
                name: 'Kitty2',
                age: 2,
                isDog: false
            }
        ])
        await delay(0)
        expect(myStore.imState.pets.length).toBe(3)
        expect(myStore.imState.pets[0]).toBe(myStore.preState.pets[0])
        expect(myStore.imState.pets[1]).toBe(myStore.preState.pets[1])
        expect(myStore.imState.pets[2]).toEqual(myStore.toXType({
            name: 'Kitty2',
            age: 2,
            isDog: false
        }, '.pets.2'))
        expect(myStore.imState.pets).not.toBe(myStore.preState.pets)
        expect(myStore.imState).not.toBe(myStore.preState)
    })
    it('array elements: map update, via object method', async function(){
        // Update every element via Object.assign
        myStore.update(myStore.imState.pets, pets => pets.map( pet => Object.assign({}, pet, {
            ... pet,
            age: 4,
            isDog: false
        })))
        await delay(0)
        expect(myStore.imState.pets[0]).toEqual(myStore.toXType({
            name: 'Puppy',
            age: 4,
            isDog: false
        }, '.pets.0'))
        expect(myStore.imState.pets[1]).toEqual(myStore.toXType({
            name: 'Kitty',
            age: 4,
            isDog: false
        }, '.pets.1'))
        expect(myStore.imState.pets[2]).toEqual(myStore.toXType({
            name: 'Kitty2',
            age: 4,
            isDog: false
        }, '.pets.2'))
        expect(myStore.imState.pets[0]).not.toBe(myStore.preState.pets[0])
        expect(myStore.imState.pets[1]).not.toBe(myStore.preState.pets[1])
        expect(myStore.imState.pets[2]).not.toBe(myStore.preState.pets[2])
        expect(myStore.imState.pets).not.toBe(myStore.preState.pets)
        expect(myStore.imState).not.toBe(myStore.preState)
    })
    it('array elements: map update, via object spread operator', async function(){
        // Update every element via object spread
        myStore.update(myStore.imState.pets, pets => pets.map( pet => ({
            ... pet,
            age: 5,
            isDog: true
        })))
        await delay(0)
        expect(myStore.imState.pets[0]).toEqual(myStore.toXType({
            name: 'Puppy',
            age: 5,
            isDog: true
        }, '.pets.0'))
        expect(myStore.imState.pets[1]).toEqual(myStore.toXType({
            name: 'Kitty',
            age: 5,
            isDog: true
        }, '.pets.1'))
        expect(myStore.imState.pets[2]).toEqual(myStore.toXType({
            name: 'Kitty2',
            age: 5,
            isDog: true
        }, '.pets.2'))
        expect(myStore.imState.pets[0]).not.toBe(myStore.preState.pets[0])
        expect(myStore.imState.pets[1]).not.toBe(myStore.preState.pets[1])
        expect(myStore.imState.pets[2]).not.toBe(myStore.preState.pets[2])
        expect(myStore.imState.pets).not.toBe(myStore.preState.pets)
        expect(myStore.imState).not.toBe(myStore.preState)
    })
    it('array elements: map update, via forEach', async function(){
        // Update each element with its own queued operation
        myStore.imState.pets.forEach( pet => {
            myStore.update(pet, _pet => ({
                ... _pet,
                age: 6,
                isDog: false
            }))
        })
        await delay(0)
        expect(myStore.imState.pets[0]).toEqual(myStore.toXType({
            name: 'Puppy',
            age: 6,
            isDog: false
        }, '.pets.0'))
        expect(myStore.imState.pets[1]).toEqual(myStore.toXType({
            name: 'Kitty',
            age: 6,
            isDog: false
        }, '.pets.1'))
        expect(myStore.imState.pets[2]).toEqual(myStore.toXType({
            name: 'Kitty2',
            age: 6,
            isDog: false
        }, '.pets.2'))
        expect(myStore.imState.pets[0]).not.toBe(myStore.preState.pets[0])
        expect(myStore.imState.pets[1]).not.toBe(myStore.preState.pets[1])
        expect(myStore.imState.pets[2]).not.toBe(myStore.preState.pets[2])
        expect(myStore.imState.pets).not.toBe(myStore.preState.pets)
        expect(myStore.imState).not.toBe(myStore.preState)
    })
    it('array continuous push', async function(){
        // Two queued updates in the same tick must both apply.
        myStore.update(myStore.imState.pets, pets => [...pets, {
            name: 'Kitty',
            age: 6,
            isDog: false
        }])
        myStore.update(myStore.imState.pets, pets => [...pets, {
            name: 'Kitty',
            age: 6,
            isDog: false
        }])
        expect(myStore.imState.pets.length).toEqual(3)
        await delay(0)
        expect(myStore.imState.pets.length).toEqual(5)
    })
})
describe('object value', function(){
    it('update nested value', async function(){
        myStore.update(myStore.imState.address.city, c => c + ' good')
        await delay(0)
        expect(myStore.imState.address.city).toEqual('GZ good')
        expect(myStore.imState.address).not.toBe(myStore.preState.address)
        expect(myStore.imState).not.toBe(myStore.preState)
    })
    it('update nested object', async function(){
        // The updater may return a shape differing from SimpleState, hence
        // the `as any` cast.
        myStore.update(myStore.imState.address.homeInfo.isRend as any, o => ({
            type: 'old',
            value: o.value == false
        }))
        await delay(0)
        expect(myStore.imState.address.homeInfo.isRend).toEqual(myStore.toXType({
            type: 'old',
            value: false
        }, '.address.homeInfo.isRend'))
        expect(myStore.imState.address.homeInfo.isRend).not.toBe(myStore.preState.address.homeInfo.isRend)
        expect(myStore.imState.address.homeInfo).not.toBe(myStore.preState.address.homeInfo)
        expect(myStore.imState.address).not.toBe(myStore.preState.address)
        expect(myStore.imState).not.toBe(myStore.preState)
    })
})
}) | the_stack |
import UnitBezier from '@mapbox/unitbezier';
import Point from './point';
import type {Callback} from '../types/callback';
/**
* @module util
* @private
*/
/**
 * Cubic in-out easing: maps a value `t` in [0, 1] to an eased value in
 * [0, 1], accelerating in the first half and decelerating in the second.
 * Inputs outside [0, 1] are clamped to the endpoints.
 *
 * @private
 */
export function easeCubicInOut(t: number): number {
    if (t <= 0) return 0;
    if (t >= 1) return 1;
    const squared = t * t;
    const cubed = squared * t;
    return 4 * (t < 0.5 ? cubed : 3 * (t - squared) + cubed - 0.75);
}
/**
 * Given (p1x, p1y), (p2x, p2y) control points for a unit bezier curve,
 * return a function that interpolates along that curve.
 *
 * @param p1x control point 1 x coordinate
 * @param p1y control point 1 y coordinate
 * @param p2x control point 2 x coordinate
 * @param p2y control point 2 y coordinate
 * @private
 */
export function bezier(p1x: number, p1y: number, p2x: number, p2y: number): (t: number) => number {
    // The solver is built once and shared by every call of the returned
    // interpolator.
    const curve = new UnitBezier(p1x, p1y, p2x, p2y);
    return (t: number) => curve.solve(t);
}
/**
 * A default bezier-curve powered easing function with
 * control points (0.25, 0.1) and (0.25, 1) — the same control points as
 * the CSS `ease` timing function.
 *
 * @private
 */
export const ease = bezier(0.25, 0.1, 0.25, 1);
/**
 * Constrain `n` to the inclusive range [min, max].
 *
 * @param n value
 * @param min the minimum value to be returned
 * @param max the maximum value to be returned
 * @returns the clamped value
 * @private
 */
export function clamp(n: number, min: number, max: number): number {
    const atLeastMin = Math.max(min, n);
    return Math.min(max, atLeastMin);
}
/**
 * Constrain `n` to the range (min, max] via modular arithmetic: values
 * are wrapped around so the result lies above `min` (exclusive) and at
 * most `max` (inclusive).
 *
 * @param n value
 * @param min the minimum value to be returned, exclusive
 * @param max the maximum value to be returned, inclusive
 * @returns constrained number
 * @private
 */
export function wrap(n: number, min: number, max: number): number {
    const range = max - min;
    // Double-modulo keeps the result non-negative for n < min.
    const wrapped = ((n - min) % range + range) % range + min;
    // The modulo lands exactly on min for multiples of the range; map
    // that case to max so the interval stays (min, max].
    return (wrapped === min) ? max : wrapped;
}
/*
 * Call an asynchronous function on an array of arguments,
 * calling `callback` with the completed results of all calls.
 *
 * If any call reports an error, the final callback receives the most
 * recent error together with whatever results were collected.
 *
 * @param array input to each call of the async function.
 * @param fn an async function with signature (data, callback)
 * @param callback a callback run after all async work is done.
 * called with an array, containing the results of each async call.
 * @private
 */
export function asyncAll<Item, Result>(
    array: Array<Item>,
    fn: (item: Item, fnCallback: Callback<Result>) => void,
    callback: Callback<Array<Result>>
) {
    if (array.length === 0) { return callback(null, []); }
    let pending = array.length;
    const results = new Array(array.length);
    let latestError = null;
    array.forEach((item, index) => {
        fn(item, (err, result) => {
            if (err) latestError = err;
            results[index] = (result as any as Result); // https://github.com/facebook/flow/issues/2123
            pending -= 1;
            if (pending === 0) callback(latestError, results);
        });
    });
}
/*
 * Compute the keys present in one object but absent from another.
 *
 * @returns keys of `obj` that do not appear in `other`
 * @private
 */
export function keysDifference<S, T>(
    obj: {[key: string]: S},
    other: {[key: string]: T}
): Array<string> {
    const missing: Array<string> = [];
    for (const key in obj) {
        if (!(key in other)) missing.push(key);
    }
    return missing;
}
/**
 * Given a destination object and optionally many source objects,
 * copy all enumerable properties from the source objects into the
 * destination, mutating and returning it. Later sources override
 * properties set by earlier ones.
 *
 * @param dest destination object
 * @param sources sources from which properties are pulled
 * @private
 */
export function extend(dest: any, ...sources: Array<any>): any {
    for (const source of sources) {
        for (const key in source) {
            dest[key] = source[key];
        }
    }
    return dest;
}
/**
 * Given an object and a number of properties as strings, return a version
 * of that object with only those properties.
 *
 * @param src the object
 * @param properties an array of property names chosen
 * to appear on the resulting object.
 * @returns object with limited properties.
 * @example
 * var foo = { name: 'Charlie', age: 10 };
 * var justName = pick(foo, ['name']);
 * // justName = { name: 'Charlie' }
 * @private
 */
export function pick(src: any, properties: Array<string>): any {
    const result = {};
    for (const key of properties) {
        // `in` also matches inherited properties, mirroring plain access.
        if (key in src) {
            result[key] = src[key];
        }
    }
    return result;
}
// Module-level counter backing uniqueId().
let id = 1;

/**
 * Return a unique numeric id, starting at 1 and incrementing with
 * each call.
 *
 * @returns unique numeric id.
 * @private
 */
export function uniqueId(): number {
    const current = id;
    id += 1;
    return current;
}
/**
 * Return whether a given value is a power of two.
 *
 * NOTE(review): this relies on Math.log producing an exact integer ratio
 * for powers of two; that holds for typical texture sizes but is a
 * floating-point computation, not an integer check.
 *
 * @private
 */
export function isPowerOfTwo(value: number): boolean {
    const exponent = Math.log(value) / Math.LN2;
    return exponent % 1 === 0;
}
/**
 * Return the next power of two at or above the input value; inputs at or
 * below 1 return 1.
 *
 * @private
 */
export function nextPowerOfTwo(value: number): number {
    if (value <= 1) return 1;
    const exponent = Math.ceil(Math.log(value) / Math.LN2);
    return Math.pow(2, exponent);
}
/**
 * Given an array of member function names as strings, replace all of them
 * with bound versions that will always refer to `context` as `this`. This
 * is useful for classes where otherwise event bindings would reassign
 * `this` to the evented object or some other value: this lets you ensure
 * the `this` value always.
 *
 * Names that are missing (or falsy) on `context` are skipped silently.
 *
 * @param fns list of member function names
 * @param context the context value
 * @example
 * function MyClass() {
 *   bindAll(['ontimer'], this);
 *   this.name = 'Tom';
 * }
 * MyClass.prototype.ontimer = function() {
 *   alert(this.name);
 * };
 * var myClass = new MyClass();
 * setTimeout(myClass.ontimer, 100);
 * @private
 */
export function bindAll(fns: Array<string>, context: any): void {
    for (const name of fns) {
        if (!context[name]) { continue; }
        context[name] = context[name].bind(context);
    }
}
/**
 * Create an object by mapping all the values of an existing object while
 * preserving their keys. The iterator receives (value, key, input) and
 * may be given an explicit `this` via `context`.
 *
 * @private
 */
export function mapObject(input: any, iterator: Function, context?: any): any {
    const mapped = {};
    for (const prop in input) {
        mapped[prop] = iterator.call(context || this, input[prop], prop, input);
    }
    return mapped;
}
/**
 * Create an object by filtering out values of an existing object. The
 * predicate receives (value, key, input) and may be given an explicit
 * `this` via `context`.
 *
 * @private
 */
export function filterObject(input: any, iterator: Function, context?: any): any {
    const kept = {};
    for (const prop in input) {
        if (iterator.call(context || this, input[prop], prop, input)) {
            kept[prop] = input[prop];
        }
    }
    return kept;
}
import deepEqual from '../style-spec/util/deep_equal';
export {deepEqual};
/**
 * Deeply clones a value: arrays and plain objects are copied recursively,
 * everything else (primitives, null) is returned as-is.
 *
 * NOTE: non-plain objects (Date, Map, Set, …) are cloned via their
 * enumerable properties only, same as the enumeration in mapObject.
 *
 * @private
 */
export function clone<T>(input: T): T {
    if (Array.isArray(input)) {
        return input.map(clone) as any as T;
    }
    if (input && typeof input === 'object') {
        return mapObject(input, clone) as any as T;
    }
    return input;
}
/**
 * Check if two arrays have at least one common element.
 *
 * Uses a Set over `b` so the check runs in O(|a| + |b|) instead of the
 * O(|a| × |b|) of a nested indexOf scan. Note: Set membership uses
 * SameValueZero, so NaN elements now count as intersecting (indexOf
 * could never match NaN).
 *
 * @private
 */
export function arraysIntersect<T>(a: Array<T>, b: Array<T>): boolean {
    const bSet = new Set(b);
    for (const item of a) {
        if (bSet.has(item)) return true;
    }
    return false;
}
/**
 * Print a warning message to the console and ensure duplicate warning
 * messages are not printed: each distinct message is emitted at most once
 * per page load.
 *
 * @private
 */
const warnOnceHistory: {[key: string]: boolean} = {};

export function warnOnce(message: string): void {
    if (warnOnceHistory[message]) return;
    // console isn't defined in some WebWorkers, see #2558
    if (typeof console !== 'undefined') console.warn(message);
    warnOnceHistory[message] = true;
}
/**
 * Indicates if the provided Points are in a counter clockwise (true) or
 * clockwise (false) order, via the sign of the z-component of the cross
 * product of (b - a) and (c - a).
 *
 * @private
 * @returns true for a counter clockwise set of points
 */
// http://bryceboe.com/2006/10/23/line-segment-intersection-algorithm/
export function isCounterClockwise(a: Point, b: Point, c: Point): boolean {
    const lhs = (c.y - a.y) * (b.x - a.x);
    const rhs = (b.y - a.y) * (c.x - a.x);
    return lhs > rhs;
}
/**
 * Returns the signed area for the polygon ring. Positive areas are
 * exterior rings and have a clockwise winding. Negative areas are interior
 * rings and have a counter clockwise ordering.
 *
 * The result is twice the geometric area (shoelace formula without the
 * final division by 2); callers here only use sign and magnitude.
 *
 * @private
 * @param ring Exterior or interior ring
 */
export function calculateSignedArea(ring: Array<Point>): number {
    let sum = 0;
    const len = ring.length;
    for (let i = 0; i < len; i++) {
        const current = ring[i];
        const previous = ring[(i + len - 1) % len];
        sum += (previous.x - current.x) * (current.y + previous.y);
    }
    return sum;
}
/**
 * Detects closed polygons, first + last point are equal.
 *
 * @private
 * @param points array of points
 * @return true if the points are a closed polygon
 */
export function isClosedPolygon(points: Array<Point>): boolean {
    // Fewer than 4 points cannot form a closed ring with area:
    // 2 matching points are a point, 3 with matching ends are a line.
    if (points.length < 4)
        return false;
    const first = points[0];
    const last = points[points.length - 1];
    if (Math.abs(first.x - last.x) > 0 ||
        Math.abs(first.y - last.y) > 0) {
        return false;
    }
    // Polygon simplification can produce polygons with zero area and more
    // than 3 points; reject rings whose area is effectively zero.
    return Math.abs(calculateSignedArea(points)) > 0.01;
}
/**
 * Converts spherical coordinates to cartesian coordinates.
 *
 * @private
 * @param spherical Spherical coordinates, in [radial, azimuthal, polar]
 * @return cartesian coordinates in [x, y, z]
 */
export function sphericalToCartesian([r, azimuthal, polar]: [number, number, number]): {
    x: number;
    y: number;
    z: number;
} {
    // "North"/"up" (compass-wise) is abstracted as 0° when really this is
    // 90° (π/2); correct for that, then convert both angles to radians.
    const azimuthalRad = (azimuthal + 90) * (Math.PI / 180);
    const polarRad = polar * (Math.PI / 180);
    return {
        x: r * Math.cos(azimuthalRad) * Math.sin(polarRad),
        y: r * Math.sin(azimuthalRad) * Math.sin(polarRad),
        z: r * Math.cos(polarRad)
    };
}
/* global self, WorkerGlobalScope */
/**
 * Returns true if the when run in the web-worker context.
 *
 * @private
 * @returns {boolean}
 */
export function isWorker(): boolean {
    // Both globals only exist inside a worker; guard with typeof so this
    // is safe to call on the main thread and in Node.
    if (typeof WorkerGlobalScope === 'undefined' || typeof self === 'undefined') {
        return false;
    }
    return self instanceof WorkerGlobalScope;
}
/**
 * Parses data from 'Cache-Control' headers.
 *
 * Each directive becomes a key; valued directives keep their lowercased
 * value, flag directives map to true. 'max-age' is additionally parsed
 * to an integer (and dropped if not numeric).
 *
 * @private
 * @param cacheControl Value of 'Cache-Control' header
 * @return object containing parsed header info.
 */
export function parseCacheControl(cacheControl: string): any {
    // Taken from [Wreck](https://github.com/hapijs/wreck)
    const re = /(?:^|(?:\s*\,\s*))([^\x00-\x20\(\)<>@\,;\:\\"\/\[\]\?\=\{\}\x7F]+)(?:\=(?:([^\x00-\x20\(\)<>@\,;\:\\"\/\[\]\?\=\{\}\x7F]+)|(?:\"((?:[^"\\]|\\.)*)\")))?/g;
    const header = {};
    cacheControl.replace(re, ($0, $1, $2, $3) => {
        const value = $2 || $3;
        header[$1] = value ? value.toLowerCase() : true;
        return '';
    });
    const maxAgeRaw = header['max-age'];
    if (maxAgeRaw) {
        const maxAge = parseInt(maxAgeRaw, 10);
        if (isNaN(maxAge)) {
            delete header['max-age'];
        } else {
            header['max-age'] = maxAge;
        }
    }
    return header;
}
// Cached detection result; computed once on first call.
let _isSafari = null;

/**
 * Returns true when run in WebKit derived browsers.
 * This is used as a workaround for a memory leak in Safari caused by using
 * Transferable objects to transfer data between WebWorkers and the main
 * thread. https://github.com/mapbox/mapbox-gl-js/issues/8771
 *
 * This should be removed once the underlying Safari issue is fixed.
 *
 * @private
 * @param scope {WindowOrWorkerGlobalScope} Since this function is used both
 * on the main thread and WebWorker context, let the calling scope pass in
 * the global scope object.
 * @returns {boolean}
 */
export function isSafari(scope: any): boolean {
    if (_isSafari == null) {
        const userAgent = scope.navigator ? scope.navigator.userAgent : null;
        const isAppleMobile = !!(userAgent && /\b(iPad|iPhone|iPod)\b/.test(userAgent));
        const isSafariUA = !!(userAgent && userAgent.match('Safari') && !userAgent.match('Chrome'));
        _isSafari = !!scope.safari || isAppleMobile || isSafariUA;
    }
    return _isSafari;
}
/**
 * Feature-detect a Web Storage area ('localStorage' / 'sessionStorage')
 * by performing a test write; disabled or quota-exhausted storage throws,
 * which is reported as unavailable.
 *
 * @private
 */
export function storageAvailable(type: string): boolean {
    try {
        const storage = window[type];
        storage.setItem('_mapbox_test_', 1);
        storage.removeItem('_mapbox_test_');
        return true;
    } catch (e) {
        // Thrown for missing window, disabled storage, or quota errors.
        return false;
    }
}
// The following methods are from https://developer.mozilla.org/en-US/docs/Web/API/WindowBase64/Base64_encoding_and_decoding#The_Unicode_Problem

/**
 * Unicode compliant base64 encoder for strings: the input is first
 * percent-encoded to UTF-8 bytes, which btoa can handle.
 */
export function b64EncodeUnicode(str: string) {
    const utf8Bytes = encodeURIComponent(str).replace(/%([0-9A-F]{2})/g,
        (match, p1) => String.fromCharCode(Number('0x' + p1)) //eslint-disable-line
    );
    return btoa(utf8Bytes);
}
/**
 * Unicode compliant decoder for base64-encoded strings: the decoded bytes
 * are percent-encoded and run through decodeURIComponent to restore UTF-8.
 */
export function b64DecodeUnicode(str: string) {
    const percentEncoded = atob(str)
        .split('')
        .map((c) => '%' + ('00' + c.charCodeAt(0).toString(16)).slice(-2)) //eslint-disable-line
        .join('');
    return decodeURIComponent(percentEncoded);
}
export function isImageBitmap(image: any): image is ImageBitmap {
return typeof ImageBitmap !== 'undefined' && image instanceof ImageBitmap;
} | the_stack |
import defaults from "lodash-es/defaults";
import pick from "lodash-es/pick";
import Stats from "stats.js";
import AnalyserAdapter from "./analyser/AnalyserAdapter";
import builtinResourcePack from "./builtinResourcePack";
import Component from "./Component";
import ComponentRegistry from "./ComponentRegistry";
import EffectList from "./EffectList";
import IMain from "./IMain";
import BufferSave from "./misc/BufferSave";
import GlobalVar from "./misc/GlobalVar";
import Model from "./Model";
import ClearScreen from "./render/ClearScreen";
import MovingParticle from "./render/MovingParticle";
import Picture from "./render/Picture";
import SuperScope from "./render/SuperScope";
import Texer from "./render/Texer";
import ResourceManager from "./ResourceManager";
import ChannelShift from "./trans/ChannelShift";
import ColorClip from "./trans/ColorClip";
import ColorMap from "./trans/ColorMap";
import Convolution from "./trans/Convolution";
import DynamicMovement from "./trans/DynamicMovement";
import FadeOut from "./trans/FadeOut";
import Invert from "./trans/Invert";
import Mirror from "./trans/Mirror";
import Mosaic from "./trans/Mosaic";
import UniqueTone from "./trans/UniqueTone";
import {checkRequiredOptions} from "./utils";
import Buffer from "./webgl/Buffer";
import CopyProgram from "./webgl/CopyProgram";
import RenderingContext from "./webgl/RenderingContext";
import TextureSetManager from "./webgl/TextureSetManager";
declare var WEBVS_VERSION: string;
/**
 * Options for the [[Main]] constructor
 */
export interface IMainOpts {
    /**
     * Canvas element in which the visualization will be rendered
     */
    canvas: HTMLCanvasElement;
    /**
     * An analyser that will provide music data for the visualizations
     */
    analyser: AnalyserAdapter;
    /**
     * Show or hide the performance stats panel (default: false)
     */
    showStat?: boolean;
    /**
     * Override baked-in resource url prefix for builtin resources
     */
    resourcePrefix?: string;
    /**
     * Custom requestAnimationFrame. Useful for testing/custom frame-rate control.
     * Must return a request id that can later be passed to [[cancelAnimationFrame]].
     * Use together with [[cancelAnimationFrame]] — both must be supplied or the
     * browser's implementations are used.
     */
    requestAnimationFrame?: (callback: () => void) => any;
    /**
     * Custom cancelAnimationFrame. Useful for testing/custom frame-rate control.
     * Use together with [[requestAnimationFrame]]
     */
    cancelAnimationFrame?: (reqId: any) => void;
}
/**
* Main is the primary interface that controls loading of presets, starting stopping animations, etc.
* It maintains the root Component and the hierarchy of components under it.
* A typical usage involves creating an Analyser and a Main object. The Analyser interfaces with your
* audio source and generates the visualization data, while the Main object serves as the primary
* interface for controlling the visualization. E.g:
* ```
* const analyser = new Webvs.WebAudioAnalyser();
* const webvs = new Webvs.Main({
* canvas: document.getElementById("canvas"),
* analyser: analyser,
* showStat: true
* });
* webvs.loadPreset({
* "clearFrame": true,
* "components": [
* {
* "type": "SuperScope",
* "source": "WAVEFORM",
* "code": {
* "perPoint": "x=i*2-1;y=v;"
* },
* "colors": ["#ffffff"]
* }
* ]
* });
* webvs.start();
* analyser.load("music.ogg");
* analyser.play();
* ```
*/
export default class Main extends Model implements IMain {
    /**
     * version of Webvs library
     */
    public static version: string = WEBVS_VERSION;
    // Source of music data for the visualization components.
    private analyser: AnalyserAdapter;
    // Loads external media; the animation is paused/resumed around its
    // "wait"/"ready" events (see handleRsrcWait/handleRsrcReady).
    private rsrcMan: ResourceManager;
    // WebGL rendering context wrapper (created in _initGl).
    private rctx: RenderingContext;
    // Shared shader program for copying frames (created in _initGl).
    private copier: CopyProgram;
    // Registry of component classes that presets can instantiate.
    private componentRegistry: ComponentRegistry;
    // Manager for global temporary texture sets shared between components.
    private tempTSM: TextureSetManager;
    // Shared register values available to component EEL code.
    private registerBank: {[key: string]: number};
    // Timestamp (ms) recorded whenever the root component is (re)built.
    private bootTime: number;
    private canvas: HTMLCanvasElement;
    // Tracks start()/stop(); the loop additionally requires rsrcMan.ready.
    private isStarted: boolean;
    // Optional stats.js panel; only set when options.showStat is true.
    private stats: Stats;
    // Preset metadata; exposed via get("meta") and settable via setAttribute.
    private meta: any;
    // Root of the component tree; always an EffectList (see _setupRoot).
    private rootComponent: Component;
    // Handle returned by requestAnimationFrame, used to cancel the loop.
    private animReqId: number;
    private buffers: {[name: string]: Buffer};
    private requestAnimationFrame: (callback: () => void) => any;
    private cancelAnimationFrame: (reqId: any) => void;
    // Resource keys registered by the current preset; cleared on loadPreset.
    private presetResourceKeys: string[] = [];
    // Kept as fields so destroy() can remove the canvas listeners again.
    private contextLostHander: (event: any) => void;
    private contextRestoredHander: (event: any) => void;
    /**
     * Constructs a Webvs Main object that can load and render visualization presets
     * @param options options for Main
     */
    constructor(options: IMainOpts) {
        super();
        checkRequiredOptions(options, ["canvas", "analyser"]);
        // Fill in option defaults without mutating the caller's object.
        options = {
            showStat: false,
            ...options,
        };
        this.canvas = options.canvas;
        this.analyser = options.analyser;
        this.isStarted = false;
        // Use a caller-supplied frame scheduler only when both halves are
        // provided; otherwise fall back to the browser's implementation.
        if (options.requestAnimationFrame && options.cancelAnimationFrame) {
            this.requestAnimationFrame = options.requestAnimationFrame;
            this.cancelAnimationFrame = options.cancelAnimationFrame;
        } else {
            this.requestAnimationFrame = window.requestAnimationFrame.bind(window);
            this.cancelAnimationFrame = window.cancelAnimationFrame.bind(window);
        }
        if (options.showStat) {
            const stats = new Stats();
            stats.setMode(0);
            stats.domElement.style.position = "absolute";
            stats.domElement.style.right = "5px";
            stats.domElement.style.bottom = "5px";
            document.body.appendChild(stats.domElement);
            this.stats = stats;
        }
        this.meta = {};
        this.buffers = {};
        // NOTE: initialization order matters — the GL context (_initGl)
        // must exist before the root component is built (_setupRoot).
        this._initComponentRegistry();
        this._initResourceManager(options.resourcePrefix || "");
        this._registerContextEvents();
        this._initGl();
        this._setupRoot({id: "root"});
    }
    /**
     * Starts running the animation when ready. The animation may not start
     * playing immediately because preset may use external resources which
     * needs to be loaded asynchronously by the resource manager.
     */
    public start() {
        if (this.isStarted) {
            return;
        }
        this.isStarted = true;
        // If resources are still loading, handleRsrcReady starts the
        // animation once the resource manager fires "ready".
        if (this.rsrcMan.ready) {
            this._startAnimation();
        }
    }
    /**
     * Stops the animation
     */
    public stop() {
        if (!this.isStarted) {
            return;
        }
        this.isStarted = false;
        // While resources are loading, the loop is already stopped
        // (see handleRsrcWait), so there is nothing to cancel.
        if (this.rsrcMan.ready) {
            this._stopAnimation();
        }
    }
    /**
     * Loads a preset into this webvs main instance.
     *
     * @param preset an object that contains the preset. The root object should
     * have a `components` property which will contain an Array for component configurations
     * for all the components. All component configurations should have a
     * `type` property containing the string name of the Component. Other
     * properties are specific to each component. The `resources.uris` property
     * in preset is used to register resources with [[ResourceManager]] and has
     * the same format accepted by the [[ResourceManager.registerUri]].
     */
    public loadPreset(preset: any) {
        preset = Object.assign({}, preset); // use our own copy
        preset.id = "root";
        // Tear down the previous preset's component tree.
        this.rootComponent.destroy();
        // setup resources: drop URIs registered by the previous preset
        // before registering the new preset's resources.
        this.rsrcMan.clear(this.presetResourceKeys);
        if ("resources" in preset && "uris" in preset.resources) {
            this.rsrcMan.registerUri(preset.resources.uris);
            this.presetResourceKeys = Object.keys(preset.resources.uris);
        } else {
            this.presetResourceKeys = [];
        }
        // load meta
        this.meta = Object.assign({}, preset.meta);
        this._setupRoot(preset);
    }
    /**
     * Resets and reinitializes all the components and canvas.
     * The current preset is serialized, the GL context is rebuilt, and the
     * preset is reloaded into the fresh context.
     */
    public resetCanvas() {
        const preset = this.rootComponent.toJSON();
        this.rootComponent.destroy();
        this.tempTSM.destroy();
        this._initGl();
        this._setupRoot(preset);
    }
    /**
     * This function should be called if the canvas element's
     * width or height attribute has changed. This allows Webvs
     * to update and resize all the buffers.
     */
    public notifyResize() {
        const gl = this.rctx.getGl();
        gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
        this.tempTSM.resize();
        // Components listen for this event to resize their own buffers.
        this.emit("resize", gl.drawingBufferWidth, gl.drawingBufferHeight);
    }
    /**
     * Cache webgl Buffers. Useful to store buffers that can be shared. e.g. geometries.
     * @param name Name of the buffer
     * @param buffer Buffer to be cached
     */
    public cacheBuffer(name: string, buffer: Buffer) {
        this.buffers[name] = buffer;
    }
    /**
     * Returns buffer cached under given name
     * @param name Name of the buffer
     * @returns The buffer cached under given name. `undefined` if not found.
     */
    public getBuffer(name: string): Buffer {
        return this.buffers[name];
    }
    /**
     * Gets the current value of a preset property. Eg. `main.get("meta")`
     * Only "meta" is currently supported; any other key returns `undefined`.
     * @param key preset property to be returned
     */
    public get(key: string) {
        if (key === "meta") {
            return this.meta;
        }
    }
    /**
     * Generates and returns the instantaneous preset JSON representation
     * @returns JSON representaton of the preset
     */
    public toJSON(): any {
        let preset = this.rootComponent.toJSON();
        // Only the public preset shape is kept; internal root properties
        // (e.g. id) are stripped.
        preset = pick(preset, "clearFrame", "components");
        preset.resources = this.rsrcMan.toJSON();
        preset.meta = Object.assign({}, this.meta);
        return preset;
    }
    /**
     * Destroys and cleans up all resources
     */
    public destroy() {
        this.stop();
        this.rootComponent.destroy();
        this.rootComponent = null;
        this.copier.destroy();
        for (const bufName in this.buffers) {
            // skip inherited keys; only destroy buffers cached on this map
            if (!this.buffers.hasOwnProperty(bufName)) {
                continue;
            }
            this.buffers[bufName].destroy();
        }
        this.tempTSM.destroy();
        this.tempTSM = null;
        this.rctx.destroy();
        this.rctx = null;
        if (this.stats) {
            const statsDomElement = this.stats.domElement;
            statsDomElement.parentNode.removeChild(statsDomElement);
            this.stats = null;
        }
        this.rsrcMan = null;
        // Detach model listeners and the context-loss handlers registered
        // in the constructor (_registerContextEvents).
        this.stopListening();
        this.canvas.removeEventListener("webglcontextlost", this.contextLostHander);
        this.canvas.removeEventListener("webglcontextrestored", this.contextRestoredHander);
    }
    /**
     * Returns the rendering context for webgl rendering
     */
    public getRctx(): RenderingContext { return this.rctx; }
    /**
     * Returns the Resource Manager that manages media resources
     */
    public getRsrcMan(): ResourceManager { return this.rsrcMan; }
    /**
     * Returns A shader program that can be used to copy frames
     */
    public getCopier(): CopyProgram { return this.copier; }
    /**
     * Returns the analyser instance that's used to get music data
     * for the visualization
     */
    public getAnalyser(): AnalyserAdapter { return this.analyser; }
    /**
     * Returns a registry of [[Component]] classes that will be used
     * to create preset effects
     */
    public getComponentRegistry(): ComponentRegistry { return this.componentRegistry; }
    /**
     * Returns a TextureSetManager for global temporary buffers, that can
     * be shared between components.
     */
    public getTempTSM(): TextureSetManager { return this.tempTSM; }
    /**
     * Returns register bank, a map of shared register values available
     * in EEL code in components.
     */
    public getRegisterBank(): {[key: string]: number} {return this.registerBank; }
    /**
     * Returns the timestamp recorded when the current root component was
     * set up (reset whenever a preset is loaded or the canvas is reset).
     */
    public getBootTime(): number {return this.bootTime; }
    /**
     * Handles Model attribute updates; only the "meta" attribute is supported.
     * @returns true if the attribute was handled, false otherwise
     */
    protected setAttribute(key: string, value: any, options: any) {
        if (key === "meta") {
            this.meta = value;
            return true;
        }
        return false;
    }
    // event handlers
    // Pause the render loop while the resource manager waits on downloads.
    private handleRsrcWait() {
        if (this.isStarted) {
            this._stopAnimation();
        }
    }
    // Resume the render loop once all pending resources have loaded.
    private handleRsrcReady() {
        if (this.isStarted) {
            this._startAnimation();
        }
    }
    // Registers all built-in component classes that presets may reference by type name.
    private _initComponentRegistry() {
        this.componentRegistry = new ComponentRegistry([
            EffectList,
            ClearScreen,
            MovingParticle,
            Picture,
            SuperScope,
            Texer,
            ChannelShift,
            ColorClip,
            ColorMap,
            Convolution,
            DynamicMovement,
            FadeOut,
            Invert,
            Mirror,
            Mosaic,
            UniqueTone,
            BufferSave,
            GlobalVar,
        ]);
    }
    // Creates the resource manager (optionally with an overridden URL
    // prefix) and wires its wait/ready events to pause/resume rendering.
    private _initResourceManager(prefix: string): void {
        let builtinPack = builtinResourcePack;
        if (prefix) {
            // Copy before mutating so the shared builtin pack stays intact.
            builtinPack = Object.assign({}, builtinPack);
            builtinPack.prefix = prefix;
        }
        this.rsrcMan = new ResourceManager(builtinPack);
        this.listenTo(this.rsrcMan, "wait", () => this.handleRsrcWait());
        this.listenTo(this.rsrcMan, "ready", () => this.handleRsrcReady());
    }
    // Stop rendering when the WebGL context is lost and rebuild everything
    // when the browser restores it. Handlers are kept as fields so
    // destroy() can remove them.
    private _registerContextEvents() {
        this.contextLostHander = (event) => {
            // preventDefault signals that we will handle context restoration
            event.preventDefault();
            this.stop();
        };
        this.canvas.addEventListener("webglcontextlost", this.contextLostHander);
        this.contextRestoredHander = (event) => {
            this.resetCanvas();
        };
        this.canvas.addEventListener("webglcontextrestored", this.contextRestoredHander);
    }
    // Acquires the WebGL context and builds the context-dependent helpers
    // (rendering context, frame copier, temporary texture sets).
    private _initGl() {
        try {
            const gl = this.canvas.getContext("webgl", {alpha: false});
            if (!gl) {
                throw new Error("context is falsy");
            }
            this.rctx = new RenderingContext(gl);
            this.copier = new CopyProgram(this.rctx, true);
            this.tempTSM = new TextureSetManager(this.rctx, this.copier, true, 0);
        } catch (e) {
            throw new Error("Couldnt get webgl context" + e);
        }
    }
    // Builds the root component (an EffectList wrapping the whole preset)
    // and resets the shared register bank and boot timestamp.
    private _setupRoot(preset: any) {
        this.registerBank = {};
        this.bootTime = (new Date()).getTime();
        this.rootComponent = new EffectList(this, null, preset);
    }
    // Begins the requestAnimationFrame loop.
    private _startAnimation() {
        let drawFrame = () => {
            this.analyser.update();
            this.rootComponent.draw();
            // re-schedule from inside the frame to keep the loop running
            this.animReqId = this.requestAnimationFrame(drawFrame);
        };
        // Wrap drawframe in stats collection if required
        if (this.stats) {
            const oldDrawFrame = drawFrame;
            drawFrame = () => {
                this.stats.begin();
                oldDrawFrame();
                this.stats.end();
            };
        }
        this.animReqId = this.requestAnimationFrame(drawFrame);
    }
    // Cancels the pending frame scheduled by _startAnimation.
    private _stopAnimation() {
        this.cancelAnimationFrame(this.animReqId);
    }
}
import * as DeleteYankChangeAction from "../action/DeleteYankChangeAction";
import * as DeleteYankChangeHighlightedLineAction from "../action/DeleteYankChangeHighlightedLineAction";
import * as DeleteYankChangeHighlightedTextAction from "../action/DeleteYankChangeHighlightedTextAction";
import * as InsertTextAction from "../action/InsertTextAction";
import * as JoinHighlightedLinesAction from "../action/JoinHighlightedLinesAction";
import * as JoinLinesAction from "../action/JoinLinesAction";
import * as OpenNewLineAndAppendTextAction from "../action/OpenNewLineAndAppendTextAction";
import * as PutRegisterAction from "../action/PutRegisterAction";
import * as RepeatLastChangeAction from "../action/RepeatLastChangeAction";
import * as ReplaceCharacterAction from "../action/ReplaceCharacterAction";
import * as ReplaceCharacterOfSelecetdTextAction from "../action/ReplaceCharacterOfSelecetdTextAction";
import * as StartVisualLineModeAction from "../action/StartVisualLineModeAction";
import * as StartVisualModeAction from "../action/StartVisualModeAction";
import * as BrancketMotion from "../motion/BrancketMotion";
import * as ChangeWordMotion from "../motion/ChangeWordMotion";
import * as DeleteEndOfWordMotion from "../motion/DeleteEndOfWordMotion";
import * as DownMotion from "../motion/DownMotion";
import * as FindCharacterMotion from "../motion/FindCharacterMotion";
import * as FirstCharacterInLineMotion from "../motion/FirstCharacterInLineMotion";
import * as FirstCharacterMotion from "../motion/FirstCharacterMotion";
import * as LastCharacterInLineMotion from "../motion/LastCharacterInLineMotion";
import * as MoveWordMotion from "../motion/MoveWordMotion";
import * as ParagraphMotion from "../motion/ParagraphMotion";
import * as RightMotion from "../motion/RightMotion";
import * as TextObjectSelectionBrancket from "../motion/textObjectSelection/Brancket";
import * as TextObjectQuotation from "../motion/textObjectSelection/Quotation";
import * as WordMotion from "../motion/WordMotion";
/**
 * Container of key binding tables, one table per key-input state.
 * Each table maps a typed key (e.g. "a", "<Up>") to the command it
 * triggers in that state; [[ApplyKeyBindings]] merges these tables.
 */
class KeyBindings implements IKeyBindings {
    public AtStart: { [key: string]: IVimStyleCommand };
    public FirstNum: { [key: string]: IVimStyleCommand };
    public RequireMotion: { [key: string]: IVimStyleCommand };
    public RequireMotionNum: { [key: string]: IVimStyleCommand };
    public RequireBrancketForLeftBrancket: { [key: string]: IVimStyleCommand };
    public RequireBrancketForRightBrancket: { [key: string]: IVimStyleCommand };
    public RequireBrancketForLeftBrancketMotion: { [key: string]: IVimStyleCommand };
    public RequireBrancketForRightBrancketMotion: { [key: string]: IVimStyleCommand };
    public RequireInnerTextObject: { [key: string]: IVimStyleCommand };
    public RequireOuterTextObject: { [key: string]: IVimStyleCommand };
    public SmallG: { [key: string]: IVimStyleCommand };
    public SmallGForMotion: { [key: string]: IVimStyleCommand };
    public VisualMode: { [key: string]: IVimStyleCommand };
    // Added for consistency: ApplyKeyBindings and DefaultKeyBindings both
    // use a VisualModeNum state table, which was missing here.
    public VisualModeNum: { [key: string]: IVimStyleCommand };
    public VisualLineMode: { [key: string]: IVimStyleCommand };
}
// Shallow-merges one state's binding table: every binding in src
// overwrites the entry with the same key in dest.
// (for..in is kept deliberately — it tolerates an undefined src table,
// where Object.keys(undefined) would throw.)
function applyKeyBindingsByEachState(
    dest: { [key: string]: IVimStyleCommand }, src: { [key: string]: IVimStyleCommand }) {
    for (const key in src) {
        dest[key] = src[key];
    }
}
/**
 * Merges the binding tables of src into dest, state by state.
 * Only states present (truthy) on dest are touched. The counted states
 * (FirstNum, RequireMotionNum, VisualModeNum) first inherit the bindings
 * of their un-counted counterpart, then apply their own overrides.
 */
export function ApplyKeyBindings(dest: IKeyBindings, src: IKeyBindings) {
    // dest state -> src states merged into it, in application order.
    const mergePlan: { [destState: string]: string[] } = {
        AtStart: ["AtStart"],
        FirstNum: ["AtStart", "FirstNum"],
        RequireMotion: ["RequireMotion"],
        RequireMotionNum: ["RequireMotion", "RequireMotionNum"],
        RequireBrancketForLeftBrancket: ["RequireBrancketForLeftBrancket"],
        RequireBrancketForRightBrancket: ["RequireBrancketForRightBrancket"],
        RequireBrancketForLeftBrancketMotion: ["RequireBrancketForLeftBrancketMotion"],
        RequireBrancketForRightBrancketMotion: ["RequireBrancketForRightBrancketMotion"],
        RequireInnerTextObject: ["RequireInnerTextObject"],
        RequireOuterTextObject: ["RequireOuterTextObject"],
        SmallG: ["SmallG"],
        SmallGForMotion: ["SmallGForMotion"],
        VisualMode: ["VisualMode"],
        VisualModeNum: ["VisualMode", "VisualModeNum"],
        VisualLineMode: ["VisualLineMode"],
    };
    for (const destState of Object.keys(mergePlan)) {
        // Indexed access by state name; the string keys above mirror the
        // IKeyBindings fields.
        const destTable = (dest as any)[destState];
        if (!destTable) {
            continue;
        }
        for (const srcState of mergePlan[destState]) {
            applyKeyBindingsByEachState(destTable, (src as any)[srcState]);
        }
    }
}
const DefaultKeyBindings: IKeyBindings = {
AtStart: {
"a": {
CreateAction: InsertTextAction.AppendTextAfterCursor,
},
"A": {
CreateAction: InsertTextAction.AppendTextAtEndOfLine,
},
"b": {
CreateAction: WordMotion.GotoWordBackword,
},
"B": {
CreateAction: WordMotion.GotoBlankSeparatedBackwordWord,
},
"c": {
CreateAction: DeleteYankChangeAction.ChangeTextWithMotion,
state: StateName.RequireMotion,
},
"C": {
CreateAction: DeleteYankChangeAction.ChangeTextToEndOfLine,
},
"d": {
CreateAction: DeleteYankChangeAction.DeleteTextWithMotion,
state: StateName.RequireMotion,
},
"D": {
CreateAction: DeleteYankChangeAction.DeleteTextToEndOfLine,
},
"e": {
CreateAction: DeleteEndOfWordMotion.GotoForwardToEndOfWold,
},
"E": {
CreateAction: DeleteEndOfWordMotion.GotoForwardToEndOfBlankSeparated,
},
"f": {
CreateAction: FindCharacterMotion.GotoCharacterToRight,
state: StateName.RequireCharForMotion,
},
"F": {
CreateAction: FindCharacterMotion.GotoCharacterToLeft,
state: StateName.RequireCharForMotion,
},
"g": {
cmd: VimCommand.nothing,
state: StateName.SmallG,
},
"G": {
CreateAction: FirstCharacterMotion.GotoLastLine,
},
"h": {
CreateAction: RightMotion.GotoLeft,
},
// H no function
"i": {
CreateAction: InsertTextAction.InsertTextBeforeCursor,
},
"I": {
CreateAction: InsertTextAction.InsertTextBeforeFirstNonBlankInLine,
},
"j": {
CreateAction: DownMotion.GoDown,
},
"J": {
CreateAction: JoinLinesAction.JoinLines,
},
"k": {
CreateAction: DownMotion.GoUp,
},
// K no function
"l": {
CreateAction: RightMotion.GotoRight,
},
// L no function
"o": {
CreateAction: OpenNewLineAndAppendTextAction.OpenNewLineBelowCurrentLineAndAppendText,
},
"O": {
CreateAction: OpenNewLineAndAppendTextAction.OpenNewLineAboveCurrentLineAndAppendText,
},
"p": {
CreateAction: PutRegisterAction.PutRegisterAfterCursorPosition,
},
"P": {
CreateAction: PutRegisterAction.PutRegisterBeforeCursorPosition,
},
// q low priority
// Q never support
"r": {
CreateAction: ReplaceCharacterAction.ReplaceCharacter,
state: StateName.RequireCharForAction,
},
// R low priority
"s": {
CreateAction: DeleteYankChangeAction.ChangeCharacters,
},
"S": {
CreateAction: DeleteYankChangeAction.ChangeLines,
},
"t": {
CreateAction: FindCharacterMotion.GoTillBeforeCharacterToRight,
state: StateName.RequireCharForMotion,
},
"T": {
CreateAction: FindCharacterMotion.GoTillBeforeCharacterToLeft,
state: StateName.RequireCharForMotion,
},
// u low priority
// U low priority
"v": {
CreateAction: StartVisualModeAction.StartVisualMode,
},
"V": {
CreateAction: StartVisualLineModeAction.StartVisualLineMode,
},
"w": {
CreateAction: MoveWordMotion.GotoWordFoword,
},
"W": {
CreateAction: MoveWordMotion.GotoBlankSeparated,
},
"x": {
CreateAction: DeleteYankChangeAction.DeleteCharactersUnderCursor,
},
"X": {
CreateAction: DeleteYankChangeAction.DeleteCharactersBeforeCursor,
},
"y": {
CreateAction: DeleteYankChangeAction.YankTextWithMotion,
state: StateName.RequireMotion,
},
"Y": {
CreateAction: DeleteYankChangeAction.YankLine,
},
// z never suppoer
// Z no function
"0": {
CreateAction: FirstCharacterInLineMotion.GotoFirstCharacterInLine,
},
"1": {
cmd: VimCommand.stackNumber,
state: StateName.FirstNum,
},
"2": {
cmd: VimCommand.stackNumber,
state: StateName.FirstNum,
},
"3": {
cmd: VimCommand.stackNumber,
state: StateName.FirstNum,
},
"4": {
cmd: VimCommand.stackNumber,
state: StateName.FirstNum,
},
"5": {
cmd: VimCommand.stackNumber,
state: StateName.FirstNum,
},
"6": {
cmd: VimCommand.stackNumber,
state: StateName.FirstNum,
},
"7": {
cmd: VimCommand.stackNumber,
state: StateName.FirstNum,
},
"8": {
cmd: VimCommand.stackNumber,
state: StateName.FirstNum,
},
"9": {
cmd: VimCommand.stackNumber,
state: StateName.FirstNum,
},
"$": {
CreateAction: LastCharacterInLineMotion.GotoLastCharacterInLine,
},
".": {
CreateAction: RepeatLastChangeAction.RepeatLastChange,
},
",": {
CreateAction: FindCharacterMotion.GotoRepeatCharacterOppositeDirection,
},
";": {
CreateAction: FindCharacterMotion.GotoRepeatCharacter,
},
"^": {
CreateAction: FirstCharacterMotion.GotoFirstNonBlankCharacterInLine,
},
"[": {
cmd: VimCommand.nothing,
state: StateName.RequireBrancketForLeftBrancket,
},
"]": {
cmd: VimCommand.nothing,
state: StateName.RequireBrancketForRightBrancket,
},
"{": {
CreateAction: ParagraphMotion.GotoParagraphBackword,
},
"}": {
CreateAction: ParagraphMotion.GotoParagraphFoword,
},
"<Up>": {
CreateAction: DownMotion.GoUp,
},
"<Down>": {
CreateAction: DownMotion.GoDown,
},
"<Left>": {
CreateAction: RightMotion.GotoLeft,
},
"<Right>": {
CreateAction: RightMotion.GotoRight,
},
},
// Nx
FirstNum: {
0: {
cmd: VimCommand.stackNumber,
state: StateName.FirstNum,
},
},
// cm
RequireMotion: {
"a": {
state: StateName.RequireOuterTextObject,
},
// dA
"b": {
AddMotion: WordMotion.AddWordBackwardMotion,
},
"B": {
AddMotion: WordMotion.AddBlankSeparatedBackwordMotion,
},
"c": {
CreateAction: DeleteYankChangeAction.ChangeCurrentLine,
},
// C no command
"d": {
CreateAction: DeleteYankChangeAction.DeleteCurrentLine,
},
// D no command
"e": {
AddMotion: DeleteEndOfWordMotion.AddEndOfWordMotion,
},
"E": {
AddMotion: DeleteEndOfWordMotion.AddEndOfBlankSeparatedMotion,
},
"f": {
AddMotion: FindCharacterMotion.AddCharacterToRightMotion,
state: StateName.RequireCharForMotion,
},
"F": {
AddMotion: FindCharacterMotion.AddCharacterToLeftMotion,
state: StateName.RequireCharForMotion,
},
"g": {
cmd: VimCommand.nothing,
state: StateName.SmallGForMotion,
},
"G": {
AddMotion: FirstCharacterMotion.AddLastLineMotion,
},
"h": {
AddMotion: RightMotion.AddLeftMotion,
},
// H no function
"i": {
state: StateName.RequireInnerTextObject,
},
// I
"j": {
AddMotion: DownMotion.AddDownMotion,
},
// J
"k": {
AddMotion: DownMotion.AddUpMotion,
},
// K no function
"l": {
AddMotion: RightMotion.AddRightMotion,
},
// L no function
// o never support
// O no function
// p never support
// P no function
// q no function
// Q no function
// r no function
// R low priority
// s ?
// S ?
"t": {
AddMotion: FindCharacterMotion.AddTillCharacterToRightMotion,
state: StateName.RequireCharForMotion,
},
"T": {
AddMotion: FindCharacterMotion.AddTillCharacterToLeftMotion,
state: StateName.RequireCharForMotion,
},
// u low priority
// U low priority
// v low priority
// V low priority
"w": {
AddMotion: ChangeWordMotion.AddWordForwordMotion,
},
"W": {
AddMotion: ChangeWordMotion.AddBlankSparatedMotion,
},
// x no function
// X no function
"y": {
CreateAction: DeleteYankChangeAction.YankCurrentLine,
},
// Y no command
// z never suppoer
// Z no function
"0": {
AddMotion: FirstCharacterInLineMotion.AddFirstCharacterInLineMotion,
},
"1": {
cmd: VimCommand.stackNumber,
state: StateName.RequireMotionNum,
},
"2": {
cmd: VimCommand.stackNumber,
state: StateName.RequireMotionNum,
},
"3": {
cmd: VimCommand.stackNumber,
state: StateName.RequireMotionNum,
},
"4": {
cmd: VimCommand.stackNumber,
state: StateName.RequireMotionNum,
},
"5": {
cmd: VimCommand.stackNumber,
state: StateName.RequireMotionNum,
},
"6": {
cmd: VimCommand.stackNumber,
state: StateName.RequireMotionNum,
},
"7": {
cmd: VimCommand.stackNumber,
state: StateName.RequireMotionNum,
},
"8": {
cmd: VimCommand.stackNumber,
state: StateName.RequireMotionNum,
},
"9": {
cmd: VimCommand.stackNumber,
state: StateName.RequireMotionNum,
},
"$": {
AddMotion: LastCharacterInLineMotion.AddLastCharacterInLineMotion,
},
",": {
AddMotion: FindCharacterMotion.AddRepeartCharacterMotionOppositeDirection,
},
";": {
AddMotion: FindCharacterMotion.AddRepeartCharacterMotion,
},
"^": {
AddMotion: FirstCharacterMotion.AddFirstNonBlankCharacterInLineMotion,
},
"{": {
AddMotion: ParagraphMotion.AddParagraphBackwordMotion,
},
"}": {
AddMotion: ParagraphMotion.AddParagraphFowordMotion,
},
"[": {
cmd: VimCommand.nothing,
state: StateName.RequireBrancketForLeftBrancketMotion,
},
"]": {
cmd: VimCommand.nothing,
state: StateName.RequireBrancketForRightBrancketMotion,
},
"<Up>": {
AddMotion: DownMotion.AddUpMotion,
},
"<Down>": {
AddMotion: DownMotion.AddDownMotion,
},
"<Left>": {
AddMotion: RightMotion.AddLeftMotion,
},
"<Right>": {
AddMotion: RightMotion.AddRightMotion,
},
},
// cNm
RequireMotionNum: {
0: {
cmd: VimCommand.stackNumber,
state: StateName.RequireMotionNum,
},
},
RequireBrancketForLeftBrancket: {
// [(
"(": {
CreateAction: BrancketMotion.GoBackToUnclosedLeftParenthesis,
},
// [{
"{": {
CreateAction: BrancketMotion.GoBackToUnclosedLeftCurlyBracket,
},
},
RequireBrancketForLeftBrancketMotion: {
// c[(
"(": {
AddMotion: BrancketMotion.AddBackToUnclosedLeftParenthesisMotion,
},
// c[{
"{": {
AddMotion: BrancketMotion.AddBackToUnclosedLeftCurlyBracketMotion,
},
},
RequireBrancketForRightBrancket: {
// ])
")": {
CreateAction: BrancketMotion.GoToUnclosedRightParenthesis,
},
// ]}
"}": {
CreateAction: BrancketMotion.GoToUnclosedRightCurlyBracket,
},
},
RequireBrancketForRightBrancketMotion: {
// c])
")": {
AddMotion: BrancketMotion.AddToUnclosedRightParenthesisMotion,
},
// c]}
"}": {
AddMotion: BrancketMotion.AddToUnclosedRightCurlyBracketMotion,
},
},
RequireInnerTextObject: {
"(": {
AddMotion: TextObjectSelectionBrancket.AddInnerUnclosedParenthesisSelection,
},
")": {
AddMotion: TextObjectSelectionBrancket.AddInnerUnclosedParenthesisSelection,
},
"<": {
AddMotion: TextObjectSelectionBrancket.AddInnerLessThanSignSelection,
},
">": {
AddMotion: TextObjectSelectionBrancket.AddInnerLessThanSignSelection,
},
"[": {
AddMotion: TextObjectSelectionBrancket.AddInnerSquareBlancketSelection,
},
"]": {
AddMotion: TextObjectSelectionBrancket.AddInnerSquareBlancketSelection,
},
"{": {
AddMotion: TextObjectSelectionBrancket.AddInnerCurlyBrancketSelection,
},
"}": {
AddMotion: TextObjectSelectionBrancket.AddInnerCurlyBrancketSelection,
},
"'": {
AddMotion:
TextObjectQuotation.AddInnerApostropheSelection,
},
"\"": {
AddMotion:
TextObjectQuotation.AddInnerQuotationSelection,
},
"`": {
AddMotion:
TextObjectQuotation.AddInnerGraveAccentSelection,
},
},
RequireOuterTextObject: {
"(": {
AddMotion: TextObjectSelectionBrancket.AddOuterUnclosedParenthesisSelection,
},
")": {
AddMotion: TextObjectSelectionBrancket.AddOuterUnclosedParenthesisSelection,
},
"<": {
AddMotion: TextObjectSelectionBrancket.AddOuterLessThanSignSelection,
},
">": {
AddMotion: TextObjectSelectionBrancket.AddOuterLessThanSignSelection,
},
"[": {
AddMotion: TextObjectSelectionBrancket.AddOuterSquareBlancketSelection,
},
"]": {
AddMotion: TextObjectSelectionBrancket.AddOuterSquareBlancketSelection,
},
"{": {
AddMotion: TextObjectSelectionBrancket.AddOuterCurlyBrancketSelection,
},
"}": {
AddMotion: TextObjectSelectionBrancket.AddOuterCurlyBrancketSelection,
},
"'": {
AddMotion:
TextObjectQuotation.AddOuterApostropheSelection,
},
"\"": {
AddMotion:
TextObjectQuotation.AddOuterQuotationSelection,
},
"`": {
AddMotion:
TextObjectQuotation.AddOuterGraveAccentSelection,
},
},
// g
SmallG: {
g: {
CreateAction: FirstCharacterMotion.GotoFirstLineOnFirstNonBlankCharacter,
},
r: {
CreateAction: ReplaceCharacterAction.ReplaceCharacterWithoutAffectingLayout,
state: StateName.RequireCharForAction,
},
},
// cg
SmallGForMotion: {
g: {
AddMotion: FirstCharacterMotion.AddLastLineMotion,
},
},
// v
VisualMode: {
// v..a
// v..A
"b": {
AddMotion: WordMotion.AddWordBackwardMotion,
},
"B": {
AddMotion: WordMotion.AddBlankSeparatedBackwordMotion,
},
"c": {
CreateAction: DeleteYankChangeHighlightedTextAction.ChangeHighlightedText,
},
// C no command
"d": {
CreateAction: DeleteYankChangeHighlightedTextAction.DeleteHighlightedText,
},
// D no command
"e": {
AddMotion: DeleteEndOfWordMotion.AddMoveToForwardToEndOfWoldMotion,
},
"E": {
AddMotion: DeleteEndOfWordMotion.AddMoveToForwardToEndOfBlankSeparatedMotion,
},
"f": {
AddMotion: FindCharacterMotion.AddVisualGotoCharacterToRightMotion,
state: StateName.RequireCharForMotion,
},
"F": {
AddMotion: FindCharacterMotion.AddVisualGotoCharacterToLeftMotion,
state: StateName.RequireCharForMotion,
},
"g": {
cmd: VimCommand.nothing,
state: StateName.SmallGForMotion,
},
"G": {
AddMotion: FirstCharacterMotion.AddLastLineMotion,
},
"h": {
AddMotion: RightMotion.AddLeftMotion,
},
// H no function
// v..i
// v..I
"j": {
AddMotion: DownMotion.AddDownMotion,
},
"J": {
CreateAction: JoinHighlightedLinesAction.JoinHighlightedText,
},
"k": {
AddMotion: DownMotion.AddUpMotion,
},
// K no function
"l": {
AddMotion: RightMotion.AddRightMotion,
},
// l no function
// o never support
// O no function
// p never support
// P no function
// q no function
// Q no function
"r": {
CreateAction: ReplaceCharacterOfSelecetdTextAction.ReplaceCharacterOfSelectedText,
state: StateName.RequireCharForAction,
},
// R low priority
"s": {
CreateAction: DeleteYankChangeHighlightedTextAction.ChangeHighlightedText,
},
// S ?
"t": {
AddMotion: FindCharacterMotion.AddVisualGoTillCharacterToRightMotion,
state: StateName.RequireCharForMotion,
},
"T": {
AddMotion: FindCharacterMotion.AddVisualGoTillCharacterToLeftMotion,
state: StateName.RequireCharForMotion,
},
// u low priority
// U low priority
// v low priority
// V low priority
"w": {
AddMotion: MoveWordMotion.AddToWordFowordMotion,
},
"W": {
AddMotion: MoveWordMotion.AddToBlankSeparatedMotion,
},
"x": {
CreateAction: DeleteYankChangeHighlightedTextAction.DeleteHighlightedText,
},
// X no function
"y": {
CreateAction: DeleteYankChangeHighlightedTextAction.YankHighlightedText,
},
"0": {
AddMotion: FirstCharacterInLineMotion.AddFirstCharacterInLineMotion,
},
"1": {
cmd: VimCommand.stackNumber,
state: StateName.VisualModeNum,
},
"2": {
cmd: VimCommand.stackNumber,
state: StateName.VisualModeNum,
},
"3": {
cmd: VimCommand.stackNumber,
state: StateName.VisualModeNum,
},
"4": {
cmd: VimCommand.stackNumber,
state: StateName.VisualModeNum,
},
"5": {
cmd: VimCommand.stackNumber,
state: StateName.VisualModeNum,
},
"6": {
cmd: VimCommand.stackNumber,
state: StateName.VisualModeNum,
},
"7": {
cmd: VimCommand.stackNumber,
state: StateName.VisualModeNum,
},
"8": {
cmd: VimCommand.stackNumber,
state: StateName.VisualModeNum,
},
"9": {
cmd: VimCommand.stackNumber,
state: StateName.VisualModeNum,
},
"$": {
AddMotion: LastCharacterInLineMotion.AddLastCharacterInLineMotion,
},
",": {
AddMotion: FindCharacterMotion.AddVisualGotoRepeartCharacterMotionOppositeDirection,
},
";": {
AddMotion: FindCharacterMotion.AddVisualGotoRepeartCharacterMotion,
},
"{": {
AddMotion: ParagraphMotion.AddParagraphBackwordMotion,
},
"}": {
AddMotion: ParagraphMotion.AddParagraphFowordMotion,
},
"[": {
cmd: VimCommand.nothing,
state: StateName.RequireBrancketForLeftBrancketMotion,
},
"]": {
cmd: VimCommand.nothing,
state: StateName.RequireBrancketForRightBrancketMotion,
},
"<Up>": {
AddMotion: DownMotion.AddUpMotion,
},
"<Down>": {
AddMotion: DownMotion.AddDownMotion,
},
"<Left>": {
AddMotion: RightMotion.AddLeftMotion,
},
"<Right>": {
AddMotion: RightMotion.AddRightMotion,
},
},
// vN
VisualModeNum: {
0: {
cmd: VimCommand.stackNumber,
state: StateName.VisualModeNum,
},
},
// V
VisualLineMode: {
// V..a
// V..A
"b": {
AddMotion: WordMotion.AddWordBackwardMotion,
},
"B": {
AddMotion: WordMotion.AddBlankSeparatedBackwordMotion,
},
"c": {
CreateAction: DeleteYankChangeHighlightedLineAction.ChangeHighligtedLine,
},
"C": {
CreateAction: DeleteYankChangeHighlightedLineAction.ChangeHighligtedLine,
},
"d": {
CreateAction: DeleteYankChangeHighlightedLineAction.DeleteHighlightedLine,
},
"D": {
CreateAction: DeleteYankChangeHighlightedLineAction.DeleteHighlightedLine,
},
// V..e
// V..E
"f": {
AddMotion: FindCharacterMotion.AddCharacterToRightMotion,
state: StateName.RequireCharForMotion,
},
"F": {
AddMotion: FindCharacterMotion.AddCharacterToLeftMotion,
state: StateName.RequireCharForMotion,
},
"g": {
cmd: VimCommand.nothing,
state: StateName.SmallGForMotion,
},
"G": {
AddMotion: FirstCharacterMotion.AddLastLineMotion,
},
// V..h
// V..H no function
// V..i
// V..I
"j": {
AddMotion: DownMotion.AddDownMotion,
},
"J": {
CreateAction: JoinHighlightedLinesAction.JoinHighlightedLines,
},
// V..J?
"k": {
AddMotion: DownMotion.AddUpMotion,
},
// V..K no function
// V..l
// V..L no function
// V..o never support
// V..O no function
// V..p never support
// V..P no function
// V..q no function
// V..Q no function
// V..r no function
// V..R low priority
// V..s ?
// V..S ?
"t": {
AddMotion: FindCharacterMotion.AddCharacterToRightMotion,
state: StateName.RequireCharForMotion,
},
"T": {
AddMotion: FindCharacterMotion.AddTillCharacterToLeftMotion,
state: StateName.RequireCharForMotion,
},
// u low priority
// U low priority
// V..v low priority
// V..V back to normal mode
// V..w
// v..W
"x": {
CreateAction: DeleteYankChangeHighlightedLineAction.DeleteHighlightedLine,
},
"X": {
CreateAction: DeleteYankChangeHighlightedLineAction.DeleteHighlightedLine,
},
"y": {
CreateAction: DeleteYankChangeHighlightedLineAction.YankHighlightedLine,
},
"Y": {
CreateAction: DeleteYankChangeHighlightedLineAction.YankHighlightedLine,
},
"0": {
AddMotion: FirstCharacterInLineMotion.AddFirstCharacterInLineMotion,
},
"1": {
cmd: VimCommand.stackNumber,
state: StateName.RequireMotionNum,
},
"2": {
cmd: VimCommand.stackNumber,
state: StateName.RequireMotionNum,
},
"3": {
cmd: VimCommand.stackNumber,
state: StateName.RequireMotionNum,
},
"4": {
cmd: VimCommand.stackNumber,
state: StateName.RequireMotionNum,
},
"5": {
cmd: VimCommand.stackNumber,
state: StateName.RequireMotionNum,
},
"6": {
cmd: VimCommand.stackNumber,
state: StateName.RequireMotionNum,
},
"7": {
cmd: VimCommand.stackNumber,
state: StateName.RequireMotionNum,
},
"8": {
cmd: VimCommand.stackNumber,
state: StateName.RequireMotionNum,
},
"9": {
cmd: VimCommand.stackNumber,
state: StateName.RequireMotionNum,
},
"$": {
AddMotion: LastCharacterInLineMotion.AddLastCharacterInLineMotion,
},
",": {
AddMotion: FindCharacterMotion.AddRepeartCharacterMotionOppositeDirection,
},
";": {
AddMotion: FindCharacterMotion.AddRepeartCharacterMotion,
},
"{": {
AddMotion: ParagraphMotion.AddParagraphBackwordMotion,
},
"}": {
AddMotion: ParagraphMotion.AddParagraphFowordMotion,
},
"[": {
cmd: VimCommand.nothing,
state: StateName.RequireBrancketForLeftBrancket,
},
"]": {
cmd: VimCommand.nothing,
state: StateName.RequireBrancketForRightBrancket,
},
"<Up>": {
AddMotion: DownMotion.AddUpMotion,
},
"<Down>": {
AddMotion: DownMotion.AddDownMotion,
},
"<Left>": {
AddMotion: RightMotion.AddLeftMotion,
},
"<Right>": {
AddMotion: RightMotion.AddRightMotion,
},
},
};
// move the cursor with the j, k, l and ; keys
const ErgonomicKeyBindings: IKeyBindings = {
    AtStart: {
        "j": DefaultKeyBindings.AtStart.h,
        "k": DefaultKeyBindings.AtStart.j,
        "l": DefaultKeyBindings.AtStart.k,
        ";": DefaultKeyBindings.AtStart.l,
    },
    RequireMotion: {
        "j": DefaultKeyBindings.RequireMotion.h,
        "k": DefaultKeyBindings.RequireMotion.j,
        "l": DefaultKeyBindings.RequireMotion.k,
        ";": DefaultKeyBindings.RequireMotion.l,
    },
    VisualMode: {
        "j": DefaultKeyBindings.VisualMode.h,
        "k": DefaultKeyBindings.VisualMode.j,
        "l": DefaultKeyBindings.VisualMode.k,
        ";": DefaultKeyBindings.VisualMode.l,
    },
    // NOTE(review): the two tables below pull their motions from
    // DefaultKeyBindings.VisualMode rather than from the VisualModeNum /
    // VisualLineMode defaults — presumably intentional because the h/j/k/l
    // motions are shared across the visual modes; confirm against the
    // default tables.
    VisualModeNum: {
        "j": DefaultKeyBindings.VisualMode.h,
        "k": DefaultKeyBindings.VisualMode.j,
        "l": DefaultKeyBindings.VisualMode.k,
        ";": DefaultKeyBindings.VisualMode.l,
    },
    VisualLineMode: {
        "j": DefaultKeyBindings.VisualMode.h,
        "k": DefaultKeyBindings.VisualMode.j,
        "l": DefaultKeyBindings.VisualMode.k,
        ";": DefaultKeyBindings.VisualMode.l,
    },
};
export function LoadKeyBindings(opts: IVimStyleOptions): IKeyBindings {
let bindings: IKeyBindings = {
AtStart: {},
FirstNum: {},
RequireMotion: {},
RequireMotionNum: {},
RequireBrancketForLeftBrancket: {},
RequireBrancketForLeftBrancketMotion: {},
RequireBrancketForRightBrancket: {},
RequireBrancketForRightBrancketMotion: {},
RequireInnerTextObject: {},
RequireOuterTextObject: {},
SmallG: {},
SmallGForMotion: {},
VisualMode: {},
VisualModeNum: {},
VisualLineMode: {},
};
let key: string;
ApplyKeyBindings(bindings, DefaultKeyBindings);
if (opts.useErgonomicKeyForMotion) {
ApplyKeyBindings(bindings, ErgonomicKeyBindings);
}
if (opts.editorKeyBindings) {
ApplyKeyBindings(bindings, opts.editorKeyBindings);
}
return bindings;
} | the_stack |
import config from '../../../../lib/config';
import { getBreakpoint as getBreakpoint_ } from '../../../../lib/detect';
import { isUserLoggedIn as isUserLoggedIn_ } from '../identity/api';
import userPrefs from '../user-prefs';
import { commercialFeatures } from './commercial-features';
import type { CommercialFeaturesConstructor } from './commercial-features';
import {
isAdFreeUser as isAdFreeUser_,
isPayingMember as isPayingMember_,
isRecentOneOffContributor as isRecentOneOffContributor_,
shouldHideSupportMessaging as shouldHideSupportMessaging_,
} from './user-features';
// Re-type the auto-mocked module exports as jest mock functions so the tests
// below can call mockReturnValue(...) on them with full type safety.
const isPayingMember = isPayingMember_ as jest.MockedFunction<
	typeof isPayingMember_
>;
const isRecentOneOffContributor =
	isRecentOneOffContributor_ as jest.MockedFunction<
		typeof isRecentOneOffContributor_
	>;
const shouldHideSupportMessaging =
	shouldHideSupportMessaging_ as jest.MockedFunction<
		typeof shouldHideSupportMessaging_
	>;
const isAdFreeUser = isAdFreeUser_ as jest.MockedFunction<typeof isAdFreeUser_>;
const getBreakpoint = getBreakpoint_ as jest.MockedFunction<
	typeof getBreakpoint_
>;
const isUserLoggedIn = isUserLoggedIn_ as jest.MockedFunction<
	typeof isUserLoggedIn_
>;
// The singleton's constructor is re-invoked per test to get a fresh instance.
const CommercialFeatures =
	commercialFeatures.constructor as CommercialFeaturesConstructor;
// jest.mock calls are hoisted above the imports by the jest transform, so the
// factories below replace the real modules for every import in this file.
jest.mock('./user-features', () => ({
	isPayingMember: jest.fn(),
	isRecentOneOffContributor: jest.fn(),
	shouldHideSupportMessaging: jest.fn(),
	isAdFreeUser: jest.fn(),
}));
jest.mock('../../../../lib/detect', () => ({
	getBreakpoint: jest.fn(),
}));
jest.mock('../identity/api', () => ({
	isUserLoggedIn: jest.fn(),
}));
// Each test constructs a fresh CommercialFeatures AFTER tweaking config/mocks,
// so the feature flags are computed from the state set up in that test.
describe('Commercial features', () => {
	beforeEach(() => {
		jest.resetAllMocks();
		// Set up a happy path by default
		config.set('page', {
			contentType: 'Article',
			isMinuteArticle: false,
			section: 'politics',
			pageId: 'politics-article',
			shouldHideAdverts: false,
			shouldHideReaderRevenue: false,
			isFront: false,
			showRelatedContent: true,
		});
		config.set('switches', {
			commercial: true,
			enableDiscussionSwitch: true,
		});
		window.location.hash = '';
		userPrefs.removeSwitch('adverts');
		getBreakpoint.mockReturnValue('desktop');
		isPayingMember.mockReturnValue(false);
		isRecentOneOffContributor.mockReturnValue(false);
		shouldHideSupportMessaging.mockReturnValue(false);
		isAdFreeUser.mockReturnValue(false);
		isUserLoggedIn.mockReturnValue(true);
		// Guards against tests that accidentally make no assertions.
		expect.hasAssertions();
	});
	describe('DFP advertising', () => {
		it('Runs by default', () => {
			const features = new CommercialFeatures();
			expect(features.dfpAdvertising).toBe(true);
		});
		it('Is disabled on sensitive pages', () => {
			// Like all newspapers, the Guardian must sometimes cover disturbing and graphic content.
			// Showing adverts on these pages would be crass - callous, even.
			config.set('page.shouldHideAdverts', true);
			const features = new CommercialFeatures();
			expect(features.dfpAdvertising).toBe(false);
		});
		it('Is disabled on the children`s book site', () => {
			// ASA guidelines prohibit us from showing adverts on anything that might be deemed childrens' content
			config.set('page.section', 'childrens-books-site');
			const features = new CommercialFeatures();
			expect(features.dfpAdvertising).toBe(false);
		});
		it('Is skipped for speedcurve tests', () => {
			// We don't want external dependencies getting in the way of perf tests
			window.location.hash = '#noads';
			const features = new CommercialFeatures();
			expect(features.dfpAdvertising).toBe(false);
		});
		it('Is disabled for speedcurve tests in ad-free mode', () => {
			window.location.hash = '#noadsaf';
			const features = new CommercialFeatures();
			expect(features.adFree).toBe(true);
			expect(features.dfpAdvertising).toBe(false);
		});
	});
	describe('Article body adverts', () => {
		it('Runs by default', () => {
			const features = new CommercialFeatures();
			expect(features.articleBodyAdverts).toBe(true);
		});
		it('Doesn`t run in minute articles', () => {
			config.set('page.isMinuteArticle', true);
			const features = new CommercialFeatures();
			expect(features.articleBodyAdverts).toBe(false);
		});
		it('Doesn`t run in non-article pages', () => {
			config.set('page.contentType', 'Network Front');
			const features = new CommercialFeatures();
			expect(features.articleBodyAdverts).toBe(false);
		});
		it('Doesn`t run in live blogs', () => {
			config.set('page.isLiveBlog', true);
			const features = new CommercialFeatures();
			expect(features.articleBodyAdverts).toBe(false);
		});
	});
	describe('Article body adverts under ad-free', () => {
		// LOL grammar
		it('are disabled', () => {
			isAdFreeUser.mockReturnValue(true);
			const features = new CommercialFeatures();
			expect(features.articleBodyAdverts).toBe(false);
		});
	});
	describe('High-relevance commercial component', () => {
		it('Does not run on fronts', () => {
			config.set('page.isFront', true);
			const features = new CommercialFeatures();
			expect(features.highMerch).toBe(false);
		});
		it('Does run on outside of fronts', () => {
			config.set('page.isFront', false);
			const features = new CommercialFeatures();
			expect(features.highMerch).toBe(true);
		});
		it('Does not run on minute articles', () => {
			config.set('page.isMinuteArticle', true);
			const features = new CommercialFeatures();
			expect(features.highMerch).toBe(false);
		});
	});
	describe('High-relevance commercial component under ad-free', () => {
		beforeEach(() => {
			isAdFreeUser.mockReturnValue(true);
		});
		it('Does not run on fronts', () => {
			config.set('page.isFront', true);
			const features = new CommercialFeatures();
			expect(features.highMerch).toBe(false);
		});
		it('Does not run outside of fronts', () => {
			config.set('page.isFront', false);
			const features = new CommercialFeatures();
			expect(features.highMerch).toBe(false);
		});
		it('Does not run on minute articles', () => {
			config.set('page.isMinuteArticle', true);
			const features = new CommercialFeatures();
			expect(features.highMerch).toBe(false);
		});
	});
	describe('Third party tags', () => {
		it('Runs by default', () => {
			const features = new CommercialFeatures();
			expect(features.thirdPartyTags).toBe(true);
		});
		it('Does not run on identity pages', () => {
			config.set('page.contentType', 'Identity');
			const features = new CommercialFeatures();
			expect(features.thirdPartyTags).toBe(false);
		});
		it('Does not run on identity section', () => {
			// This is needed for identity pages in the profile subdomain
			config.set('page.section', 'identity');
			const features = new CommercialFeatures();
			expect(features.thirdPartyTags).toBe(false);
		});
		it('Does not run on the secure contact interactive', () => {
			config.set(
				'page.pageId',
				'help/ng-interactive/2017/mar/17/contact-the-guardian-securely',
			);
			const features = new CommercialFeatures();
			expect(features.thirdPartyTags).toBe(false);
		});
		it('Does not run on secure contact help page', () => {
			config.set(
				'page.pageId',
				'help/2016/sep/19/how-to-contact-the-guardian-securely',
			);
			const features = new CommercialFeatures();
			expect(features.thirdPartyTags).toBe(false);
		});
	});
	describe('Third party tags under ad-free', () => {
		beforeEach(() => {
			isAdFreeUser.mockReturnValue(true);
		});
		it('Does not run by default', () => {
			const features = new CommercialFeatures();
			expect(features.thirdPartyTags).toBe(false);
		});
		it('Does not run on identity pages', () => {
			config.set('page.contentType', 'Identity');
			const features = new CommercialFeatures();
			expect(features.thirdPartyTags).toBe(false);
		});
		it('Does not run on identity section', () => {
			// This is needed for identity pages in the profile subdomain
			config.set('page.section', 'identity');
			const features = new CommercialFeatures();
			expect(features.thirdPartyTags).toBe(false);
		});
		it('Does not run on secure contact pages', () => {
			config.set(
				'page.pageId',
				'help/ng-interactive/2017/mar/17/contact-the-guardian-securely',
			);
			const features = new CommercialFeatures();
			expect(features.thirdPartyTags).toBe(false);
		});
	});
	// Comment adverts depend on commentability, login state and (for live
	// blogs) the breakpoint, hence the extra setup below.
	describe('Comment adverts', () => {
		beforeEach(() => {
			config.set('page.commentable', true);
			isUserLoggedIn.mockReturnValue(true);
		});
		it('Displays when page has comments', () => {
			const features = new CommercialFeatures();
			expect(features.commentAdverts).toBe(true);
		});
		it('Will also display when the user is not logged in', () => {
			isUserLoggedIn.mockReturnValue(false);
			const features = new CommercialFeatures();
			expect(features.commentAdverts).toBe(true);
		});
		it('Does not display on minute articles', () => {
			config.set('page.isMinuteArticle', true);
			const features = new CommercialFeatures();
			expect(features.commentAdverts).toBe(false);
		});
		it('Short circuits when no comments to add adverts to', () => {
			config.set('page.commentable', false);
			const features = new CommercialFeatures();
			expect(features.commentAdverts).toBe(false);
		});
		describe('If live blog', () => {
			beforeEach(() => {
				config.set('page.isLiveBlog', true);
			});
			it('Appears if page is wide', () => {
				getBreakpoint.mockReturnValue('wide');
				const features = new CommercialFeatures();
				expect(features.commentAdverts).toBe(true);
			});
			it('Does not appear if page is not wide', () => {
				getBreakpoint.mockReturnValue('desktop');
				const features = new CommercialFeatures();
				expect(features.commentAdverts).toBe(false);
			});
		});
	});
	describe('Comment adverts under ad-free', () => {
		beforeEach(() => {
			config.set('page.commentable', true);
			isAdFreeUser.mockReturnValue(true);
		});
		it('Does not display when page has comments', () => {
			const features = new CommercialFeatures();
			expect(features.commentAdverts).toBe(false);
		});
		it('Does not display on minute articles', () => {
			config.set('page.isMinuteArticle', true);
			const features = new CommercialFeatures();
			expect(features.commentAdverts).toBe(false);
		});
		it('Does not appear when user signed out', () => {
			isUserLoggedIn.mockReturnValue(false);
			const features = new CommercialFeatures();
			expect(features.commentAdverts).toBe(false);
		});
		it('Short circuits when no comments to add adverts to', () => {
			config.set('page.commentable', false);
			const features = new CommercialFeatures();
			expect(features.commentAdverts).toBe(false);
		});
		describe('If live blog', () => {
			beforeEach(() => {
				config.set('page.isLiveBlog', true);
			});
			it('Does not appear if page is wide', () => {
				getBreakpoint.mockReturnValue('wide');
				const features = new CommercialFeatures();
				expect(features.commentAdverts).toBe(false);
			});
			it('Does not appear if page is not wide', () => {
				getBreakpoint.mockReturnValue('desktop');
				const features = new CommercialFeatures();
				expect(features.commentAdverts).toBe(false);
			});
		});
	});
	describe('comscore ', () => {
		beforeEach(() => {
			config.set('switches.comscore', true);
		});
		it('Runs if switch is on', () => {
			const features = new CommercialFeatures();
			expect(features.comscore).toBe(true);
		});
		it('Does not run if switch is off', () => {
			config.set('switches.comscore', false);
			const features = new CommercialFeatures();
			expect(features.comscore).toBe(false);
		});
		it('Does not run on identity pages', () => {
			config.set('page.contentType', 'Identity');
			const features = new CommercialFeatures();
			expect(features.comscore).toBe(false);
		});
		it('Does not run on identity section', () => {
			// This is needed for identity pages in the profile subdomain
			config.set('page.section', 'identity');
			const features = new CommercialFeatures();
			expect(features.comscore).toBe(false);
		});
		it('Does not run on the secure contact interactive', () => {
			config.set(
				'page.pageId',
				'help/ng-interactive/2017/mar/17/contact-the-guardian-securely',
			);
			const features = new CommercialFeatures();
			expect(features.comscore).toBe(false);
		});
		it('Does not run on secure contact help page', () => {
			config.set(
				'page.pageId',
				'help/2016/sep/19/how-to-contact-the-guardian-securely',
			);
			const features = new CommercialFeatures();
			expect(features.comscore).toBe(false);
		});
	});
});
import { createElmNodeData, createUniqueNamespace } from "./DataCacheHelper";
import { getDocument, getWindow } from "./EnvUtils";
import { arrForEach, arrIndexOf, isArray, objForEachKey, objKeys } from "./HelperFuncs";
// Added to help with minification
const strOnPrefix = "on";
const strAttachEvent = "attachEvent";
const strAddEventHelper = "addEventListener";
const strDetachEvent = "detachEvent";
const strRemoveEventListener = "removeEventListener";
const strEvents = "events";
const strVisibilityChangeEvt: string = "visibilitychange";
const strPageHide: string = "pagehide";
const strPageShow: string = "pageshow";
const strUnload: string = "unload";
const strBeforeUnload: string = "beforeunload";
// Unique namespaces used to tag internally-attached pagehide/pageshow
// listeners so they can be identified and removed independently of user ones.
const strPageHideNamespace = createUniqueNamespace("aiEvtPageHide");
const strPageShowNamespace = createUniqueNamespace("aiEvtPageShow");
// Matches runs of two or more consecutive dots / trailing dots, used when
// normalizing the namespace portion of an event name.
const rRemoveEmptyNs = /\.[\.]+/g;
const rRemoveTrailingEmptyNs = /[\.]+$/;
// Monotonic id assigned to each registration so entries are distinguishable.
let _guid = 1;
// Parsed form of an event name: the bare event type plus its normalized,
// sorted, dot-separated namespace list (may be empty).
interface IEventDetails {
    type: string,
    ns: string
}
// A single listener registration tracked in the per-node event cache.
interface IRegisteredEvent {
    guid: number;
    evtName: IEventDetails,
    handler: any,
    capture: boolean
}
// Map of event type -> registrations for that type on a given node.
interface IAiEvents {
    [name: string]: IRegisteredEvent[]
}
// Per-DOM-node data store used to remember which events this helper attached.
const elmNodeData = createElmNodeData("events");
// Splits "type.ns1.ns2" into the event type (group 1) and the raw namespace
// portion (group 2). Added the semicolon missing from the original for
// consistency with the rest of the file.
const eventNamespace = /^([^.]*)(?:\.(.+)|)/;
/**
 * Strips any leading whitespace/dots and trailing dots/whitespace from a
 * namespace name. Values that are falsy or lack a replace() method (defensive
 * for non-string input) are returned untouched.
 */
function _normalizeNamespace(name: string) {
    if (!name || !name.replace) {
        return name;
    }
    return name.replace(/^\s*\.*|\.*\s*$/g, "");
}
/**
 * Merges an event name with any extra namespace(s) and parses the result into
 * its event type and a normalized (deduped, sorted) namespace string.
 */
function _getEvtNamespace(eventName: string | undefined, evtNamespace?: string | string[] | null): IEventDetails {
    if (evtNamespace) {
        let mergedNs = "";
        if (isArray(evtNamespace)) {
            // Concatenate every non-empty namespace, each prefixed with a dot.
            arrForEach(evtNamespace, (ns) => {
                const clean = _normalizeNamespace(ns);
                if (clean) {
                    mergedNs += clean[0] === "." ? clean : "." + clean;
                }
            });
        } else {
            mergedNs = _normalizeNamespace(evtNamespace);
        }
        if (mergedNs) {
            if (mergedNs[0] !== ".") {
                mergedNs = "." + mergedNs;
            }
            // We may only have the namespace and not an eventName
            eventName = (eventName || "") + mergedNs;
        }
    }
    const parsed: any[] = eventNamespace.exec(eventName || "") || [];
    // Collapse empty namespace segments, drop trailing dots, then sort so the
    // same namespace set always produces the same string.
    const rawNs = (parsed[2] || "")
        .replace(rRemoveEmptyNs, ".")
        .replace(rRemoveTrailingEmptyNs, "");
    return {
        type: parsed[1],
        ns: rawNs.split(".").sort().join(".")
    };
}
// Public (test-facing) view of a registration: the full "type.namespace" name
// plus the handler that was attached.
export interface _IRegisteredEvents {
    name: string;
    handler: any;
}
/**
 * Get all of the registered events on the target object, this is primarily used for testing cleanup but may also be used by
 * applications to remove their own events
 * @param target - The EventTarget that has registered events
 * @param eventName - [Optional] The name of the event to return the registered handlers and full name (with namespaces)
 * @param evtNamespace - [Optional] Additional namespace(s) to append to the event listeners so they can be uniquely identified and removed based on this namespace,
 * if the eventName also includes a namespace the namespace(s) are merged into a single namespace
 */
export function __getRegisteredEvents(target: any, eventName?: string, evtNamespace?: string | string[]): _IRegisteredEvents[] {
    let theEvents: _IRegisteredEvents[] = [];
    let eventCache = elmNodeData.get<IAiEvents>(target, strEvents, {}, false);
    let evtName = _getEvtNamespace(eventName, evtNamespace);
    objForEachKey(eventCache, (evtType, registeredEvents) => {
        arrForEach(registeredEvents, (value) => {
            if (!evtName.type || evtName.type === value.evtName.type) {
                // Fix: compare the requested namespace with the registered
                // event's namespace. The previous code compared evtName.ns
                // with itself (always true), so namespace filtering never
                // excluded anything.
                if (!evtName.ns || evtName.ns === value.evtName.ns) {
                    theEvents.push({
                        name: value.evtName.type + (value.evtName.ns ? "." + value.evtName.ns : ""),
                        handler: value.handler
                    });
                }
            }
        });
    });
    return theEvents;
}
// NOTE(review): original comment said "Exported for internal unit testing
// only" although the function is module-private here — confirm intent.
function _getRegisteredEvents(target: any, evtName: string, addDefault: boolean = true): IRegisteredEvent[] {
    // Fetch (or lazily create, when addDefault is true) the per-target event
    // cache, then return the bucket for this event type, creating an empty
    // bucket on first use.
    const aiEvts = elmNodeData.get<IAiEvents>(target, strEvents, {}, addDefault);
    return aiEvts[evtName] || (aiEvts[evtName] = []);
}
/**
 * Detaches a handler from the target, using removeEventListener when available
 * and falling back to the legacy IE detachEvent API.
 */
function _doDetach(obj: any, evtName: IEventDetails, handlerRef: any, useCapture: boolean) {
    // Nothing to do without a target object and a concrete event type.
    if (!obj || !evtName || !evtName.type) {
        return;
    }
    if (obj[strRemoveEventListener]) {
        // Standards path (all browsers except IE before version 9).
        obj[strRemoveEventListener](evtName.type, handlerRef, useCapture);
    } else if (obj[strDetachEvent]) {
        // Legacy IE path; event names carry the "on" prefix.
        obj[strDetachEvent](strOnPrefix + evtName.type, handlerRef);
    }
}
/**
 * Attaches a handler to the target, preferring addEventListener and falling
 * back to the legacy IE attachEvent API.
 * @returns true when the handler was attached via either mechanism
 */
function _doAttach(obj: any, evtName: IEventDetails, handlerRef: any, useCapture: boolean): boolean {
    if (!(obj && evtName && evtName.type && handlerRef)) {
        return false;
    }
    if (obj[strAddEventHelper]) {
        // all browsers except IE before version 9
        obj[strAddEventHelper](evtName.type, handlerRef, useCapture);
        return true;
    }
    if (obj[strAttachEvent]) {
        // IE before version 9
        obj[strAttachEvent](strOnPrefix + evtName.type, handlerRef);
        return true;
    }
    return false;
}
/**
 * Walks a registration bucket removing every entry that matches the requested
 * namespace and passes the optional unRegFn predicate, detaching each matched
 * handler from the target as it goes.
 */
function _doUnregister(target: any, events: IRegisteredEvent[], evtName: IEventDetails, unRegFn: (regEvent: IRegisteredEvent) => boolean) {
    // Iterate backwards so splice() doesn't disturb the indices still to visit.
    for (let idx = events.length - 1; idx >= 0; idx--) {
        const theEvent = events[idx];
        if (theEvent && (!evtName.ns || evtName.ns === theEvent.evtName.ns)) {
            if (!unRegFn || unRegFn(theEvent)) {
                _doDetach(target, theEvent.evtName, theEvent.handler, theEvent.capture);
                // Remove the registration from the cache as well.
                events.splice(idx, 1);
            }
        }
    }
}
/**
 * Removes matching registrations from the target's cache. When a concrete
 * event type is supplied only that bucket is scanned; otherwise (namespace-only
 * removal) every bucket is scanned and the cache entry is dropped once empty.
 */
function _unregisterEvents(target: any, evtName: IEventDetails, unRegFn: (regEvent: IRegisteredEvent) => boolean) {
    if (evtName.type) {
        _doUnregister(target, _getRegisteredEvents(target, evtName.type), evtName, unRegFn);
        return;
    }
    const eventCache = elmNodeData.get<IAiEvents>(target, strEvents, {});
    objForEachKey(eventCache, (evtType, events) => {
        _doUnregister(target, events, evtName, unRegFn);
    });
    // Cleanup: drop the whole cache entry once nothing remains.
    if (objKeys(eventCache).length === 0) {
        elmNodeData.kill(target, strEvents);
    }
}
export function mergeEvtNamespace(theNamespace: string, namespaces?: string | string[] | null): string | string[] {
let newNamespaces: string | string[];
if (namespaces) {
if (isArray(namespaces)) {
newNamespaces = [theNamespace].concat(namespaces);
} else {
newNamespaces = [ theNamespace, namespaces ];
}
// resort the namespaces so they are always in order
newNamespaces = (_getEvtNamespace("xx", newNamespaces).ns).split(".");
} else {
newNamespaces = theNamespace;
}
return newNamespaces;
}
/**
* Binds the specified function to an event, so that the function gets called whenever the event fires on the object
* @param obj Object to add the event too.
* @param eventName String that specifies any of the standard DHTML Events without "on" prefix, if may also include an optional (dot "." prefixed)
* namespaces "click" "click.mynamespace" in addition to specific namespaces.
* @param handlerRef Pointer that specifies the function to call when event fires
* @param evtNamespace - [Optional] Additional namespace(s) to append to the event listeners so they can be uniquely identified and removed based on this namespace,
* if the eventName also includes a namespace the namespace(s) are merged into a single namespace
* @param useCapture [Optional] Defaults to false
* @returns True if the function was bound successfully to the event, otherwise false
*/
export function eventOn<T>(target: T, eventName: string, handlerRef: any, evtNamespace?: string | string[] | null, useCapture: boolean = false) {
let result = false;
if (target) {
try {
let evtName = _getEvtNamespace(eventName, evtNamespace);
result = _doAttach(target, evtName, handlerRef, useCapture);
if (result && elmNodeData.accept(target)) {
let registeredEvent: IRegisteredEvent = {
guid: _guid++,
evtName: evtName,
handler: handlerRef,
capture: useCapture
};
_getRegisteredEvents(target, evtName.type).push(registeredEvent);
}
} catch (e) {
// Just Ignore any error so that we don't break any execution path
}
}
return result;
}
/**
 * Removes an event handler for the specified event.
 * @param target - Object to remove the event from
 * @param eventName - The event name with optional namespaces, or just the namespaces,
 * e.g. "click", "click.mynamespace" or ".mynamespace"
 * @param handlerRef - The callback to remove; when a namespace is supplied this may be null to remove
 * all previously attached handlers, otherwise only registrations with this exact handler are removed
 * @param evtNamespace - [Optional] Additional namespace(s) merged into the event name for matching
 * @param useCapture - [Optional] Defaults to false
 */
export function eventOff<T>(target: T, eventName: string, handlerRef: any, evtNamespace?: string | string[] | null, useCapture: boolean = false) {
    if (!target) {
        return;
    }
    try {
        const evtName = _getEvtNamespace(eventName, evtNamespace);
        let matched = false;
        _unregisterEvents(target, evtName, (regEvent) => {
            // Namespace-only removal (no handler supplied) removes everything
            // in the namespace; otherwise only exact handler matches.
            const remove = !!((evtName.ns && !handlerRef) || regEvent.handler === handlerRef);
            if (remove) {
                matched = true;
            }
            return remove;
        });
        if (!matched) {
            // fallback to try and remove as requested
            _doDetach(target, evtName, handlerRef, useCapture);
        }
    } catch (e) {
        // Just Ignore any error so that we don't break any execution path
    }
}
/**
 * Binds the specified function to an event so it is called whenever the event fires on the object.
 * Thin wrapper over eventOn() that supplies no extra namespace.
 * @param obj - Object to add the event to
 * @param eventNameWithoutOn - Standard DHTML event name without the "on" prefix; may include optional
 * (dot "." prefixed) namespaces, e.g. "click.mynamespace"
 * @param handlerRef - The function to call when the event fires
 * @param useCapture - [Optional] Defaults to false
 * @returns True if the function was bound successfully to the event, otherwise false
 */
export function attachEvent(obj: any, eventNameWithoutOn: string, handlerRef: any, useCapture: boolean = false) {
    return eventOn(obj, eventNameWithoutOn, handlerRef, null, useCapture);
}
/**
 * Removes an event handler for the specified event. Thin wrapper over
 * eventOff() that supplies no extra namespace.
 * @param obj - Object to remove the event from
 * @param eventNameWithoutOn - The event name with optional namespaces, or just the namespaces,
 * e.g. "click", "click.mynamespace" or ".mynamespace"
 * @param handlerRef - The callback to remove; with a namespace this may be null to remove all
 * previously attached handlers, otherwise only registrations with this handler are removed
 * @param useCapture - [Optional] Defaults to false
 */
export function detachEvent(obj: any, eventNameWithoutOn: string, handlerRef: any, useCapture: boolean = false) {
    eventOff(obj, eventNameWithoutOn, handlerRef, null, useCapture);
}
/**
* Trys to add an event handler for the specified event to the window, body and document
* @param eventName {string} - The name of the event
* @param callback {any} - The callback function that needs to be executed for the given event
* @param evtNamespace - [Optional] Namespace(s) to append to the event listeners so they can be uniquely identified and removed based on this namespace.
* @return {boolean} - true if the handler was successfully added
*/
export function addEventHandler(eventName: string, callback: any, evtNamespace?: string | string[] | null): boolean {
let result = false;
let w = getWindow();
if (w) {
result = eventOn(w, eventName, callback, evtNamespace);
result = eventOn(w["body"], eventName, callback, evtNamespace) || result;
}
let doc = getDocument();
if (doc) {
result = eventOn(doc, eventName, callback, evtNamespace) || result;
}
return result;
}
/**
* Trys to remove event handler(s) for the specified event/namespace to the window, body and document
* @param eventName {string} - The name of the event, with optional namespaces or just the namespaces,
* such as "click", "click.mynamespace" or ".mynamespace"
* @param callback {any} - - The callback function that needs to be removed from the given event, when using a
* namespace (with or without a qualifying event) this may be null to remove all previously attached event handlers
* otherwise this will only remove events with this specific handler.
* @param evtNamespace - [Optional] Namespace(s) to append to the event listeners so they can be uniquely identified and removed based on this namespace.
*/
export function removeEventHandler(eventName: string, callback: any, evtNamespace?: string | string[] | null) {
let w = getWindow();
if (w) {
eventOff(w, eventName, callback, evtNamespace);
eventOff(w["body"], eventName, callback, evtNamespace);
}
let doc = getDocument();
if (doc) {
eventOff(doc, eventName, callback, evtNamespace);
}
}
/**
 * Binds the listener to every named event, skipping any listed in excludeEvents.
 * @param events - The event names to bind the listener to
 * @param listener - The callback invoked when an event fires
 * @param excludeEvents - [Optional] Event names that should not be hooked
 * @param evtNamespace - [Optional] Namespace(s) appended to the listeners for later identification/removal
 * @returns true when at least one of the events was registered, otherwise false
 */
function _addEventListeners(events: string[], listener: any, excludeEvents?: string[] | null, evtNamespace?: string | string[] | null): boolean {
    let hooked = false;
    if (listener && events && events.length > 0) {
        arrForEach(events, (name) => {
            if (name && (!excludeEvents || arrIndexOf(excludeEvents, name) === -1)) {
                hooked = addEventHandler(name, listener, evtNamespace) || hooked;
            }
        });
    }
    return hooked;
}
/**
 * Binds the listener to the array of events.
 * @param events - The event names to bind the listener to
 * @param listener - The callback invoked when an event fires
 * @param excludeEvents - [Optional] Events that should not be hooked (if possible), unless no other events can be
 * @param evtNamespace - [Optional] Namespace(s) appended to the listeners for later identification/removal
 * @returns true when at least one of the events was registered, otherwise false
 */
export function addEventListeners(events: string[], listener: any, excludeEvents?: string[], evtNamespace?: string | string[]): boolean {
    if (!listener || !events || !isArray(events)) {
        return false;
    }
    let registered = _addEventListeners(events, listener, excludeEvents, evtNamespace);
    if (!registered && excludeEvents && excludeEvents.length > 0) {
        // Failed to add any listeners and we excluded some, so just attempt to add the excluded events
        registered = _addEventListeners(events, listener, null, evtNamespace);
    }
    return registered;
}
/**
 * Removes the listener from the array of events.
 * @param events - The event names the listener was bound to
 * @param listener - The callback to remove
 * @param evtNamespace - [Optional] Namespace(s) used to uniquely identify the listeners to remove
 */
export function removeEventListeners(events: string[], listener: any, evtNamespace?: string | string[]) {
    if (!events || !isArray(events)) {
        return;
    }
    arrForEach(events, (name) => {
        if (name) {
            removeEventHandler(name, listener, evtNamespace);
        }
    });
}
/**
 * Listens to the 'beforeunload', 'unload' and 'pagehide' events which indicate a page unload is occurring.
 * This does NOT listen to 'visibilitychange': a hidden page is not necessarily being unloaded (the user may
 * just be switching tabs), so also consider addPageHideEventListener / addPageShowEventListener.
 * @param listener - The callback invoked when a page unload event is triggered
 * @param excludeEvents - [Optional] Events that should not be hooked, unless no other events can be
 * @param evtNamespace - [Optional] Namespace(s) appended to the listeners for later identification/removal
 * @returns true when at least one of the events was registered, otherwise false
 */
export function addPageUnloadEventListener(listener: any, excludeEvents?: string[], evtNamespace?: string | string[]): boolean {
    // Hook the unload event for the document, window and body to ensure that the client events are flushed
    // to the server, as just hooking the window does not always fire (on chrome) for page navigation's.
    const unloadEvents = [strBeforeUnload, strUnload, strPageHide];
    return addEventListeners(unloadEvents, listener, excludeEvents, evtNamespace);
}
/**
 * Remove any matching 'beforeunload', 'unload' and 'pagehide' events that may have been added via
 * addEventListener, addEventListeners, addPageUnloadEventListener or addPageHideEventListener.
 * @param listener - The specific event callback to be removed
 * @param evtNamespace - [Optional] Namespace(s) uniquely identified and removed based on this namespace.
 */
export function removePageUnloadEventListener(listener: any, evtNamespace?: string | string[]) {
    // Must match the same event set that addPageUnloadEventListener registers
    const unloadEvents = [strBeforeUnload, strUnload, strPageHide];
    removeEventListeners(unloadEvents, listener, evtNamespace);
}
/**
* Listen to the pagehide and visibility changing to 'hidden' events, because the 'visibilitychange' uses
* an internal proxy to detect the visibility state you SHOULD use a unique namespace when if you plan to call
* removePageShowEventListener as the remove ignores the listener argument for the 'visibilitychange' event.
* @param listener - The event callback to call when a page hide event is triggered
* @param excludeEvents - [Optional] An array of events that should not be hooked (if possible), unless no other events can be.
* @param evtNamespace - [Optional] A Namespace to append to the event listeners so they can be uniquely identified and removed
* based on this namespace. This call also adds an additional unique "pageshow" namespace to the events
* so that only the matching "removePageHideEventListener" can remove these events.
* Suggestion: pass as true if you are also calling addPageUnloadEventListener as that also hooks pagehide
* @returns true - when at least one of the events was registered otherwise false
*/
export function addPageHideEventListener(listener: any, excludeEvents?: string[] | null, evtNamespace?: string | string[] | null): boolean {
function _handlePageVisibility(evt: any) {
let doc = getDocument();
if (listener && doc && doc.visibilityState === "hidden") {
listener(evt);
}
}
// add the unique page show namespace to any provided namespace so we can only remove the ones added by "pagehide"
let newNamespaces = mergeEvtNamespace(strPageHideNamespace, evtNamespace);
let pageUnloadAdded = _addEventListeners([strPageHide], listener, excludeEvents, newNamespaces);
if (!excludeEvents || arrIndexOf(excludeEvents, strVisibilityChangeEvt) === -1) {
pageUnloadAdded = _addEventListeners([strVisibilityChangeEvt], _handlePageVisibility, excludeEvents, newNamespaces) || pageUnloadAdded;
}
if (!pageUnloadAdded && excludeEvents) {
// Failed to add any listeners and we where requested to exclude some, so just call again without excluding anything
pageUnloadAdded = addPageHideEventListener(listener, null, evtNamespace);
}
return pageUnloadAdded;
}
/**
* Removes the pageHide event listeners added by addPageHideEventListener, because the 'visibilitychange' uses
* an internal proxy to detect the visibility state you SHOULD use a unique namespace when calling addPageHideEventListener
* as the remove ignores the listener argument for the 'visibilitychange' event.
* @param listener - The specific listener to remove for the 'pageshow' event only (ignored for 'visibilitychange')
* @param evtNamespace - The unique namespace used when calling addPageShowEventListener
*/
export function removePageHideEventListener(listener: any, evtNamespace?: string | string[] | null) {
// add the unique page show namespace to any provided namespace so we only remove the ones added by "pagehide"
let newNamespaces = mergeEvtNamespace(strPageHideNamespace, evtNamespace);
removeEventListeners([strPageHide], listener, newNamespaces);
removeEventListeners([strVisibilityChangeEvt], null, newNamespaces);
}
/**
* Listen to the pageshow and visibility changing to 'visible' events, because the 'visibilitychange' uses
* an internal proxy to detect the visibility state you SHOULD use a unique namespace when if you plan to call
* removePageShowEventListener as the remove ignores the listener argument for the 'visibilitychange' event.
* @param listener - The event callback to call when a page is show event is triggered
* @param excludeEvents - [Optional] An array of events that should not be hooked (if possible), unless no other events can be.
* @param evtNamespace - [Optional/Recommended] A Namespace to append to the event listeners so they can be uniquely
* identified and removed based on this namespace. This call also adds an additional unique "pageshow" namespace to the events
* so that only the matching "removePageShowEventListener" can remove these events.
* @returns true - when at least one of the events was registered otherwise false
*/
export function addPageShowEventListener(listener: any, excludeEvents?: string[] | null, evtNamespace?: string | string[] | null): boolean {
function _handlePageVisibility(evt: any) {
let doc = getDocument();
if (listener && doc && doc.visibilityState === "visible") {
listener(evt);
}
}
// add the unique page show namespace to any provided namespace so we can only remove the ones added by "pageshow"
let newNamespaces = mergeEvtNamespace(strPageShowNamespace, evtNamespace);
let pageShowAdded = _addEventListeners([strPageShow], listener, excludeEvents, newNamespaces);
pageShowAdded = _addEventListeners([strVisibilityChangeEvt], _handlePageVisibility, excludeEvents, newNamespaces) || pageShowAdded;
if (!pageShowAdded && excludeEvents) {
// Failed to add any listeners and we where requested to exclude some, so just call again without excluding anything
pageShowAdded = addPageShowEventListener(listener, null, evtNamespace);
}
return pageShowAdded;
}
/**
* Removes the pageShow event listeners added by addPageShowEventListener, because the 'visibilitychange' uses
* an internal proxy to detect the visibility state you SHOULD use a unique namespace when calling addPageShowEventListener
* as the remove ignores the listener argument for the 'visibilitychange' event.
* @param listener - The specific listener to remove for the 'pageshow' event only (ignored for 'visibilitychange')
* @param evtNamespace - The unique namespace used when calling addPageShowEventListener
*/
export function removePageShowEventListener(listener: any, evtNamespace?: string | string[] | null) {
// add the unique page show namespace to any provided namespace so we only remove the ones added by "pageshow"
let newNamespaces = mergeEvtNamespace(strPageShowNamespace, evtNamespace);
removeEventListeners([strPageShow], listener, newNamespaces);
removeEventListeners([strVisibilityChangeEvt], null, newNamespaces);
} | the_stack |
import { ServiceClientOptions, RequestOptions, ServiceCallback, HttpOperationResponse } from 'ms-rest';
import * as models from '../models';
/**
* @class
* WorkspaceCollections
* __NOTE__: An instance of this class is automatically created for an
* instance of the PowerBIEmbeddedManagementClient.
*/
export interface WorkspaceCollections {
/**
* Retrieves an existing Power BI Workspace Collection.
*
* @param {string} resourceGroupName Azure resource group
*
* @param {string} workspaceCollectionName Power BI Embedded Workspace
* Collection name
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @returns {Promise} A promise is returned
*
* @resolve {HttpOperationResponse<WorkspaceCollection>} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*/
getByNameWithHttpOperationResponse(resourceGroupName: string, workspaceCollectionName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.WorkspaceCollection>>;
/**
* Retrieves an existing Power BI Workspace Collection.
*
* @param {string} resourceGroupName Azure resource group
*
* @param {string} workspaceCollectionName Power BI Embedded Workspace
* Collection name
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @param {ServiceCallback} [optionalCallback] - The optional callback.
*
* @returns {ServiceCallback|Promise} If a callback was passed as the last
* parameter then it returns the callback else returns a Promise.
*
* {Promise} A promise is returned.
*
* @resolve {WorkspaceCollection} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*
* {ServiceCallback} optionalCallback(err, result, request, response)
*
* {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
*
* {WorkspaceCollection} [result] - The deserialized result object if an error did not occur.
* See {@link WorkspaceCollection} for more information.
*
* {WebResource} [request] - The HTTP Request object if an error did not occur.
*
* {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
*/
getByName(resourceGroupName: string, workspaceCollectionName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.WorkspaceCollection>;
getByName(resourceGroupName: string, workspaceCollectionName: string, callback: ServiceCallback<models.WorkspaceCollection>): void;
getByName(resourceGroupName: string, workspaceCollectionName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.WorkspaceCollection>): void;
/**
* Creates a new Power BI Workspace Collection with the specified properties. A
* Power BI Workspace Collection contains one or more workspaces, and can be
* used to provision keys that provide API access to those workspaces.
*
* @param {string} resourceGroupName Azure resource group
*
* @param {string} workspaceCollectionName Power BI Embedded Workspace
* Collection name
*
* @param {object} body Create workspace collection request
*
* @param {string} [body.location] Azure location
*
* @param {object} [body.tags]
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @returns {Promise} A promise is returned
*
* @resolve {HttpOperationResponse<WorkspaceCollection>} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*/
createWithHttpOperationResponse(resourceGroupName: string, workspaceCollectionName: string, body: models.CreateWorkspaceCollectionRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.WorkspaceCollection>>;
/**
* Creates a new Power BI Workspace Collection with the specified properties. A
* Power BI Workspace Collection contains one or more workspaces, and can be
* used to provision keys that provide API access to those workspaces.
*
* @param {string} resourceGroupName Azure resource group
*
* @param {string} workspaceCollectionName Power BI Embedded Workspace
* Collection name
*
* @param {object} body Create workspace collection request
*
* @param {string} [body.location] Azure location
*
* @param {object} [body.tags]
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @param {ServiceCallback} [optionalCallback] - The optional callback.
*
* @returns {ServiceCallback|Promise} If a callback was passed as the last
* parameter then it returns the callback else returns a Promise.
*
* {Promise} A promise is returned.
*
* @resolve {WorkspaceCollection} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*
* {ServiceCallback} optionalCallback(err, result, request, response)
*
* {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
*
* {WorkspaceCollection} [result] - The deserialized result object if an error did not occur.
* See {@link WorkspaceCollection} for more information.
*
* {WebResource} [request] - The HTTP Request object if an error did not occur.
*
* {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
*/
create(resourceGroupName: string, workspaceCollectionName: string, body: models.CreateWorkspaceCollectionRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.WorkspaceCollection>;
create(resourceGroupName: string, workspaceCollectionName: string, body: models.CreateWorkspaceCollectionRequest, callback: ServiceCallback<models.WorkspaceCollection>): void;
create(resourceGroupName: string, workspaceCollectionName: string, body: models.CreateWorkspaceCollectionRequest, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.WorkspaceCollection>): void;
/**
* Update an existing Power BI Workspace Collection with the specified
* properties.
*
* @param {string} resourceGroupName Azure resource group
*
* @param {string} workspaceCollectionName Power BI Embedded Workspace
* Collection name
*
* @param {object} body Update workspace collection request
*
* @param {object} [body.tags]
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @returns {Promise} A promise is returned
*
* @resolve {HttpOperationResponse<WorkspaceCollection>} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*/
updateWithHttpOperationResponse(resourceGroupName: string, workspaceCollectionName: string, body: models.UpdateWorkspaceCollectionRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.WorkspaceCollection>>;
/**
* Update an existing Power BI Workspace Collection with the specified
* properties.
*
* @param {string} resourceGroupName Azure resource group
*
* @param {string} workspaceCollectionName Power BI Embedded Workspace
* Collection name
*
* @param {object} body Update workspace collection request
*
* @param {object} [body.tags]
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @param {ServiceCallback} [optionalCallback] - The optional callback.
*
* @returns {ServiceCallback|Promise} If a callback was passed as the last
* parameter then it returns the callback else returns a Promise.
*
* {Promise} A promise is returned.
*
* @resolve {WorkspaceCollection} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*
* {ServiceCallback} optionalCallback(err, result, request, response)
*
* {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
*
* {WorkspaceCollection} [result] - The deserialized result object if an error did not occur.
* See {@link WorkspaceCollection} for more information.
*
* {WebResource} [request] - The HTTP Request object if an error did not occur.
*
* {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
*/
update(resourceGroupName: string, workspaceCollectionName: string, body: models.UpdateWorkspaceCollectionRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.WorkspaceCollection>;
update(resourceGroupName: string, workspaceCollectionName: string, body: models.UpdateWorkspaceCollectionRequest, callback: ServiceCallback<models.WorkspaceCollection>): void;
update(resourceGroupName: string, workspaceCollectionName: string, body: models.UpdateWorkspaceCollectionRequest, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.WorkspaceCollection>): void;
/**
* Delete a Power BI Workspace Collection.
*
* @param {string} resourceGroupName Azure resource group
*
* @param {string} workspaceCollectionName Power BI Embedded Workspace
* Collection name
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @returns {Promise} A promise is returned
*
* @resolve {HttpOperationResponse<null>} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*/
deleteMethodWithHttpOperationResponse(resourceGroupName: string, workspaceCollectionName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
/**
* Delete a Power BI Workspace Collection.
*
* @param {string} resourceGroupName Azure resource group
*
* @param {string} workspaceCollectionName Power BI Embedded Workspace
* Collection name
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @param {ServiceCallback} [optionalCallback] - The optional callback.
*
* @returns {ServiceCallback|Promise} If a callback was passed as the last
* parameter then it returns the callback else returns a Promise.
*
* {Promise} A promise is returned.
*
* @resolve {null} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*
* {ServiceCallback} optionalCallback(err, result, request, response)
*
* {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
*
* {null} [result] - The deserialized result object if an error did not occur.
*
* {WebResource} [request] - The HTTP Request object if an error did not occur.
*
* {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
*/
deleteMethod(resourceGroupName: string, workspaceCollectionName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<void>;
deleteMethod(resourceGroupName: string, workspaceCollectionName: string, callback: ServiceCallback<void>): void;
deleteMethod(resourceGroupName: string, workspaceCollectionName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
/**
* Verify the specified Power BI Workspace Collection name is valid and not
* already in use.
*
* @param {string} location Azure location
*
* @param {object} body Check name availability request
*
* @param {string} [body.name] Workspace collection name
*
* @param {string} [body.type] Resource type
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @returns {Promise} A promise is returned
*
* @resolve {HttpOperationResponse<CheckNameResponse>} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*/
checkNameAvailabilityWithHttpOperationResponse(location: string, body: models.CheckNameRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.CheckNameResponse>>;
/**
* Verify the specified Power BI Workspace Collection name is valid and not
* already in use.
*
* @param {string} location Azure location
*
* @param {object} body Check name availability request
*
* @param {string} [body.name] Workspace collection name
*
* @param {string} [body.type] Resource type
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @param {ServiceCallback} [optionalCallback] - The optional callback.
*
* @returns {ServiceCallback|Promise} If a callback was passed as the last
* parameter then it returns the callback else returns a Promise.
*
* {Promise} A promise is returned.
*
* @resolve {CheckNameResponse} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*
* {ServiceCallback} optionalCallback(err, result, request, response)
*
* {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
*
* {CheckNameResponse} [result] - The deserialized result object if an error did not occur.
* See {@link CheckNameResponse} for more information.
*
* {WebResource} [request] - The HTTP Request object if an error did not occur.
*
* {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
*/
checkNameAvailability(location: string, body: models.CheckNameRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.CheckNameResponse>;
checkNameAvailability(location: string, body: models.CheckNameRequest, callback: ServiceCallback<models.CheckNameResponse>): void;
checkNameAvailability(location: string, body: models.CheckNameRequest, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.CheckNameResponse>): void;
/**
* Retrieves all existing Power BI workspace collections in the specified
* resource group.
*
* @param {string} resourceGroupName Azure resource group
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @returns {Promise} A promise is returned
*
* @resolve {HttpOperationResponse<WorkspaceCollectionList>} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*/
listByResourceGroupWithHttpOperationResponse(resourceGroupName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.WorkspaceCollectionList>>;
/**
* Retrieves all existing Power BI workspace collections in the specified
* resource group.
*
* @param {string} resourceGroupName Azure resource group
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @param {ServiceCallback} [optionalCallback] - The optional callback.
*
* @returns {ServiceCallback|Promise} If a callback was passed as the last
* parameter then it returns the callback else returns a Promise.
*
* {Promise} A promise is returned.
*
* @resolve {WorkspaceCollectionList} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*
* {ServiceCallback} optionalCallback(err, result, request, response)
*
* {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
*
* {WorkspaceCollectionList} [result] - The deserialized result object if an error did not occur.
* See {@link WorkspaceCollectionList} for more
* information.
*
* {WebResource} [request] - The HTTP Request object if an error did not occur.
*
* {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
*/
listByResourceGroup(resourceGroupName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.WorkspaceCollectionList>;
listByResourceGroup(resourceGroupName: string, callback: ServiceCallback<models.WorkspaceCollectionList>): void;
listByResourceGroup(resourceGroupName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.WorkspaceCollectionList>): void;
/**
* Retrieves all existing Power BI workspace collections in the specified
* subscription.
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @returns {Promise} A promise is returned
*
* @resolve {HttpOperationResponse<WorkspaceCollectionList>} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*/
listBySubscriptionWithHttpOperationResponse(options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.WorkspaceCollectionList>>;
/**
* Retrieves all existing Power BI workspace collections in the specified
* subscription.
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @param {ServiceCallback} [optionalCallback] - The optional callback.
*
* @returns {ServiceCallback|Promise} If a callback was passed as the last
* parameter then it returns the callback else returns a Promise.
*
* {Promise} A promise is returned.
*
* @resolve {WorkspaceCollectionList} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*
* {ServiceCallback} optionalCallback(err, result, request, response)
*
* {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
*
* {WorkspaceCollectionList} [result] - The deserialized result object if an error did not occur.
* See {@link WorkspaceCollectionList} for more
* information.
*
* {WebResource} [request] - The HTTP Request object if an error did not occur.
*
* {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
*/
listBySubscription(options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.WorkspaceCollectionList>;
listBySubscription(callback: ServiceCallback<models.WorkspaceCollectionList>): void;
listBySubscription(options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.WorkspaceCollectionList>): void;
/**
* Retrieves the primary and secondary access keys for the specified Power BI
* Workspace Collection.
*
* @param {string} resourceGroupName Azure resource group
*
* @param {string} workspaceCollectionName Power BI Embedded Workspace
* Collection name
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @returns {Promise} A promise is returned
*
* @resolve {HttpOperationResponse<WorkspaceCollectionAccessKeys>} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*/
getAccessKeysWithHttpOperationResponse(resourceGroupName: string, workspaceCollectionName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.WorkspaceCollectionAccessKeys>>;
/**
* Retrieves the primary and secondary access keys for the specified Power BI
* Workspace Collection.
*
* @param {string} resourceGroupName Azure resource group
*
* @param {string} workspaceCollectionName Power BI Embedded Workspace
* Collection name
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @param {ServiceCallback} [optionalCallback] - The optional callback.
*
* @returns {ServiceCallback|Promise} If a callback was passed as the last
* parameter then it returns the callback else returns a Promise.
*
* {Promise} A promise is returned.
*
* @resolve {WorkspaceCollectionAccessKeys} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*
* {ServiceCallback} optionalCallback(err, result, request, response)
*
* {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
*
* {WorkspaceCollectionAccessKeys} [result] - The deserialized result object if an error did not occur.
* See {@link WorkspaceCollectionAccessKeys} for more
* information.
*
* {WebResource} [request] - The HTTP Request object if an error did not occur.
*
* {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
*/
getAccessKeys(resourceGroupName: string, workspaceCollectionName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.WorkspaceCollectionAccessKeys>;
getAccessKeys(resourceGroupName: string, workspaceCollectionName: string, callback: ServiceCallback<models.WorkspaceCollectionAccessKeys>): void;
getAccessKeys(resourceGroupName: string, workspaceCollectionName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.WorkspaceCollectionAccessKeys>): void;
/**
* Regenerates the primary or secondary access key for the specified Power BI
* Workspace Collection.
*
* @param {string} resourceGroupName Azure resource group
*
* @param {string} workspaceCollectionName Power BI Embedded Workspace
* Collection name
*
* @param {object} body Access key to regenerate
*
* @param {string} [body.keyName] Key name. Possible values include: 'key1',
* 'key2'
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @returns {Promise} A promise is returned
*
* @resolve {HttpOperationResponse<WorkspaceCollectionAccessKeys>} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*/
regenerateKeyWithHttpOperationResponse(resourceGroupName: string, workspaceCollectionName: string, body: models.WorkspaceCollectionAccessKey, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.WorkspaceCollectionAccessKeys>>;
/**
* Regenerates the primary or secondary access key for the specified Power BI
* Workspace Collection.
*
* @param {string} resourceGroupName Azure resource group
*
* @param {string} workspaceCollectionName Power BI Embedded Workspace
* Collection name
*
* @param {object} body Access key to regenerate
*
* @param {string} [body.keyName] Key name. Possible values include: 'key1',
* 'key2'
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @param {ServiceCallback} [optionalCallback] - The optional callback.
*
* @returns {ServiceCallback|Promise} If a callback was passed as the last
* parameter then it returns the callback else returns a Promise.
*
* {Promise} A promise is returned.
*
* @resolve {WorkspaceCollectionAccessKeys} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*
* {ServiceCallback} optionalCallback(err, result, request, response)
*
* {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
*
* {WorkspaceCollectionAccessKeys} [result] - The deserialized result object if an error did not occur.
* See {@link WorkspaceCollectionAccessKeys} for more
* information.
*
* {WebResource} [request] - The HTTP Request object if an error did not occur.
*
* {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
*/
regenerateKey(resourceGroupName: string, workspaceCollectionName: string, body: models.WorkspaceCollectionAccessKey, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.WorkspaceCollectionAccessKeys>;
regenerateKey(resourceGroupName: string, workspaceCollectionName: string, body: models.WorkspaceCollectionAccessKey, callback: ServiceCallback<models.WorkspaceCollectionAccessKeys>): void;
regenerateKey(resourceGroupName: string, workspaceCollectionName: string, body: models.WorkspaceCollectionAccessKey, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.WorkspaceCollectionAccessKeys>): void;
/**
* Migrates an existing Power BI Workspace Collection to a different resource
* group and/or subscription.
*
* @param {string} resourceGroupName Azure resource group
*
* @param {object} body Workspace migration request
*
* @param {string} [body.targetResourceGroup] Name of the resource group the
* Power BI workspace collections will be migrated to.
*
* @param {array} [body.resources]
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @returns {Promise} A promise is returned
*
* @resolve {HttpOperationResponse<null>} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*/
migrateWithHttpOperationResponse(resourceGroupName: string, body: models.MigrateWorkspaceCollectionRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
/**
* Migrates an existing Power BI Workspace Collection to a different resource
* group and/or subscription.
*
* @param {string} resourceGroupName Azure resource group
*
* @param {object} body Workspace migration request
*
* @param {string} [body.targetResourceGroup] Name of the resource group the
* Power BI workspace collections will be migrated to.
*
* @param {array} [body.resources]
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @param {ServiceCallback} [optionalCallback] - The optional callback.
*
* @returns {ServiceCallback|Promise} If a callback was passed as the last
* parameter then it returns the callback else returns a Promise.
*
* {Promise} A promise is returned.
*
* @resolve {null} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*
* {ServiceCallback} optionalCallback(err, result, request, response)
*
* {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
*
* {null} [result] - The deserialized result object if an error did not occur.
*
* {WebResource} [request] - The HTTP Request object if an error did not occur.
*
* {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
*/
migrate(resourceGroupName: string, body: models.MigrateWorkspaceCollectionRequest, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<void>; // Overload: promise-based; resolves with no value on success.
migrate(resourceGroupName: string, body: models.MigrateWorkspaceCollectionRequest, callback: ServiceCallback<void>): void; // Overload: callback-based, no options.
migrate(resourceGroupName: string, body: models.MigrateWorkspaceCollectionRequest, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<void>): void; // Overload: options plus callback.
/**
* Delete a Power BI Workspace Collection.
*
* @param {string} resourceGroupName Azure resource group
*
* @param {string} workspaceCollectionName Power BI Embedded Workspace
* Collection name
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @returns {Promise} A promise is returned
*
* @resolve {HttpOperationResponse<null>} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*/
beginDeleteMethodWithHttpOperationResponse(resourceGroupName: string, workspaceCollectionName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>; // Promise-based overload; resolves with the raw HttpOperationResponse wrapper (no deserialized body).
/**
* Delete a Power BI Workspace Collection.
*
* @param {string} resourceGroupName Azure resource group
*
* @param {string} workspaceCollectionName Power BI Embedded Workspace
* Collection name
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @param {ServiceCallback} [optionalCallback] - The optional callback.
*
* @returns {ServiceCallback|Promise} If a callback was passed as the last
* parameter then it returns the callback else returns a Promise.
*
* {Promise} A promise is returned.
*
* @resolve {null} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*
* {ServiceCallback} optionalCallback(err, result, request, response)
*
* {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
*
* {null} [result] - The deserialized result object if an error did not occur.
*
* {WebResource} [request] - The HTTP Request object if an error did not occur.
*
* {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
*/
beginDeleteMethod(resourceGroupName: string, workspaceCollectionName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<void>; // Overload: promise-based; resolves with no value on success.
beginDeleteMethod(resourceGroupName: string, workspaceCollectionName: string, callback: ServiceCallback<void>): void; // Overload: callback-based, no options.
beginDeleteMethod(resourceGroupName: string, workspaceCollectionName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<void>): void; // Overload: options plus callback.
}
/**
* @class
* Workspaces
* __NOTE__: An instance of this class is automatically created for an
* instance of the PowerBIEmbeddedManagementClient.
*/
export interface Workspaces {
/**
* Retrieves all existing Power BI workspaces in the specified workspace
* collection.
*
* @param {string} resourceGroupName Azure resource group
*
* @param {string} workspaceCollectionName Power BI Embedded Workspace
* Collection name
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @returns {Promise} A promise is returned
*
* @resolve {HttpOperationResponse<WorkspaceList>} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*/
listWithHttpOperationResponse(resourceGroupName: string, workspaceCollectionName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.WorkspaceList>>;
/**
* Retrieves all existing Power BI workspaces in the specified workspace
* collection.
*
* @param {string} resourceGroupName Azure resource group
*
* @param {string} workspaceCollectionName Power BI Embedded Workspace
* Collection name
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @param {ServiceCallback} [optionalCallback] - The optional callback.
*
* @returns {ServiceCallback|Promise} If a callback was passed as the last
* parameter then it returns the callback else returns a Promise.
*
* {Promise} A promise is returned.
*
* @resolve {WorkspaceList} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*
* {ServiceCallback} optionalCallback(err, result, request, response)
*
* {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
*
* {WorkspaceList} [result] - The deserialized result object if an error did not occur.
* See {@link WorkspaceList} for more information.
*
* {WebResource} [request] - The HTTP Request object if an error did not occur.
*
* {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
*/
list(resourceGroupName: string, workspaceCollectionName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.WorkspaceList>;
list(resourceGroupName: string, workspaceCollectionName: string, callback: ServiceCallback<models.WorkspaceList>): void;
list(resourceGroupName: string, workspaceCollectionName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.WorkspaceList>): void;
} | the_stack |
import * as React from 'react';
import * as moment from 'moment';
import { cloneDeep, isEmpty } from '@microsoft/sp-lodash-subset';
import { Text } from '@microsoft/sp-core-library';
import { Dropdown, IDropdownOption, TextField, ChoiceGroup, IChoiceGroupOption } from 'office-ui-fabric-react';
import { NormalPeoplePicker, IPersonaProps, IBasePickerSuggestionsProps, Label } from 'office-ui-fabric-react';
import { TagPicker, ITag } from 'office-ui-fabric-react';
import { DatePicker, Checkbox } from 'office-ui-fabric-react';
import { IQueryFilter } from './IQueryFilter';
import { QueryFilterOperator } from './QueryFilterOperator';
import { QueryFilterJoin } from './QueryFilterJoin';
import { QueryFilterFieldType } from './QueryFilterFieldType';
import { IQueryFilterProps } from './IQueryFilterProps';
import { IQueryFilterState } from './IQueryFilterState';
import styles from './QueryFilter.module.scss';
export class QueryFilter extends React.Component<IQueryFilterProps, IQueryFilterState> {
    /*************************************************************************************
     * Component's constructor.
     * Deep-clones the incoming filter so edits made here never mutate the parent's copy.
     * @param props
     * @param state
     *************************************************************************************/
    constructor(props: IQueryFilterProps, state: IQueryFilterState) {
        super(props);
        moment.locale(this.props.strings.datePickerLocale);
        this.state = {
            filter: (this.props.filter ? cloneDeep(this.props.filter) : { index: 0, field: null, operator: QueryFilterOperator.Eq, value: '', join: QueryFilterJoin.Or }),
            pickersKey: Math.random()
        };
        this.onAnyChange = this.onAnyChange.bind(this);
    }
    /*************************************************************************************
     * Immutably merges the given changes into the current filter, commits the result to
     * state and notifies the parent with the fresh object. Replaces the previous pattern
     * of mutating this.state.filter directly (a React anti-pattern that also left some
     * handlers without any setState call at all).
     * @param changes : The filter properties to overwrite
     * @param resetPickers : When true, generates a new key to force pickers to re-mount
     *************************************************************************************/
    private updateFilter(changes: Partial<IQueryFilter>, resetPickers: boolean = false) {
        const filter: IQueryFilter = { ...this.state.filter, ...changes };
        this.setState({
            filter: filter,
            pickersKey: resetPickers ? Math.random() : this.state.pickersKey
        });
        this.onAnyChange(filter);
    }
    /*************************************************************************************
     * When the field Dropdown changes : resets the filter for the newly selected field.
     *************************************************************************************/
    private onFieldDropdownChange(option: IDropdownOption, index?: number) {
        const matches = this.props.fields.filter((f) => f.internalName == option.key);
        const field = matches.length > 0 ? matches[0] : null;
        // Multi-value field types default to ContainsAny, everything else to Eq.
        const isMultiValue = field && (field.type == QueryFilterFieldType.User || field.type == QueryFilterFieldType.Taxonomy);
        this.updateFilter({
            field: field,
            operator: isMultiValue ? QueryFilterOperator.ContainsAny : QueryFilterOperator.Eq,
            value: null,
            me: false,
            includeTime: false,
            expression: null
        }, true); // new pickersKey so value pickers re-mount with a clean state
    }
    /*************************************************************************************
     * When the operator Dropdown changes
     *************************************************************************************/
    private onOperatorDropdownChange(option: IDropdownOption, index?: number) {
        this.updateFilter({ operator: QueryFilterOperator[option.key] });
    }
    /*************************************************************************************
     * When the TextField value changes.
     * Note: wired through onGetErrorMessage as a debounced change event; always returns
     * '' because free-text values are never considered invalid.
     *************************************************************************************/
    private onValueTextFieldChange(newValue: string): string {
        if (this.state.filter.value != newValue) {
            this.updateFilter({ value: newValue });
        }
        return '';
    }
    /*************************************************************************************
     * When the people picker selection changes
     *************************************************************************************/
    private onPeoplePickerResolve(items: IPersonaProps[]) {
        this.updateFilter({ value: items });
    }
    /*************************************************************************************
     * When the "Me" checkbox changes
     * @param ev : The React.FormEvent object which contains the element that has changed
     * @param checked : Whether the checkbox is checked or not
     *************************************************************************************/
    private onPeoplePickerCheckboxChange(ev?: React.FormEvent<HTMLInputElement>, checked?: boolean) {
        this.updateFilter({ me: checked });
    }
    /*************************************************************************************
     * When the taxonomy picker selection changes
     *************************************************************************************/
    private onTaxonomyPickerResolve(items: ITag[]) {
        this.updateFilter({ value: items });
    }
    /*************************************************************************************
     * When the date picker value changes : picking a concrete date clears any
     * [Today]-style expression.
     *************************************************************************************/
    private onDatePickerChange(date: Date) {
        this.updateFilter({ value: date, expression: '' });
    }
    /*************************************************************************************
     * When the date expression text field value changes.
     * Returns an error message for onGetErrorMessage when the expression is invalid.
     *************************************************************************************/
    private onDateExpressionChange(newValue: string): string {
        // Validates a "[Today]" or "[Today] +/- n" expression (empty is also valid)
        const regex = new RegExp(/^\[Today\](\s{0,}[\+-]\s{0,}\[{0,1}\d{1,4}\]{0,1}){0,1}$/);
        const isValid = regex.test(newValue) || isEmpty(newValue);
        if (isValid) {
            // Skip the update when this was triggered indirectly by a date picker change
            // (expression cleared while a concrete date value is set)
            if (!(isEmpty(newValue) && this.state.filter.value != null)) {
                this.updateFilter({ value: null, expression: newValue });
            }
            return '';
        }
        return this.props.strings.datePickerExpressionError;
    }
    /*************************************************************************************
     * When the include time checkbox changes
     * @param ev : The React.FormEvent object which contains the element that has changed
     * @param checked : Whether the checkbox is checked or not
     *************************************************************************************/
    private onDateIncludeTimeChange(ev?: React.FormEvent<HTMLInputElement>, checked?: boolean) {
        this.updateFilter({ includeTime: checked });
    }
    /*************************************************************************************
     * When the join ChoiceGroup changes
     *************************************************************************************/
    private onJoinChoiceChange(ev?: React.FormEvent<HTMLInputElement>, option?: IChoiceGroupOption) {
        if (option) {
            this.updateFilter({ join: QueryFilterJoin[option.key] });
        }
    }
    /*************************************************************************************
     * Calls the parent onChanged with the updated IQueryFilter object
     * @param filter : The filter that was just committed to state
     *************************************************************************************/
    private onAnyChange(filter: IQueryFilter) {
        if (this.props.onChanged) {
            this.props.onChanged(filter);
        }
    }
    /*************************************************************************************
     * Returns the options for the field Dropdown component, prefixed with a
     * "select a field" placeholder entry.
     *************************************************************************************/
    private getFieldDropdownOptions(): IDropdownOption[] {
        const options: IDropdownOption[] = [
            { key: "", text: this.props.strings.fieldSelectLabel }
        ];
        for (const field of this.props.fields) {
            options.push({ key: field.internalName, text: Text.format("{0} \{\{{1}\}\}", field.displayName, field.internalName) });
        }
        return options;
    }
    /*************************************************************************************
     * Returns the options for the operator Dropdown component, based on the type of
     * the currently selected field.
     *************************************************************************************/
    private getOperatorDropdownOptions(): IDropdownOption[] {
        const fieldType = this.state.filter.field ? this.state.filter.field.type : QueryFilterFieldType.Text;
        let options: IDropdownOption[];
        // Operators for User and Taxonomy field types
        if (fieldType == QueryFilterFieldType.User || fieldType == QueryFilterFieldType.Taxonomy) {
            options = [
                { key: QueryFilterOperator[QueryFilterOperator.ContainsAny], text: this.props.strings.operatorContainsAnyLabel },
                { key: QueryFilterOperator[QueryFilterOperator.ContainsAll], text: this.props.strings.operatorContainsAllLabel },
                { key: QueryFilterOperator[QueryFilterOperator.IsNull], text: this.props.strings.operatorIsNullLabel },
                { key: QueryFilterOperator[QueryFilterOperator.IsNotNull], text: this.props.strings.operatorIsNotNullLabel }
            ];
        }
        // Operators for Text, Number, Datetime and Lookup field types
        else {
            options = [
                { key: QueryFilterOperator[QueryFilterOperator.Eq], text: this.props.strings.operatorEqualLabel },
                { key: QueryFilterOperator[QueryFilterOperator.Neq], text: this.props.strings.operatorNotEqualLabel },
                { key: QueryFilterOperator[QueryFilterOperator.Gt], text: this.props.strings.operatorGreaterLabel },
                { key: QueryFilterOperator[QueryFilterOperator.Lt], text: this.props.strings.operatorLessLabel },
                { key: QueryFilterOperator[QueryFilterOperator.Geq], text: this.props.strings.operatorGreaterEqualLabel },
                { key: QueryFilterOperator[QueryFilterOperator.Leq], text: this.props.strings.operatorLessEqualLabel },
                { key: QueryFilterOperator[QueryFilterOperator.IsNull], text: this.props.strings.operatorIsNullLabel },
                { key: QueryFilterOperator[QueryFilterOperator.IsNotNull], text: this.props.strings.operatorIsNotNullLabel }
            ];
            // Specific operators for text field type
            if (fieldType == QueryFilterFieldType.Text) {
                options = options.concat([
                    { key: QueryFilterOperator[QueryFilterOperator.BeginsWith], text: this.props.strings.operatorBeginsWithLabel },
                    { key: QueryFilterOperator[QueryFilterOperator.Contains], text: this.props.strings.operatorContainsLabel }
                ]);
            }
        }
        return options;
    }
    /*************************************************************************************
     * Returns the options for the join ChoiceGroup component (And / Or)
     *************************************************************************************/
    private getJoinGroupOptions(): IChoiceGroupOption[] {
        const options: IChoiceGroupOption[] = [
            { key: QueryFilterJoin[QueryFilterJoin.And], text: this.props.strings.andLabel, checked: (this.state.filter.join == QueryFilterJoin.And) },
            { key: QueryFilterJoin[QueryFilterJoin.Or], text: this.props.strings.orLabel, checked: (this.state.filter.join == QueryFilterJoin.Or) }
        ];
        return options;
    }
    /*************************************************************************************
     * Returns the user suggestions based on the specified user-entered filter
     *************************************************************************************/
    private onLoadPeoplePickerSuggestions(filterText: string, currentPersonas: IPersonaProps[], limitResults?: number) {
        if (isEmpty(filterText)) {
            return [];
        }
        return this.props.onLoadPeoplePickerSuggestions(filterText, currentPersonas, limitResults);
    }
    /*************************************************************************************
     * Returns the tag suggestions based on the specified user-entered filter
     *************************************************************************************/
    private onLoadTagPickerSuggestions(filterText: string, currentTerms: ITag[]) {
        if (isEmpty(filterText)) {
            return [];
        }
        return this.props.onLoadTaxonomyPickerSuggestions(this.state.filter.field, filterText, currentTerms);
    }
    /*************************************************************************************
     * Converts the specified filter value into a Date object if valid, otherwise null
     * @param dateValue : The filter value that must be transformed into a Date object
     *************************************************************************************/
    private getDatePickerValue(dateValue: string | Date | IPersonaProps[] | ITag[]): Date {
        if (dateValue instanceof Date) {
            return dateValue;
        }
        else if (typeof (dateValue) === 'string') {
            // Strict ISO-8601 parsing only; anything else yields null
            const date = moment(dateValue, moment.ISO_8601, true);
            if (date.isValid()) {
                return date.toDate();
            }
        }
        return null;
    }
    /*************************************************************************************
     * Converts the date resolved by the DatePicker into a formatted string
     * @param date : The date resolved by the DatePicker
     *************************************************************************************/
    private onDatePickerFormat(date: Date): string {
        return moment(date).format(this.props.strings.datePickerFormat);
    }
    /*************************************************************************************
     * Converts the string manually entered by the user in the date picker to a Date
     * @param dateStr : The string that must be parsed to a Date object
     *************************************************************************************/
    private onDatePickerParse(dateStr: string): Date {
        const date = moment(dateStr, this.props.strings.datePickerFormat, true);
        // NOTE(review): an unparseable string yields an Invalid Date, which the
        // DatePicker treats as no selection — confirm this is the intended behavior.
        return date.toDate();
    }
    /*************************************************************************************
     * Renders the the QueryFilter component
     *************************************************************************************/
    public render() {
        const filterFieldKey = this.state.filter.field != null ? this.state.filter.field.internalName : "";
        const datePickerValue = this.getDatePickerValue(this.state.filter.value);
        // Null checks need no value input at all
        const hideValueSection = this.state.filter.operator == QueryFilterOperator.IsNull || this.state.filter.operator == QueryFilterOperator.IsNotNull;
        const showTextField = (!this.state.filter.field || (this.state.filter.field.type == QueryFilterFieldType.Text || this.state.filter.field.type == QueryFilterFieldType.Number || this.state.filter.field.type == QueryFilterFieldType.Lookup)) && !hideValueSection;
        const showPeoplePicker = this.state.filter.field && this.state.filter.field.type == QueryFilterFieldType.User && !hideValueSection;
        const showTaxonomyPicker = this.state.filter.field && this.state.filter.field.type == QueryFilterFieldType.Taxonomy && !hideValueSection;
        const showDatePicker = this.state.filter.field && this.state.filter.field.type == QueryFilterFieldType.Datetime && !hideValueSection;
        const taxonomyPickerSuggestionProps: IBasePickerSuggestionsProps = {
            suggestionsHeaderText: this.props.strings.taxonomyPickerSuggestionHeader,
            noResultsFoundText: this.props.strings.taxonomyPickerNoResults,
            loadingText: this.props.strings.taxonomyPickerLoading
        };
        const peoplePickerSuggestionProps: IBasePickerSuggestionsProps = {
            suggestionsHeaderText: this.props.strings.peoplePickerSuggestionHeader,
            noResultsFoundText: this.props.strings.peoplePickerNoResults,
            loadingText: this.props.strings.peoplePickerLoading
        };
        return (
            <div className={styles.queryFilter + ' ' + (this.props.disabled ? styles.disabled : '')}>
                <div className={styles.paddingContainer}>
                    <Dropdown label={this.props.strings.fieldLabel}
                        disabled={this.props.disabled}
                        onChanged={this.onFieldDropdownChange.bind(this)}
                        selectedKey={filterFieldKey}
                        options={this.getFieldDropdownOptions()} />
                    <Dropdown label={this.props.strings.operatorLabel}
                        disabled={this.props.disabled}
                        onChanged={this.onOperatorDropdownChange.bind(this)}
                        selectedKey={QueryFilterOperator[this.state.filter.operator]}
                        options={this.getOperatorDropdownOptions()} />
                    { showTextField &&
                        <TextField label={this.props.strings.valueLabel}
                            disabled={this.props.disabled}
                            onGetErrorMessage={ this.onValueTextFieldChange.bind(this) }
                            deferredValidationTime={500}
                            value={ this.state.filter.value != null ? this.state.filter.value as string : '' } />
                    }
                    { showPeoplePicker &&
                        <div>
                            <Label>{ this.props.strings.valueLabel }</Label>
                            <NormalPeoplePicker
                                onResolveSuggestions={ this.onLoadPeoplePickerSuggestions.bind(this) }
                                onChange={ this.onPeoplePickerResolve.bind(this) }
                                defaultSelectedItems={ this.state.filter.value as IPersonaProps[] }
                                getTextFromItem={ (user: IPersonaProps) => user.primaryText }
                                pickerSuggestionsProps={ peoplePickerSuggestionProps }
                                className={ styles.peoplePicker + (this.state.filter.me ? ' ' + styles.disabled : '') }
                                inputProps={{ disabled: this.state.filter.me }}
                                key={ "peoplePicker" + this.state.pickersKey } />
                            <Checkbox
                                label={ this.props.strings.peoplePickerMe }
                                onChange={ this.onPeoplePickerCheckboxChange.bind(this) }
                                checked={ this.state.filter.me } />
                        </div>
                    }
                    { showTaxonomyPicker &&
                        <div>
                            <Label>{ this.props.strings.valueLabel }</Label>
                            <TagPicker
                                onResolveSuggestions={ this.onLoadTagPickerSuggestions.bind(this) }
                                onChange={ this.onTaxonomyPickerResolve.bind(this) }
                                defaultSelectedItems={ this.state.filter.value as ITag[] }
                                getTextFromItem={ (term: ITag) => term.name }
                                pickerSuggestionsProps={ taxonomyPickerSuggestionProps }
                                key={ "taxonomyPicker" + this.state.pickersKey } />
                        </div>
                    }
                    { showDatePicker &&
                        <div>
                            <DatePicker
                                label={ this.props.strings.valueLabel }
                                placeholder={ this.props.strings.datePickerDatePlaceholder }
                                allowTextInput={ true }
                                value={ datePickerValue }
                                formatDate={ this.onDatePickerFormat.bind(this) }
                                parseDateFromString={ this.onDatePickerParse.bind(this) }
                                onSelectDate={ this.onDatePickerChange.bind(this) }
                                strings={ this.props.strings.datePickerStrings } />
                            <TextField
                                placeholder={ this.props.strings.datePickerExpressionPlaceholder }
                                onGetErrorMessage={ this.onDateExpressionChange.bind(this) }
                                deferredValidationTime={ 500 }
                                value={ this.state.filter.expression || '' } />
                            <Checkbox
                                label={ this.props.strings.datePickerIncludeTime }
                                onChange={ this.onDateIncludeTimeChange.bind(this) }
                                checked={ this.state.filter.includeTime } />
                        </div>
                    }
                    <ChoiceGroup options={this.getJoinGroupOptions()}
                        onChange={this.onJoinChoiceChange.bind(this)}
                        disabled={this.props.disabled} />
                </div>
            </div>
        );
    }
}
import { isObject, isFunction, isPropertyKey, isNumber, isDefined } from '@esfx/internal-guards';
import { AccessorPropertyDescriptor, MethodPropertyDescriptor } from '@esfx/type-model';
import { DecoratorSignature, MappedDecoratorSignature, MappedDecoratorFactorySignature, MappedDecoratorOrDecoratorFactorySignature } from './typeModel';
/**
 * Describes a class decoration site: the constructor function being decorated.
 */
export interface ClassDescriptor<T extends Function = Function> {
    kind: "class";
    target: T;
}
/**
 * Describes a member (method/accessor/field) decoration site.
 */
export interface MemberDescriptor<T = any> {
    kind: "member";
    // Constructor function for static members, prototype object for instance members
    // (see isStatic/isNonStatic below).
    target: object;
    key: string | symbol;
    // Present for methods/accessors; undefined for fields.
    descriptor?: TypedPropertyDescriptor<T>;
}
/**
 * A member whose property descriptor carries a getter and/or setter.
 */
export interface AccessorMemberDescriptor<T = any> extends MemberDescriptor<T> {
    descriptor: AccessorPropertyDescriptor<T>;
}
/**
 * A member whose property descriptor holds a callable `value`.
 */
export interface MethodMemberDescriptor<T extends (...args: any[]) => any = (...args: any[]) => any> extends MemberDescriptor<T> {
    descriptor: MethodPropertyDescriptor<T>;
}
/**
 * A member with no property descriptor (a field declaration).
 */
export interface FieldMemberDescriptor<T = any> extends MemberDescriptor<T> {
    descriptor?: undefined;
}
/**
 * Describes a parameter decoration site.
 */
export interface ParameterDescriptor {
    kind: "parameter";
    target: object;
    key: string | symbol;
    // Zero-based position of the parameter in the signature.
    index: number;
}
/** Union of every decoration site, discriminated by the `kind` tag. */
export type DecoratorDescriptor = ClassDescriptor | MemberDescriptor | ParameterDescriptor;
/** Frozen default property attributes for accessor members: non-enumerable, configurable. */
export const defaultAccessorAttributes = Object.freeze({
    enumerable: false,
    configurable: true
});
/** Frozen default property attributes for methods: non-enumerable, configurable, writable. */
export const defaultMethodAttributes = Object.freeze({
    enumerable: false,
    configurable: true,
    writable: true
});
/** Frozen default property attributes for fields: enumerable, configurable, writable. */
export const defaultFieldAttributes = Object.freeze({
    enumerable: true,
    configurable: true,
    writable: true
});
export function createClassDescriptor<T extends Function = Function>(target: T): ClassDescriptor<T> {
    // Wrap a constructor in the normalized descriptor shape used by the helpers below.
    const descriptor: ClassDescriptor<T> = { kind: "class", target };
    return descriptor;
}
export function createMemberDescriptor<T = any>(target: object, propertyKey: PropertyKey, descriptor?: TypedPropertyDescriptor<T>): MemberDescriptor<T> {
    // Normalize the key (symbols pass through, everything else is coerced to string)
    // and package the raw decorator arguments as a MemberDescriptor.
    const key = toPropertyKey(propertyKey);
    return { kind: "member", target, key, descriptor };
}
export function createParameterDescriptor(target: object, propertyKey: PropertyKey, parameterIndex: number): ParameterDescriptor {
    // Package the raw parameter-decorator arguments, normalizing the key.
    const key = toPropertyKey(propertyKey);
    return { kind: "parameter", target, key, index: parameterIndex };
}
export function isClass(value: DecoratorDescriptor): value is ClassDescriptor {
    // Discriminate on the `kind` tag.
    const { kind } = value;
    return kind === "class";
}
export function isMember(value: DecoratorDescriptor): value is MemberDescriptor {
    // Discriminate on the `kind` tag.
    const { kind } = value;
    return kind === "member";
}
export function isAccessor(value: DecoratorDescriptor): value is AccessorMemberDescriptor {
    // An accessor is a member whose property descriptor carries a getter
    // and/or a setter function.
    if (value.kind !== "member") return false;
    const { descriptor } = value;
    return isObject(descriptor) && (isFunction(descriptor.get) || isFunction(descriptor.set));
}
export function isMethod(value: DecoratorDescriptor): value is MethodMemberDescriptor {
    // A method is a member whose property descriptor holds a callable `value`.
    if (value.kind !== "member") return false;
    const { descriptor } = value;
    return isObject(descriptor) && isFunction(descriptor.value);
}
export function isField(value: DecoratorDescriptor): value is FieldMemberDescriptor {
    // Fields are members with no property descriptor at decoration time.
    if (value.kind !== "member") return false;
    return value.descriptor === undefined;
}
export function isStatic(value: MemberDescriptor | ParameterDescriptor): boolean {
    // Static members are declared on the constructor function itself: the target
    // must be a function that round-trips through its own prototype's `constructor`.
    if (!isFunction(value.target)) return false;
    return value.target.prototype.constructor === value.target;
}
export function isNonStatic(value: MemberDescriptor | ParameterDescriptor): boolean {
    // Instance members are declared on a prototype object: the target must be
    // its own constructor's prototype.
    if (!isObject(value.target)) return false;
    return value.target.constructor.prototype === value.target;
}
export function isParameter(value: DecoratorDescriptor): value is ParameterDescriptor {
    // Discriminate on the `kind` tag.
    const { kind } = value;
    return kind === "parameter";
}
/** Argument tuple received by a class decorator: (constructor). */
export type ClassDecoratorArguments = Parameters<(target: Function) => void>;
/** Argument tuple received by a member decorator: (target, propertyKey, descriptor?). */
export type MemberDecoratorArguments = Parameters<(target: object, propertyKey: PropertyKey, descriptor?: PropertyDescriptor) => void>;
/** Argument tuple received by a parameter decorator: (target, propertyKey, parameterIndex). */
export type ParameterDecoratorArguments = Parameters<(target: object, propertyKey: PropertyKey, parameterIndex: number) => void>;
/** Any of the three decorator argument tuples. */
export type DecoratorArguments =
    | ClassDecoratorArguments
    | MemberDecoratorArguments
    | ParameterDecoratorArguments;
export function isParameterDecoratorArguments(args: DecoratorArguments | IArguments | unknown[]): args is ParameterDecoratorArguments {
    // Parameter decorators receive exactly (target, propertyKey, parameterIndex).
    if (args.length !== 3) return false;
    return isObject(args[0]) && isPropertyKey(args[1]) && isNumber(args[2]);
}
export function isMemberDecoratorArguments(args: DecoratorArguments | IArguments | unknown[]): args is MemberDecoratorArguments {
    // Member decorators receive (target, propertyKey) or (target, propertyKey,
    // descriptor) where the descriptor is a non-function object or undefined.
    if (args.length === 2) {
        return isObject(args[0]) && isPropertyKey(args[1]);
    }
    return args.length >= 3
        && isObject(args[0])
        && isPropertyKey(args[1])
        && (args[2] === undefined || (isObject(args[2]) && !isFunction(args[2])));
}
export function isClassDecoratorArguments(args: DecoratorArguments | IArguments | unknown[]): args is ClassDecoratorArguments {
    // Class decorators receive a single constructor function.
    if (args.length !== 1) return false;
    return isFunction(args[0]);
}
export function isDecoratorArguments(args: DecoratorArguments | IArguments | unknown[]): args is DecoratorArguments {
    // True when the arguments match any of the three decorator shapes.
    if (isClassDecoratorArguments(args)) return true;
    if (isMemberDecoratorArguments(args)) return true;
    return isParameterDecoratorArguments(args);
}
export function getDecoratorInfoFromArguments(args: ClassDecoratorArguments): ClassDescriptor;
export function getDecoratorInfoFromArguments(args: MemberDecoratorArguments): MemberDescriptor;
export function getDecoratorInfoFromArguments(args: ParameterDecoratorArguments): ParameterDescriptor;
export function getDecoratorInfoFromArguments(args: DecoratorArguments): DecoratorDescriptor;
export function getDecoratorInfoFromArguments(args: DecoratorArguments | IArguments): DecoratorDescriptor | undefined;
export function getDecoratorInfoFromArguments(args: DecoratorArguments | IArguments) {
    // Classify the raw decorator arguments and build the matching descriptor.
    // The parameter shape is tested first since it is the most specific
    // (3 args ending in a numeric index). Returns undefined when nothing matches.
    if (isParameterDecoratorArguments(args)) {
        return createParameterDescriptor(args[0], args[1], args[2]);
    }
    if (isMemberDecoratorArguments(args)) {
        return createMemberDescriptor(args[0], args[1], args[2]);
    }
    if (isClassDecoratorArguments(args)) {
        return createClassDescriptor(args[0]);
    }
    return undefined;
}
function toPropertyKey(value: unknown): string | symbol {
    // Symbols pass through untouched; every other value is coerced via string
    // concatenation (same coercion semantics as the previous implementation).
    if (typeof value === "symbol") {
        return value;
    }
    return "" + value;
}
/** Throws `e`; typed `never` so it can be used in expression position (e.g. `x || __throw(err)`). */
function __throw(e: unknown): never {
    throw e;
}
export function createDecorator<S extends DecoratorSignature<[]>>(decorator: S): MappedDecoratorSignature<S>;
export function createDecorator<S extends (descriptor: DecoratorDescriptor) => any>(decorator: S) {
    // Adapt a descriptor-based decorator to TypeScript's positional decorator
    // calling convention. Throws TypeError when the arguments match no known shape.
    return (...args: DecoratorArguments) => {
        const info = getDecoratorInfoFromArguments(args);
        if (!info) throw new TypeError();
        return decorator(info);
    };
}
export function createDecoratorFactory<S extends DecoratorSignature>(decorator: S): MappedDecoratorFactorySignature<S>;
export function createDecoratorFactory<A extends any[], F extends (descriptor: DecoratorDescriptor, ...args: A) => unknown>(decorator: F) {
    // Produce a decorator *factory*: the outer arguments are captured first, then
    // the resulting decorator is applied to the decoration site.
    return (...outer: A) =>
        (...args: DecoratorArguments): ReturnType<F> => {
            const info = getDecoratorInfoFromArguments(args);
            if (!info) throw new TypeError();
            return decorator(info, ...outer) as ReturnType<F>;
        };
}
export function createDecoratorOrDecoratorFactory<S extends DecoratorSignature>(decorator: S): MappedDecoratorOrDecoratorFactorySignature<S>;
export function createDecoratorOrDecoratorFactory<A extends any[], F extends (descriptor: DecoratorDescriptor, ...args: A | []) => unknown>(decorator: F) {
    return (...outerArgs: A | DecoratorArguments) => {
        if (isDecoratorArguments(outerArgs)) {
            // Used directly as a decorator: @dec
            const info = getDecoratorInfoFromArguments(outerArgs);
            if (!info) throw new TypeError();
            return decorator(info);
        }
        // Used as a factory: @dec(...extraArgs)
        return (...args: DecoratorArguments) => {
            const info = getDecoratorInfoFromArguments(args);
            if (!info) throw new TypeError();
            return decorator(info, ...outerArgs);
        };
    };
}
// Argument tuple accepted by decorate(): either the class form or the
// member form.
type DecorateArguments =
    | Parameters<typeof decorateClass>
    | Parameters<typeof decorateMember>;
function isDecorateClassArguments(args: DecorateArguments): args is Parameters<typeof decorateClass> {
    // Class form is exactly: (decorator array, class constructor).
    if (args.length !== 2) {
        return false;
    }
    return Array.isArray(args[0]) && isFunction(args[1]);
}
function isDecorateMemberArguments(args: DecorateArguments): args is Parameters<typeof decorateMember> {
    // Member form: (decorator array, target object, property key, optional
    // property descriptor object).
    if (args.length < 3) {
        return false;
    }
    const [decorators, target, key, descriptor] = args as any[];
    return Array.isArray(decorators)
        && isObject(target)
        && isPropertyKey(key)
        && (descriptor === undefined || isObject(descriptor));
}
export function decorate(decorators: ((target: Function) => Function | void)[], target: Function): Function;
export function decorate(decorators: ((target: object, propertyKey: PropertyKey, descriptor?: PropertyDescriptor) => PropertyDescriptor | void)[], target: object, propertyKey: PropertyKey, descriptor?: PropertyDescriptor): PropertyDescriptor | void;
export function decorate(...args: DecorateArguments) {
    // Dispatch on the argument shape: class decoration vs member decoration.
    if (isDecorateClassArguments(args)) {
        return decorateClass(...args);
    }
    if (isDecorateMemberArguments(args)) {
        return decorateMember(...args);
    }
    throw new TypeError();
}
export function decorateClass(decorators: ((target: Function) => Function | void)[], target: Function): Function {
    // Apply decorators right-to-left, threading each replacement constructor
    // into the next decorator. A decorator that returns a non-function,
    // defined value is a usage error.
    let result = target;
    for (const decorator of [...decorators].reverse()) {
        const decorated = decorator(result);
        if (isDefined(decorated)) {
            if (!isFunction(decorated)) throw new TypeError();
            result = decorated;
        }
    }
    return result;
}
export function decorateMember(decorators: ((target: object, propertyKey: PropertyKey, descriptor?: PropertyDescriptor) => PropertyDescriptor | void)[], target: object, propertyKey: PropertyKey, descriptor?: PropertyDescriptor): PropertyDescriptor | void {
if (typeof propertyKey !== "symbol") propertyKey = "" + propertyKey;
for (let i = decorators.length - 1; i >= 0; i--) {
const decorator = decorators[i];
const decorated = decorator(target, propertyKey, descriptor!);
if (isDefined(decorated)) {
if (!isObject(decorated)) throw new TypeError();
descriptor = decorated;
}
}
return descriptor;
} | the_stack |
import { useSelector } from "react-redux";
import { AppState } from "../AppState";
import { Constants as C } from "../Constants";
import { UserProfileDlg } from "../dlg/UserProfileDlg";
import { NodeActionType } from "../enums/NodeActionType";
import { TypeHandlerIntf } from "../intf/TypeHandlerIntf";
import * as J from "../JavaIntf";
import { PubSub } from "../PubSub";
import { Singletons } from "../Singletons";
import { ButtonBar } from "../widget/ButtonBar";
import { Clearfix } from "../widget/Clearfix";
import { Div } from "../widget/Div";
import { Icon } from "../widget/Icon";
import { IconButton } from "../widget/IconButton";
import { Img } from "../widget/Img";
import { Span } from "../widget/Span";
// todo-1: need to switch to the more efficient way of using nid attribute
// on elements (search for "nid:" in code), to avoid creating new functions
// every time this component renders (and same for entire app!)
// Module-level handle to the app singletons. It is populated asynchronously
// when the "singletons ready" PubSub event fires, so code running during very
// early module evaluation must not assume S is set yet.
let S: Singletons;
PubSub.sub(C.PUBSUB_SingletonsReady, (ctx: Singletons) => {
    S = ctx;
});
/* General Widget that doesn't fit any more reusable or specific category other than a plain Div, but inherits capability of Comp class */
export class NodeCompRowHeader extends Div {

    /* node: the node this header row belongs to
       allowAvatars: render the owner's avatar image when one is available
       isMainTree: true on the main tree view (the edit button is rendered elsewhere there)
       isFeed: true on the feed view (enables the reply icon on non-read-only nodes)
       jumpButton: force a "jump to node" button (used for search/timeline results) */
    constructor(private node: J.NodeInfo, private allowAvatars: boolean, private isMainTree: boolean, private isFeed: boolean = false, private jumpButton: boolean = false) {
        super(null, {
            className: "header-text"
        });
    }

    /* Rebuilds all header children (avatar, owner name, type icon, action icons,
    and the right-floated timestamp/sharing block) from the current app state. */
    preRender(): void {
        let state: AppState = useSelector((state: AppState) => state);
        let node = this.node;
        let children = [];
        let avatarImg: Img = null;

        // owner avatar (admin-owned nodes get none, to cut visual noise)
        if (this.allowAvatars && node.owner !== J.PrincipalName.ADMIN) {
            avatarImg = S.render.makeAvatarImage(node, state);
            if (avatarImg) {
                children.push(avatarImg);
            }
        }

        // "P<n>" badge text; a priority of "0" means unset and renders nothing
        let priorityVal = S.props.getNodePropVal(J.NodeProp.PRIORITY, node);
        let priority = (priorityVal && priorityVal !== "0") ? "P" + priorityVal : "";

        // now that we have this stuff visible by default on all nodes, we don't want users to need to
        // see 'admin' on all admin nodes. too noisy
        if (node.owner && node.owner !== "?" && node.owner !== "admin") {
            let displayName = node.displayName || ("@" + node.owner);
            children.push(new Span(displayName, {
                className: (node.owner === state.userName) ? "created-by-me" : "created-by-other",
                title: "Show Profile",
                onClick: (evt: any) => {
                    new UserProfileDlg(node.ownerId, state).open();
                }
            }));
        }

        // node-type icon; hovering also marks this node as the current drag source
        let typeHandler: TypeHandlerIntf = S.plugin.getTypeHandler(node.type);
        if (typeHandler) {
            let iconClass = typeHandler.getIconClass();
            if (iconClass) {
                children.push(new Icon({
                    className: iconClass + " rowTypeIcon",
                    title: "Node Type: " + typeHandler.getName(),
                    onMouseOver: () => { S.quanta.draggableId = node.id; },
                    onMouseOut: () => { S.quanta.draggableId = null; }
                }));
            }
        }

        // named nodes: clicking the name copies the name-based URL to the clipboard
        if (node.name) {
            children.push(new Span(node.name, {
                className: "btn-secondary nodeName",
                title: "Copy name-based URL to clipboard",
                onClick: () => {
                    let url = window.location.origin + S.util.getPathPartForNamedNode(node);
                    S.util.copyToClipboard(url);
                    S.util.flashMessage("Copied to Clipboard: " + url, "Clipboard", true);
                }
            }));
        }

        /* for admin user show id, ordinal, and type right on the row. We have a bug where
        the logicalOrdinal is showing as -1 here, but it's just because it's not being set on the server. */
        if (state.isAdminUser) {
            // looks like root node of pages don't have this ordinal set (it's -1 so for now we just hide it in that case)
            let ordinal = node.logicalOrdinal === -1 ? "" : node.logicalOrdinal;
            children.push(new Span(ordinal + " [" + node.ordinal + "] " + node.type, { className: "marginRight" }));
        }

        // "show URLs" link icon is always present
        children.push(new Icon({
            className: "fa fa-link fa-lg marginRight",
            title: "Show URLs for this node",
            onClick: () => S.render.showNodeUrl(node, state)
        }));

        // Allow bookmarking any kind of node other than bookmark nodes.
        if (!state.isAnonUser && node.type !== J.NodeType.BOOKMARK && node.type !== J.NodeType.BOOKMARK_LIST) {
            children.push(new Icon({
                className: "fa fa-bookmark fa-lg marginRight",
                title: "Bookmark this Node",
                onClick: () => S.edit.addBookmark(node, state)
            }));
        }

        let publicReadOnly = S.props.isPublicReadOnly(node);
        let actPubId = S.props.getNodePropVal(J.NodeProp.ACT_PUB_ID, node);

        // always show a reply if activity pub, or else only if on feed view.
        if ((this.isFeed && !publicReadOnly) || actPubId) {
            children.push(new Icon({
                title: "Reply to this Node",
                className: "fa fa-reply fa-lg",
                onClick: () => {
                    if (state.isAnonUser) {
                        S.util.showMessage("Login to create content and reply to nodes.", "Login!");
                    }
                    else {
                        S.edit.addNode(node.id, null, null, node.id, state);
                    }
                }
            }));
        }

        if (priority) {
            children.push(new Span(priority, {
                // the priority value also selects the CSS class (priorityTag1, priorityTag2, ...)
                className: "priorityTag" + priorityVal
            }));
        }

        // right-floated block: modified time, sharing indicators, edit/delete/jump buttons
        let floatUpperRightDiv: Div = new Div(null, {
            className: "float-right floatRightHeaderDiv"
        });

        if (node.lastModified) {
            floatUpperRightDiv.addChild(new Span(S.util.formatDate(new Date(node.lastModified))));
        }

        if (S.props.isPublic(node)) {
            let appendNode = S.props.isPublicWritable(node) ? "Anyone can reply" : "No Replies Allowed";
            floatUpperRightDiv.addChild(new Icon({
                className: "fa fa-globe fa-lg iconMarginLeft",
                title: "Node is Public\n(" + appendNode + ")"
            }));
        }
        else if (S.props.isShared(node)) {
            let allSharingNames = S.util.getSharingNames(node, true);
            let sharingNames = allSharingNames;
            let isPublic = sharingNames.toLowerCase().indexOf("public") !== -1;
            let nlIdx = sharingNames.indexOf("\n");
            if (nlIdx !== -1) {
                // NOTE(review): substring(nlIdx) keeps everything from the first
                // newline onward; if the intent is "first name plus '+'" this
                // should likely be substring(0, nlIdx) — confirm against the UI.
                sharingNames = sharingNames.substring(nlIdx) + "+";
            }
            floatUpperRightDiv.addChild(
                new Span(null, {
                    className: isPublic ? "sharingNamesDispPublic" : "sharingNamesDisp",
                    title: "Shared to:\n\n" + allSharingNames
                }, [
                    // show sharing names only if not public
                    !isPublic ? new Span(sharingNames) : null,
                    new Icon({
                        className: "fa fa-envelope fa-lg iconMarginLeft"
                    })
                ]));
        }

        let editingAllowed = S.edit.isEditAllowed(node, state);
        let deleteAllowed = false;
        let editableNode = true;
        if (state.isAdminUser) {
            // admins can always edit and delete
            editingAllowed = true;
            editableNode = true;
            deleteAllowed = true;
        }
        else if (typeHandler) {
            // let the node's type plugin veto edit/delete
            if (editingAllowed) {
                editingAllowed = typeHandler.allowAction(NodeActionType.editNode, node, state);
                editableNode = typeHandler.allowAction(NodeActionType.editNode, node, state);
                deleteAllowed = typeHandler.allowAction(NodeActionType.delete, node, state);
            }
        }

        let editButton: IconButton = null;
        let jumpButton: IconButton = null;

        /* Note: if this is on the main tree then we don't show the edit button here because it'll be
        showing up in a different place. We show here only for timeline, or search results views */
        if (!this.isMainTree && state.userPreferences.editMode) {
            if (editingAllowed && editableNode && !state.editNode) {
                editButton = new IconButton("fa-edit", null, {
                    className: "marginLeft",
                    onClick: S.edit.runEditNodeByClick,
                    title: "Edit Node",
                    nid: node.id
                });
            }

            // never offer to delete the user's home node
            if (deleteAllowed && node.id !== state.homeNodeId) {
                floatUpperRightDiv.addChild(new Icon({
                    className: "fa fa-trash fa-lg buttonBarIcon",
                    title: "Delete selected nodes",
                    nid: node.id,
                    onClick: S.edit.deleteSelNodes
                }));
            }
        }

        let jumpButtonAdded = false;
        /* If we're not on a search result display (or timeline) and there's a TARGET_ID on the node
        then we need to show the jump button point to it.

        NOTE: todo-1: This logic will be the key to how we can make
        bookmarks work (a future feature). If bookmarks simply have the TARGET_ID then that basically
        can make them functional as bookmarks, because TARGET_ID is essentially all it
        takes to be a functional bookmark to the id.
        */
        if (this.isMainTree) {
            const targetId = S.props.getNodePropVal(J.NodeProp.TARGET_ID, this.node);
            if (targetId) {
                jumpButtonAdded = true;
                jumpButton = new IconButton("fa-arrow-right", null, {
                    className: "marginLeft",
                    onClick: () => S.view.jumpToId(targetId),
                    title: "Jump to the Node"
                });
            }
        }

        // explicit jump button requested by the caller (search/timeline rows)
        if (this.jumpButton && !jumpButtonAdded) {
            jumpButton = new IconButton("fa-arrow-right", null, {
                className: "marginLeft",
                onClick: () => S.srch.clickSearchNode(node.id, state),
                title: "Jump to this Node in the Main Tab"
            });
        }

        if (editButton || jumpButton) {
            floatUpperRightDiv.addChild(new ButtonBar([editButton, jumpButton], null, "marginLeft"));
        }

        if (floatUpperRightDiv.hasChildren()) {
            children.push(floatUpperRightDiv);
            children.push(new Clearfix());
        }

        this.setChildren(children);
    }
}
import { ServiceType } from "@protobuf-ts/runtime-rpc";
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
// NOTE: the interfaces below are @generated from the twirp.example.haberdasher
// .proto messages; regenerate from the schema rather than editing by hand.
/**
 * @generated from protobuf message twirp.example.haberdasher.FindHatRPC
 */
export interface FindHatRPC {
    /**
     * @generated from protobuf field: string hat_id = 1;
     */
    hatId: string;
}
/**
 * @generated from protobuf message twirp.example.haberdasher.ListHatRPC
 */
export interface ListHatRPC {
    /**
     * @generated from protobuf field: repeated twirp.example.haberdasher.Filter filters = 1;
     */
    filters: Filter[];
}
/**
 * Size of a Hat, in inches.
 *
 * @generated from protobuf message twirp.example.haberdasher.Size
 */
export interface Size {
    /**
     * @generated from protobuf field: int32 inches = 1;
     */
    inches: number; // must be > 0 (documented constraint; not enforced by the codec)
}
/**
 * A Hat is a piece of headwear made by a Haberdasher.
 *
 * @generated from protobuf message twirp.example.haberdasher.Hat
 */
export interface Hat {
    /**
     * @generated from protobuf field: string id = 1;
     */
    id: string;
    /**
     * @generated from protobuf field: int32 inches = 2;
     */
    inches: number;
    /**
     * @generated from protobuf field: string color = 3;
     */
    color: string; // anything but "invisible"
    /**
     * @generated from protobuf field: string name = 4;
     */
    name: string; // i.e. "bowler"
    /**
     * @generated from protobuf field: repeated twirp.example.haberdasher.Hat variants = 5;
     */
    variants: Hat[];
}
/**
 * @generated from protobuf message twirp.example.haberdasher.Filter
 */
export interface Filter {
    /**
     * @generated from protobuf field: twirp.example.haberdasher.Filter order_by = 1;
     */
    orderBy: string;
    /**
     * @generated from protobuf field: twirp.example.haberdasher.Pagination pagination = 2;
     */
    pagination?: Pagination; // optional: singular message fields may be absent
}
/**
 * @generated from protobuf message twirp.example.haberdasher.Pagination
 */
export interface Pagination {
    /**
     * @generated from protobuf field: int32 limit = 1;
     */
    limit: number;
    /**
     * @generated from protobuf field: int32 offset = 2;
     */
    offset: number;
}
// The $Type classes below are @generated binary codecs (protobuf-ts runtime):
// each pairs reflection metadata (field numbers/kinds) with hand-rolled
// internalBinaryRead/Write implementations. They are reproduced verbatim —
// regenerate from the .proto instead of hand-editing.
// @generated message type with reflection information, may provide speed optimized methods
class FindHatRPC$Type extends MessageType<FindHatRPC> {
    constructor() {
        super("twirp.example.haberdasher.FindHatRPC", [
            { no: 1, name: "hat_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FindHatRPC): FindHatRPC {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string hat_id */ 1:
                    message.hatId = reader.string();
                    break;
                default:
                    // unknown fields: throw, drop, or forward per read options
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: FindHatRPC, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string hat_id = 1; */
        if (message.hatId !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.hatId);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message twirp.example.haberdasher.FindHatRPC
 */
export const FindHatRPC = new FindHatRPC$Type();
// @generated message type with reflection information, may provide speed optimized methods
class ListHatRPC$Type extends MessageType<ListHatRPC> {
    constructor() {
        super("twirp.example.haberdasher.ListHatRPC", [
            { no: 1, name: "filters", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => Filter }
        ]);
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListHatRPC): ListHatRPC {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* repeated twirp.example.haberdasher.Filter filters */ 1:
                    message.filters.push(Filter.internalBinaryRead(reader, reader.uint32(), options));
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: ListHatRPC, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* repeated twirp.example.haberdasher.Filter filters = 1; */
        for (let i = 0; i < message.filters.length; i++)
            Filter.internalBinaryWrite(message.filters[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message twirp.example.haberdasher.ListHatRPC
 */
export const ListHatRPC = new ListHatRPC$Type();
// @generated message type with reflection information, may provide speed optimized methods
class Size$Type extends MessageType<Size> {
    constructor() {
        super("twirp.example.haberdasher.Size", [
            { no: 1, name: "inches", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
        ]);
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Size): Size {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* int32 inches */ 1:
                    message.inches = reader.int32();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: Size, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* int32 inches = 1; */
        if (message.inches !== 0)
            writer.tag(1, WireType.Varint).int32(message.inches);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message twirp.example.haberdasher.Size
 */
export const Size = new Size$Type();
// @generated message type with reflection information, may provide speed optimized methods
class Hat$Type extends MessageType<Hat> {
    constructor() {
        super("twirp.example.haberdasher.Hat", [
            { no: 1, name: "id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "inches", kind: "scalar", T: 5 /*ScalarType.INT32*/ },
            { no: 3, name: "color", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 4, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 5, name: "variants", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => Hat }
        ]);
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Hat): Hat {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string id */ 1:
                    message.id = reader.string();
                    break;
                case /* int32 inches */ 2:
                    message.inches = reader.int32();
                    break;
                case /* string color */ 3:
                    message.color = reader.string();
                    break;
                case /* string name */ 4:
                    message.name = reader.string();
                    break;
                case /* repeated twirp.example.haberdasher.Hat variants */ 5:
                    // recursive message: Hat contains repeated Hat variants
                    message.variants.push(Hat.internalBinaryRead(reader, reader.uint32(), options));
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: Hat, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string id = 1; */
        if (message.id !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.id);
        /* int32 inches = 2; */
        if (message.inches !== 0)
            writer.tag(2, WireType.Varint).int32(message.inches);
        /* string color = 3; */
        if (message.color !== "")
            writer.tag(3, WireType.LengthDelimited).string(message.color);
        /* string name = 4; */
        if (message.name !== "")
            writer.tag(4, WireType.LengthDelimited).string(message.name);
        /* repeated twirp.example.haberdasher.Hat variants = 5; */
        for (let i = 0; i < message.variants.length; i++)
            Hat.internalBinaryWrite(message.variants[i], writer.tag(5, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message twirp.example.haberdasher.Hat
 */
export const Hat = new Hat$Type();
// @generated message type with reflection information, may provide speed optimized methods
class Filter$Type extends MessageType<Filter> {
    constructor() {
        super("twirp.example.haberdasher.Filter", [
            { no: 1, name: "order_by", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "pagination", kind: "message", T: () => Pagination }
        ]);
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Filter): Filter {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string order_by */ 1:
                    message.orderBy = reader.string();
                    break;
                case /* twirp.example.haberdasher.Pagination pagination */ 2:
                    // merges into an existing pagination message if already set
                    message.pagination = Pagination.internalBinaryRead(reader, reader.uint32(), options, message.pagination);
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: Filter, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string order_by = 1; */
        if (message.orderBy !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.orderBy);
        /* twirp.example.haberdasher.Pagination pagination = 2; */
        if (message.pagination)
            Pagination.internalBinaryWrite(message.pagination, writer.tag(2, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message twirp.example.haberdasher.Filter
 */
export const Filter = new Filter$Type();
// @generated message type with reflection information, may provide speed optimized methods
class Pagination$Type extends MessageType<Pagination> {
    constructor() {
        super("twirp.example.haberdasher.Pagination", [
            { no: 1, name: "limit", kind: "scalar", T: 5 /*ScalarType.INT32*/ },
            { no: 2, name: "offset", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
        ]);
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Pagination): Pagination {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* int32 limit */ 1:
                    message.limit = reader.int32();
                    break;
                case /* int32 offset */ 2:
                    message.offset = reader.int32();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: Pagination, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* int32 limit = 1; */
        if (message.limit !== 0)
            writer.tag(1, WireType.Varint).int32(message.limit);
        /* int32 offset = 2; */
        if (message.offset !== 0)
            writer.tag(2, WireType.Varint).int32(message.offset);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message twirp.example.haberdasher.Pagination
 */
export const Pagination = new Pagination$Type();
/**
 * @generated ServiceType for protobuf service twirp.example.haberdasher.Haberdasher
 */
// NOTE(review): FindHat and ListHat declare the request message type as the
// response type as well (I === O); confirm the .proto genuinely echoes the
// request rather than returning Hat messages.
export const Haberdasher = new ServiceType("twirp.example.haberdasher.Haberdasher", [
    { name: "MakeHat", options: { "google.api.http": { body: "*", post: "/hat" } }, I: Size, O: Hat },
    { name: "FindHat", options: { "google.api.http": { get: "/hat/{hat_id}" } }, I: FindHatRPC, O: FindHatRPC },
    { name: "ListHat", options: { "google.api.http": { get: "/hat" } }, I: ListHatRPC, O: ListHatRPC }
]);
import WebSocket from 'ws'
import fs from 'fs'
import elliptic from 'elliptic'
import {
keyGen,
getKeyPath,
getPass,
IClientNewKey,
KeyData,
ECCurveType } from './util'
import { KEYUTIL } from 'jsrsasign'
import {
Logger,
Checks,
LogLevelDesc,
LoggerProvider
} from '@hyperledger/cactus-common'
import readline from 'readline';
import axios from 'axios';
// Lookup of instantiated elliptic-curve contexts keyed by curve name.
type IEcdsaCurves = {
    [key: string]: elliptic.ec;
};
const EC = elliptic.ec
// Pre-instantiate one EC context per supported curve at module load.
// NOTE(review): `for..in` over the ECCurveType enum iterates its *keys*; this
// only yields valid curve names for `new EC(value)` if the enum keys equal
// their values — confirm against the ECCurveType declaration in ./util.
const ecdsaCurves = {}
for (const value in ECCurveType) {
    ecdsaCurves[value] = new EC(value)
}
export interface WsWalletOpts {
    // url of the server the wallet will connect to
    endpoint?: string;
    // name of the local key file to load or create (defaults to 'default')
    keyName?: string;
    curve?: ECCurveType;
    logLevel?: LogLevelDesc;
    // password protecting the stored private key; prompted for if absent
    password?: string
    // set to false for testing https/wss
    strictSSL?: boolean;
}
// Response to a signing request: DER signature plus the request index.
export interface WsWalletRes {
    signature: Buffer;
    index: number;
}
// Incoming signing request: digest to sign plus the request index.
export interface WsWalletReq {
    digest: Buffer;
    index: number;
}
// Credentials identifying an authenticated web-socket session.
export interface IWebSocketKey {
    signature:string;
    sessionId:string;
}
/**
 * Client-side signing wallet: holds an encrypted EC private key on disk and
 * answers digest-signing requests arriving over a web-socket connection.
 */
export class WsWallet {
  public readonly className = 'WsWallet';
  private readonly log: Logger;
  private readonly endpoint: string;
  private ecdsaCurves: IEcdsaCurves;
  private keyData: KeyData;
  private ws?: WebSocket;

  constructor (private readonly opts: WsWalletOpts) {
    const fnTag = `${this.className}#constructor()`
    Checks.truthy(opts, `${fnTag} arg options`)
    this.log = LoggerProvider.getOrCreate({
      label: 'WsWallet',
      level: opts.logLevel || 'TRACE'
    })
    this.opts.keyName = opts.keyName || 'default'
    this.keyData = this.initKey(this.opts as IClientNewKey)
  }

  /**
   * @description will generate a new EC private key, or get existing key it already exists
   * @param args;
   * @type IClientNewKey
   */
  private initKey (args: IClientNewKey): KeyData {
    const fnTag = `${this.className}#initKey`
    this.log.debug(
      `${fnTag} look for key with name '${args.keyName}' or generate new key`
    )
    // NOTE(review): close() is async but intentionally not awaited here
    // (constructor call chain); any previous socket is simply told to close.
    this.close();
    const info = []
    const keyPath = getKeyPath(args.keyName)
    if (!fs.existsSync(keyPath)) {
      info.push(keyGen(args))
    }
    info.push(`extracting key '${args.keyName}' from key store`)
    this.opts.keyName = args.keyName
    const keyData = JSON.parse(fs.readFileSync(keyPath, 'utf8'))
    const curve = keyData.curve
    if (args.curve && curve !== args.curve) {
      info.push(
        `the requested curve type (${args.curve}) is different than the existing key: ${curve}`
      )
    }
    const result = info.join('\n')
    this.log.debug(`${fnTag} ${result}`)
    return keyData
  }

  /**
   * @description Closes existing and open new websocket connection for client
   */
  public async open (sessionId: string, endpoint?: string): Promise<IWebSocketKey> {
    const fnTag = `${this.className}#open`
    this.opts.endpoint = endpoint || this.opts.endpoint
    Checks.nonBlankString(this.opts.endpoint, `${fnTag}:this.opts.endpoint`)
    this.log.debug(`${fnTag} web-socket connection to ${this.opts.endpoint} for ${this.opts.keyName}`)
    this.close()
    try {
      this.log.debug(`${fnTag} retrieve password to unlock private key`)
      this.opts.password = await unlockKey(this.keyData, this.opts.password, this.log)
      // Prove key ownership by signing the server-provided session id.
      this.log.debug(`${fnTag} sign session ID for ${this.getPubKeyHex().substring(0, 12)}...`)
      const sessionSignature = (await this.sign(
        Buffer.from(sessionId, 'hex'),
        this.keyData,
        this.opts.password,
        this.log,
      )).toString('hex')
      const wsOpts = {
        // strictSSL === false disables certificate checks (testing only)
        rejectUnauthorized: this.opts.strictSSL !== false,
        headers: {
          'x-signature': sessionSignature,
          'x-session-id': sessionId,
          'x-pub-key-pem': JSON.stringify(this.keyData.pubKey)
        }
      }
      this.log.debug(`${fnTag} create web-socket client for ${this.opts.endpoint}`)
      this.ws = new WebSocket(this.opts.endpoint, wsOpts)
      const { opts, ws, sign, keyData, log } = this
      this.ws.onopen = function () {
        log.info(`${fnTag} connection opened to ${opts.endpoint} for key ${opts.keyName}`)
      }
      // Each incoming message is treated as a raw digest to sign.
      this.ws.on('message', async function incoming (digest: Buffer) { // message: WsWalletReq
        const signature = await sign(digest, keyData, opts.password, log)
        // const resp:WsWalletRes = {signature,index: message.index}
        log.info(`${fnTag} send signature to ${ws.url}: ${signature.toString('base64')}`)
        ws.send(signature)
      })
      this.ws.onclose = function incoming () {
        log.info(`${fnTag} connection to ${opts.endpoint} closed for key ${opts.keyName}`)
      }
      // Resolve with the session key once the socket opens; surface a
      // connection failure as a rejection.
      return await new Promise<IWebSocketKey>(function (resolve, reject) {
        ws.addEventListener(
          'open',
          function incoming () {
            resolve({
              signature: sessionSignature,
              sessionId
            })
          },
          { once: true }
        )
        ws.onerror = function (error) {
          // TODO extract error message from failed connection
          ws.close()
          reject(new Error(error.error?.rawPacket?.toString()))
        }
      })
    } catch (error) {
      // NOTE(review): the error is logged and swallowed, so callers receive
      // `undefined` instead of a rejection — the commented-out rethrow hints
      // this may be unintentional; confirm before changing.
      this.log.error(
        `${fnTag} failed to connect to ${this.opts.endpoint}: ${error}`
      )
      //throw new Error(error)
    }
  }

  /**
   * @description : close the WebSocket
   */
  async close (): Promise<void> {
    if (this.ws) {
      this.ws.close()
    }
  }

  /**
   * @description send out pubKey
   * @return pubKey pem file
   */
  getPubKeyHex () {
    const { pubKeyHex } = KEYUTIL.getKey(this.keyData.pubKey)
    return pubKeyHex
  }

  /**
   * @description request new session with ws-identity server (identity proxy
   * to communicate with Fabric application) and webSocketKey for the session
   * @param userId : name of key file stored by ws-wallet locally
   * also sets the userID fof storing the WS-X.509 certificate enrolled with Fabric;
   * @param endpoint: url to access of the Fabric application API
   * to request a new ws-identity session ticket
   * @return IWebSocketKey: the key needed to access the open web-socket conneciton
   * @note the session ticket must be requested by the Fabric app
   * The ws-identity server matches the IP used to request the ticket with
   * the IP connecting to it later (other apps can't use the same sessionId)
   */
  public async newSession (
    endpoint: string,
    keyName: string,
    curve?: ECCurveType): Promise<IWebSocketKey> {
    const fnTag = '#newSession'
    this.log.debug(`${fnTag} open new web-socket session`)
    let wsKey: IWebSocketKey;
    try {
      // Interactive confirmation; rejects (throws here) when declined.
      await this.approveRequest(endpoint)
      let resp;
      if (!this.opts.password) {
        this.log.info(`${fnTag} request declined`)
        return
      }
      await axios.post(endpoint, { key_name: keyName },
        {
          headers: {
            'accept': 'application/json',
            'pub_key_hex': this.getPubKeyHex()
          },
        },
      ).then(async function (response) {
        resp = response.data;
      });
      const { sessionId, url } = resp;
      wsKey = await this.open(sessionId, url);
      this.log.info(`${fnTag} web socket key issued`)
      return wsKey;
    } catch (err) {
      this.log.error(`${fnTag} error opening session ${err}`)
    }
  }

  /**
   * Ask the operator (via stdin) to approve issuing a web-socket key for a
   * 3rd-party endpoint. Resolves with the password to use, rejects when the
   * operator declines.
   *
   * FIX: the previous implementation called resolve(password) synchronously,
   * BEFORE the rl.question callback could run — the promise was already
   * settled, so the later reject() on a "No" answer was a no-op and the
   * approval prompt could never actually decline the request. The promise is
   * now settled only inside the answer callback.
   */
  private async approveRequest (endpoint) {
    const fnTag = '#approveRequest'
    const { password } = this.opts
    this.log.info(`${fnTag} issue web-socket key for 3rd party application using endpoint ${endpoint}?`)
    this.opts.password = await new Promise<string>(function (resolve, reject) {
      const rl = readline.createInterface({
        input: process.stdin,
        output: process.stdout
      });
      if (password) {
        rl.question('Yes(y) | No(n) :', function (approval) {
          rl.close();
          // Settle only after the operator has answered.
          if (!approval.toLowerCase().charAt(0).includes('y')) {
            reject(new Error('request declined'))
          } else {
            resolve(password)
          }
        })
      } else {
        // No stored password: prompt for one instead of asking for approval.
        rl.close();
        getPass().then(resolve).catch(reject)
      }
    })
  }

  /**
   * @description generate
   * @param prehashed digest as Buffer
   * @returns signature as string
   */
  private async sign (digest: Buffer, keyData: KeyData, password: string, log: any): Promise<Buffer> {
    const fnTag = '#sign'
    log.debug(`${fnTag} digest-size = ${digest.length}`)
    try {
      // Decrypt the stored private key, then produce a DER-encoded ECDSA
      // signature over the (already hashed) digest.
      const { prvKeyHex } = KEYUTIL.getKey(keyData.key, password)
      const ecdsa = ecdsaCurves[keyData.curve]
      const signKey = ecdsa.keyFromPrivate(prvKeyHex, 'hex')
      const sig = ecdsa.sign(digest, signKey)
      const signature = Buffer.from(sig.toDER())
      return signature
    } catch (error) {
      console.log(error)
      throw new Error(`${fnTag} failed to produce signature: ${error}`)
    }
  }
}
/**
 * Prompts (up to three times total) for the password that unlocks the stored
 * private key. Each attempt obtains a password via getPass, then test-decrypts
 * the key with KEYUTIL.getKey; on failure it recurses with attempt+1.
 * Resolves with the working password; rejects after too many failed attempts.
 */
function unlockKey(keyData:KeyData,password?,log?,attempt=1): Promise<string>{
    return new Promise(function (resolve, reject) {
        if(attempt<4){
            // zero-delay setTimeout defers the prompt to the next tick
            setTimeout(async function () {
                password = await getPass(password);
                try {
                    // throws when the password cannot decrypt the private key
                    KEYUTIL.getKey(keyData.key, password)
                    resolve(password)
                }catch(err){
                    attempt += 1
                    log.error(`Error unlocking key file: ${err}`)
                    // retry, chaining the nested promise's resolution to ours.
                    // NOTE(review): only .then(resolve) is chained — a rejection
                    // of the nested call ("Too many failed password attempts")
                    // is not forwarded and surfaces as an unhandled rejection.
                    unlockKey(keyData,null,log,attempt).then(resolve)
                }
            })
        }else{
            reject(new Error('Too many failed password attempts'))
        }
    })
}
/**
* Forces a process to wait until the socket's `readyState` becomes the specified value.
* not to be used in production !!!
* @param socket The socket whose `readyState` is being watched
* @param state The desired `readyState` for the socket
*/
export function waitForSocketState (
socket: WebSocket,
state: number
): Promise<void> {
return new Promise(function (resolve, reject) {
try {
setTimeout(function () {
if (socket.readyState === state) {
resolve()
} else {
waitForSocketState(socket, state).then(resolve)
}
})
} catch (err) {
reject(new Error(`Error waiting for socket state ${state}: ${err})`))
}
})
} | the_stack |
// Copyright (c) 2014 Henric Trotzig
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
import parse from './parse';
const fs = require('fs');
const path = require('path');
const requireRelative = require('require-relative');
// Collects the named exports contributed by one ES `export` statement.
function findESNamedExports(node) {
  if (node.type !== 'ExportNamedDeclaration') {
    return [];
  }
  // export { foo, bar as baz };
  if (node.specifiers.length) {
    return node.specifiers.map((specifier) => specifier.exported.name);
  }
  if (!node.declaration) {
    return [];
  }
  const { declaration } = node;
  // export function foo() {} / export class Foo {}
  if (
    declaration.type === 'FunctionDeclaration' ||
    declaration.type === 'ClassDeclaration'
  ) {
    return [declaration.id.name];
  }
  // export const a = 1, { b, c } = something();
  const names = [];
  declaration.declarations.forEach(({ id }) => {
    if (id.type === 'ObjectPattern') {
      id.properties.forEach(({ key }) => names.push(key.name));
    } else {
      names.push(id.name);
    }
  });
  return names;
}
// Resolves exports that are re-exported from another file, i.e.
// `module.exports = require('other.js')` (possibly behind a ternary).
// Returns { named, defaultName } or undefined when `node` is not such a case.
function resolveNestedNamedExports(node, absolutePathToFile) {
  if (node.type === 'ConditionalExpression') {
    // Ternary-style export — pick the truthy branch:
    // module.exports = foo ? require('a') : require('b');
    return resolveNestedNamedExports(node.consequent, absolutePathToFile);
  }
  const isRequireCall =
    node.type === 'CallExpression' &&
    node.callee.name === 'require' &&
    node.arguments.length === 1 &&
    node.arguments[0].type === 'StringLiteral';
  if (!isRequireCall) {
    return undefined;
  }
  // module.exports = require('someOtherFile.js');
  const pathToRequiredFile = requireRelative.resolve(
    node.arguments[0].value,
    path.dirname(absolutePathToFile));
  const requiredFileContent = fs.readFileSync(pathToRequiredFile, 'utf8');
  // eslint-disable-next-line no-use-before-define
  const { named, defaultName } = findExports(requiredFileContent, pathToRequiredFile);
  return { named, defaultName };
}
// Extracts CommonJS export names contributed by a single top-level statement.
// Usually returns an array of names; for `module.exports = require(...)` it
// returns the { named, defaultName } object from resolveNestedNamedExports.
// The pattern checks below are ORDER-SENSITIVE: call-shaped expressions are
// handled before assignment-shaped ones, and whole-object assignments before
// per-property assignments.
function findCommonJSExports(
  node,
  {
    definedNames,
    absolutePathToFile,
    aliasesForExports,
  },
) {
  if (node.type !== 'ExpressionStatement') {
    return [];
  }
  if (
    node.expression.type === 'CallExpression' &&
    node.expression.callee.type === 'MemberExpression' &&
    aliasesForExports.has(node.expression.callee.object.name) &&
    node.expression.callee.property.name === 'use' &&
    node.expression.arguments.length &&
    node.expression.arguments[0].type === 'Identifier'
  ) {
    // exports.use(foo);
    return [node.expression.arguments[0].name];
  }
  if (
    node.expression.type === 'CallExpression' &&
    node.expression.callee.type === 'MemberExpression' &&
    node.expression.callee.object.name === 'Object' &&
    node.expression.callee.property.name === 'defineProperty' &&
    node.expression.arguments.length > 1 &&
    node.expression.arguments[0].type === 'Identifier' &&
    aliasesForExports.has(node.expression.arguments[0].name) &&
    node.expression.arguments[1].type === 'StringLiteral'
  ) {
    // Object.defineProperty(exports, 'foo', { ... });
    return [node.expression.arguments[1].value];
  }
  // From here on only assignment-shaped expressions are relevant.
  const { left, right } = node.expression;
  if (!left || !right) {
    return [];
  }
  if (
    (left.object &&
      left.object.name === 'module' &&
      left.property.name === 'exports') ||
    aliasesForExports.has(left.name)
  ) {
    // module.exports = require('...') — delegate to the nested resolver.
    const nestedNamed = resolveNestedNamedExports(right, absolutePathToFile);
    if (nestedNamed) {
      return nestedNamed;
    }
    // module.exports = { foo: 'foo' };
    if (right.type === 'ObjectExpression') {
      return right.properties.map(({ key }) => key.name).filter(Boolean);
    }
    // module.exports = someIdentifier; — use the keys that findDefinedNames
    // recorded for that identifier's object literal.
    if (right.type === 'Identifier') {
      return definedNames[right.name] || [];
    }
  }
  if (!left.object || !left.property) {
    return [];
  }
  if (
    left.object.type === 'MemberExpression' &&
    left.object.object.name === 'module' &&
    left.object.property.name === 'exports'
  ) {
    // module.exports.foo = 'bar';
    return [left.property.name];
  }
  if (
    left.type === 'MemberExpression' &&
    left.object.type === 'Identifier' &&
    aliasesForExports.has(left.object.name)
  ) {
    // exports.foo = 'bar';
    return [left.property.name];
  }
  return [];
}
// Records, per top-level variable, which property names the file gives it:
// object-literal keys at declaration time, plus later `obj.prop = …`
// assignments. Mutates `definedNames` in place.
function findDefinedNames(node, definedNames) {
  if (node.type === 'ExpressionStatement') {
    const { left, right } = node.expression;
    // `obj.prop = …` — only registers onto names already declared below;
    // otherwise the push lands on a throwaway array (intentional no-op).
    if (left && right && left.object) {
      (definedNames[left.object.name] || []).push(left.property.name);
    }
  }
  if (node.type !== 'VariableDeclaration') {
    return;
  }
  node.declarations.forEach(({ id, init }) => {
    if (!init) {
      return;
    }
    if (init.type === 'ObjectExpression') {
      // const foo = { a: 1, b: 2 }; → foo: ['a', 'b']
      const keys = [];
      init.properties.forEach(({ key }) => {
        if (key && key.name) {
          keys.push(key.name);
        }
      });
      definedNames[id.name] = keys; // eslint-disable-line no-param-reassign
    } else if (init.type === 'FunctionExpression') {
      definedNames[id.name] = []; // eslint-disable-line no-param-reassign
    }
  });
}
/**
 * Finds every identifier the file uses as an alias for `exports`, e.g.
 *
 *   const moduleName = exports;
 *
 * The returned Set always contains 'exports' itself.
 */
function findAliasesForExports(nodes) {
  const aliases = new Set(['exports']);
  nodes
    .filter((node) => node.type === 'VariableDeclaration')
    .forEach((node) => {
      node.declarations.forEach(({ id, init }) => {
        // var foo = exports;
        if (init && init.type === 'Identifier' && init.name === 'exports') {
          aliases.add(id.name);
        }
      });
    });
  return aliases;
}
// Gathers named exports (ES + CommonJS) from the root nodes. When a node is
// a nested `module.exports = require(...)`, the resolved default name from
// that file is carried through as well.
function findNamedExports(
  nodes,
  {
    absolutePathToFile,
    definedNames,
    aliasesForExports,
  },
) {
  const named = [];
  let defaultName = null;
  nodes.forEach((node) => {
    named.push(...findESNamedExports(node));
    const commonJS = findCommonJSExports(node, {
      definedNames,
      absolutePathToFile,
      aliasesForExports,
    });
    if (Array.isArray(commonJS)) {
      named.push(...commonJS);
    } else {
      // resolveNestedNamedExports produced { named, defaultName }.
      named.push(...commonJS.named);
      defaultName = commonJS.defaultName;
    }
  });
  return { named, defaultName };
}
// Scans the root nodes for a default export and returns the identifier
// assigned to `module.exports`, or null when there is none (or when the
// default export has no simple name, e.g. `exports = …`).
function getDefaultExport(nodes) {
  let defaultName = null;
  nodes.some((node) => {
    if (node.type === 'ExportDefaultDeclaration') {
      return true;
    }
    if (node.type !== 'ExpressionStatement') {
      return false;
    }
    // Potential CommonJS export
    const { left, right } = node.expression;
    if (!left || !right) {
      return false;
    }
    if (left.name === 'exports') {
      // exports = …; — default export without a usable name.
      return true;
    }
    if (!left.object || !left.property) {
      // foo = 'bar';
      return false;
    }
    const isModuleExports =
      left.object.name === 'module' && left.property.name === 'exports';
    if (isModuleExports) {
      defaultName = right.name;
    }
    return isModuleExports;
  });
  return defaultName;
}
// Raw-text fallback: matches `module.exports = identifier` without an AST.
const DEFAULT_EXPORT_PATTERN = /\smodule\.exports\s*=\s*(\w+)/;
// Returns the exported identifier from the raw source, or undefined.
function findRawDefaultExport(data) {
  const match = data.match(DEFAULT_EXPORT_PATTERN);
  return match ? match[1] : undefined;
}
// Raw-text fallback: collects unique names from `exports.name = …;` lines,
// skipping the reserved name 'default'.
function findRawNamedExports(data) {
  const pattern = /^exports\.(\w+)\s*=\s*[\w.]+;$/gm;
  const names = new Set();
  // eslint-disable-next-line no-cond-assign
  let match = pattern.exec(data);
  while (match !== null) {
    const [, name] = match;
    if (name !== 'default') {
      names.add(name);
    }
    match = pattern.exec(data);
  }
  return Array.from(names);
}
/**
 * Returns the top-level statements of the AST. When the whole file is a
 * single wrapper expression —
 *
 *   (function () {
 *     module.exports = { foo: 'foo' };
 *   }.call(this));
 *
 * — the wrapped function's body is returned instead.
 */
function findRootNodes(ast) {
  const rootNodes = ast.program.body;
  if (rootNodes.length > 1) {
    return rootNodes;
  }
  try {
    const { callee } = rootNodes[0].expression;
    // `fn.call(this)` puts the function on callee.object; a plain IIFE
    // has the function as the callee itself.
    const wrapper = callee.object ? callee.object : callee;
    return wrapper.body.body;
  } catch (e) {
    // Not a wrapper expression — fall through to the real root nodes.
  }
  return rootNodes;
}
/**
 * Finds the exports of a module given its raw source and absolute path.
 * Returns { named, hasDefault, defaultName }. JSON files export every
 * top-level key plus a default; JS files are parsed and scanned via the
 * AST helpers above, with raw-text regex fallbacks when the AST finds
 * nothing.
 */
export default function findExports(data, absolutePathToFile) {
  if (/\.json$/.test(absolutePathToFile)) {
    // JSON module: each top-level key is a named export.
    return {
      named: Object.keys(JSON.parse(data)),
      hasDefault: true,
      defaultName: null,
    };
  }
  const rootNodes = findRootNodes(parse(data));
  const aliasesForExports = findAliasesForExports(rootNodes);
  const definedNames = {};
  rootNodes.forEach((node) => findDefinedNames(node, definedNames));
  const { named, defaultName } = findNamedExports(rootNodes, {
    absolutePathToFile,
    definedNames,
    aliasesForExports,
  });
  const defaultNameOrigin = getDefaultExport(rootNodes);
  let hasDefault =
    defaultNameOrigin != null || defaultName != null || aliasesForExports.size > 1;
  if (!hasDefault) {
    // Raw-text fallback for `module.exports = name`.
    const rawExportedId = findRawDefaultExport(data);
    hasDefault = !!rawExportedId;
    if (!named.length) {
      named.push(...(definedNames[rawExportedId] || []));
    }
  }
  if (!named.length) {
    // Last resort: scan the raw text for `exports.name = …;` lines.
    named.push(...findRawNamedExports(data));
  }
  return {
    named,
    hasDefault,
    defaultName: defaultName || defaultNameOrigin,
  };
}
import {
HTMLAnchorElementAttrs, HTMLElementAttrs, HTMLAreaElementAttrs, HTMLAudioElementAttrs,
HTMLBaseElementAttrs, HTMLBodyElementAttrs, HTMLBRElementAttrs, HTMLButtonElementAttrs,
HTMLCanvasElementAttrs, HTMLQuoteElementAttrs, HTMLTableCaptionElementAttrs, HTMLTableColElementAttrs,
HTMLModElementAttrs, HTMLDivElementAttrs, HTMLDListElementAttrs, HTMLFieldSetElementAttrs, HTMLFormElementAttrs,
HTMLHeadElementAttrs, HTMLHeadingElementAttrs, HTMLHRElementAttrs, HTMLHtmlElementAttrs, HTMLIFrameElementAttrs,
HTMLImageElementAttrs, HTMLInputElementAttrs, HTMLLabelElementAttrs, HTMLLegendElementAttrs, HTMLLIElementAttrs,
HTMLLinkElementAttrs, HTMLMapElementAttrs, HTMLMenuElementAttrs, HTMLMetaElementAttrs, HTMLMeterElementAttrs,
HTMLOListElementAttrs, HTMLOptGroupElementAttrs, HTMLOptionElementAttrs, HTMLParagraphElementAttrs,
HTMLPictureElementAttrs, HTMLPreElementAttrs, HTMLProgressElementAttrs, HTMLScriptElementAttrs,
HTMLSelectElementAttrs, HTMLSourceElementAttrs, HTMLSpanElementAttrs, HTMLStyleElementAttrs,
HTMLTableDataCellElementAttrs, HTMLTableElementAttrs, HTMLTableHeaderCellElementAttrs, HTMLTableRowElementAttrs,
HTMLTableSectionElementAttrs, HTMLTemplateElementAttrs, HTMLTextAreaElementAttrs, HTMLTitleElementAttrs,
HTMLTrackElementAttrs, HTMLUListElementAttrs, HTMLVideoElementAttrs,
AttributeDirective, htmlElementFactory,
} from "ivi";
/**
 * {@link AttributeDirective} with `""` value and {@link updateValue} sync function.
 *
 * Shared singleton returned by {@link VALUE} for every `""` input, so empty
 * values never allocate a fresh directive object.
 */
const VALUE_EMPTY: AttributeDirective<string> = { v: "", u: updateValue };
/**
 * {@link AttributeDirective} with `false` value and {@link updateChecked} sync function.
 *
 * Shared singleton returned by {@link CHECKED} for every `false` input.
 */
const CHECKED_FALSE: AttributeDirective<boolean> = { v: false, u: updateChecked };
/**
 * {@link AttributeDirective} with `true` value and {@link updateChecked} sync function.
 *
 * Shared singleton returned by {@link CHECKED} for every `true` input.
 */
const CHECKED_TRUE: AttributeDirective<boolean> = { v: true, u: updateChecked };
/**
 * Synchronization function for {@link AttributeDirective} created with {@link VALUE} function.
 *
 * On first render (`prev === undefined`) the property is written only when it
 * differs from the default `""`; on updates it is written only when the
 * element's current `value` actually differs from `next`.
 *
 * @param element Target element.
 * @param key Attribute key.
 * @param prev Previous value.
 * @param next Next value.
 */
function updateValue(
  element: Element,
  key: string,
  prev: string | undefined,
  next: string | undefined,
) {
  const input = element as HTMLInputElement | HTMLTextAreaElement;
  if (prev === void 0) {
    if (next !== "") {
      input.value = next!;
    }
    return;
  }
  if (next !== void 0 && input.value !== next) {
    input.value = next;
  }
}
/**
 * Synchronization function for {@link AttributeDirective} created with {@link CHECKED} function.
 *
 * On first render (`prev === undefined`) only truthy values are written; on
 * updates the property is written only when the element's current `checked`
 * state differs from `next`.
 *
 * @param element Target element.
 * @param key Attribute key.
 * @param prev Previous value.
 * @param next Next value.
 */
function updateChecked(
  element: Element,
  key: string,
  prev: boolean | undefined,
  next: boolean | undefined,
) {
  const input = element as HTMLInputElement;
  if (prev === void 0) {
    if (next) {
      input.checked = next;
    }
    return;
  }
  if (next !== void 0 && input.checked !== next) {
    input.checked = next!;
  }
}
/**
 * VALUE function creates a {@link AttributeDirective} that assigns a `value` property to an HTMLInputElement.
 *
 * The shared {@link VALUE_EMPTY} singleton is returned for `""` so that empty
 * values don't allocate a new directive object.
 *
 * @example
 *
 *     const e = input("", { value: VALUE("value") });
 *
 * @param v Value.
 * @returns {@link AttributeDirective}
 */
export const VALUE = (v: string): AttributeDirective<string> => {
  if (v === "") {
    return VALUE_EMPTY;
  }
  return { v, u: updateValue };
};
/**
 * CONTENT function creates a {@link AttributeDirective} that assigns a `value` property to an HTMLTextAreaElement.
 *
 * Alias of {@link VALUE}: both write the `value` property through the same
 * {@link updateValue} synchronization function.
 *
 * @example
 *
 *     const e = textarea("", { content: CONTENT("content") });
 *
 * @param v Value.
 * @returns {@link AttributeDirective}
 */
export const CONTENT = VALUE;
/**
 * CHECKED function creates a {@link AttributeDirective} that assigns a `checked` property to an HTMLInputElement.
 *
 * Always returns one of the two shared singletons ({@link CHECKED_TRUE} /
 * {@link CHECKED_FALSE}), so no allocation happens per call.
 *
 * @example
 *
 *     const e = input("", { checked: CHECKED(true) });
 *
 * @param v Checked value.
 * @returns {@link AttributeDirective}
 */
export const CHECKED = (v: boolean): AttributeDirective<boolean> => {
  if (v) {
    return CHECKED_TRUE;
  }
  return CHECKED_FALSE;
};
/* tslint:disable:max-line-length */
// Element factories: one /*#__PURE__*/-annotated htmlElementFactory call per
// HTML tag, each documented with its MDN reference. The PURE annotation lets
// bundlers drop factories for tags an application never uses.
/**
 * Creates OpNode HTML element <a>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/a}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <a>
 */
export const a = /*#__PURE__*/htmlElementFactory<HTMLAnchorElementAttrs, HTMLAnchorElement>("a");
/**
 * Creates OpNode HTML element <abbr>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/abbr}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <abbr>
 */
export const abbr = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("abbr");
/**
 * Creates OpNode HTML element <address>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/address}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <address>
 */
export const address = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("address");
/**
 * Creates OpNode HTML element <article>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/article}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <article>
 */
export const article = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("article");
/**
 * Creates OpNode HTML element <aside>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/aside}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <aside>
 */
export const aside = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("aside");
/**
 * Creates OpNode HTML element <b>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/b}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <b>
 */
export const b = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("b");
/**
 * Creates OpNode HTML element <base>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/base}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <base>
 */
export const base = /*#__PURE__*/htmlElementFactory<HTMLBaseElementAttrs, HTMLBaseElement>("base");
/**
 * Creates OpNode HTML element <bdo>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/bdo}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <bdo>
 */
export const bdo = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("bdo");
/**
 * Creates OpNode HTML element <blockquote>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/blockquote}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <blockquote>
 */
export const blockquote = /*#__PURE__*/htmlElementFactory<HTMLQuoteElementAttrs, HTMLQuoteElement>("blockquote");
/**
 * Creates OpNode HTML element <body>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/body}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <body>
 */
export const body = /*#__PURE__*/htmlElementFactory<HTMLBodyElementAttrs, HTMLBodyElement>("body");
/**
 * Creates OpNode HTML element <br>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/br}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <br>
 */
export const br = /*#__PURE__*/htmlElementFactory<HTMLBRElementAttrs, HTMLBRElement>("br");
/**
 * Creates OpNode HTML element <button>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/button}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <button>
 */
export const button = /*#__PURE__*/htmlElementFactory<HTMLButtonElementAttrs, HTMLButtonElement>("button");
/**
 * Creates OpNode HTML element <canvas>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/canvas}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <canvas>
 */
export const canvas = /*#__PURE__*/htmlElementFactory<HTMLCanvasElementAttrs, HTMLCanvasElement>("canvas");
/**
 * Creates OpNode HTML element <caption>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/caption}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <caption>
 */
export const caption = /*#__PURE__*/htmlElementFactory<HTMLTableCaptionElementAttrs, HTMLTableCaptionElement>("caption");
/**
 * Creates OpNode HTML element <cite>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/cite}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <cite>
 */
export const cite = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("cite");
/**
 * Creates OpNode HTML element <code>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/code}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <code>
 */
export const code = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("code");
/**
 * Creates OpNode HTML element <col>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/col}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <col>
 */
export const col = /*#__PURE__*/htmlElementFactory<HTMLTableColElementAttrs, HTMLTableColElement>("col");
/**
 * Creates OpNode HTML element <colgroup>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/colgroup}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <colgroup>
 */
export const colgroup = /*#__PURE__*/htmlElementFactory<HTMLTableColElementAttrs, HTMLTableColElement>("colgroup");
/**
 * Creates OpNode HTML element <del>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/del}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <del>
 */
export const del = /*#__PURE__*/htmlElementFactory<HTMLModElementAttrs, HTMLModElement>("del");
/**
 * Creates OpNode HTML element <dfn>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/dfn}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <dfn>
 */
export const dfn = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("dfn");
/**
 * Creates OpNode HTML element <div>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/div}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <div>
 */
export const div = /*#__PURE__*/htmlElementFactory<HTMLDivElementAttrs, HTMLDivElement>("div");
/**
 * Creates OpNode HTML element <dd>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/dd}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <dd>
 */
export const dd = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("dd");
/**
 * Creates OpNode HTML element <dl>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/dl}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <dl>
 */
export const dl = /*#__PURE__*/htmlElementFactory<HTMLDListElementAttrs, HTMLDListElement>("dl");
/**
 * Creates OpNode HTML element <dt>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/dt}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <dt>
 */
export const dt = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("dt");
/**
 * Creates OpNode HTML element <em>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/em}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <em>
 */
export const em = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("em");
/**
 * Creates OpNode HTML element <fieldset>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/fieldset}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <fieldset>
 */
export const fieldset = /*#__PURE__*/htmlElementFactory<HTMLFieldSetElementAttrs, HTMLFieldSetElement>("fieldset");
/**
 * Creates OpNode HTML element <figcaption>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/figcaption}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <figcaption>
 */
export const figcaption = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("figcaption");
/**
 * Creates OpNode HTML element <figure>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/figure}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <figure>
 */
export const figure = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("figure");
/**
 * Creates OpNode HTML element <footer>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/footer}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <footer>
 */
export const footer = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("footer");
/**
 * Creates OpNode HTML element <form>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/form}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <form>
 */
export const form = /*#__PURE__*/htmlElementFactory<HTMLFormElementAttrs, HTMLFormElement>("form");
/**
 * Creates OpNode HTML element <h1>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/h1}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <h1>
 */
export const h1 = /*#__PURE__*/htmlElementFactory<HTMLHeadingElementAttrs, HTMLHeadingElement>("h1");
/**
 * Creates OpNode HTML element <h2>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/h2}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <h2>
 */
export const h2 = /*#__PURE__*/htmlElementFactory<HTMLHeadingElementAttrs, HTMLHeadingElement>("h2");
/**
 * Creates OpNode HTML element <h3>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/h3}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <h3>
 */
export const h3 = /*#__PURE__*/htmlElementFactory<HTMLHeadingElementAttrs, HTMLHeadingElement>("h3");
/**
 * Creates OpNode HTML element <h4>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/h4}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <h4>
 */
export const h4 = /*#__PURE__*/htmlElementFactory<HTMLHeadingElementAttrs, HTMLHeadingElement>("h4");
/**
 * Creates OpNode HTML element <h5>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/h5}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <h5>
 */
export const h5 = /*#__PURE__*/htmlElementFactory<HTMLHeadingElementAttrs, HTMLHeadingElement>("h5");
/**
 * Creates OpNode HTML element <h6>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/h6}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <h6>
 */
export const h6 = /*#__PURE__*/htmlElementFactory<HTMLHeadingElementAttrs, HTMLHeadingElement>("h6");
/**
 * Creates OpNode HTML element <head>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/head}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <head>
 */
export const head = /*#__PURE__*/htmlElementFactory<HTMLHeadElementAttrs, HTMLHeadElement>("head");
/**
 * Creates OpNode HTML element <header>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/header}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <header>
 */
export const header = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("header");
/**
 * Creates OpNode HTML element <hgroup>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/hgroup}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <hgroup>
 */
export const hgroup = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("hgroup");
/**
 * Creates OpNode HTML element <hr>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/hr}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <hr>
 */
export const hr = /*#__PURE__*/htmlElementFactory<HTMLHRElementAttrs, HTMLHRElement>("hr");
/**
 * Creates OpNode HTML element <html>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/html}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <html>
 */
export const html = /*#__PURE__*/htmlElementFactory<HTMLHtmlElementAttrs, HTMLHtmlElement>("html");
/**
 * Creates OpNode HTML element <i>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/i}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <i>
 */
export const i = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("i");
/**
 * Creates OpNode HTML element <iframe>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <iframe>
 */
export const iframe = /*#__PURE__*/htmlElementFactory<HTMLIFrameElementAttrs, HTMLIFrameElement>("iframe");
/**
 * Creates OpNode HTML element <img>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/img}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <img>
 */
export const img = /*#__PURE__*/htmlElementFactory<HTMLImageElementAttrs, HTMLImageElement>("img");
/**
 * Creates OpNode HTML element <area>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/area}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <area>
 */
export const area = /*#__PURE__*/htmlElementFactory<HTMLAreaElementAttrs, HTMLAreaElement>("area");
/**
 * Creates OpNode HTML element <map>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/map}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <map>
 */
export const map = /*#__PURE__*/htmlElementFactory<HTMLMapElementAttrs, HTMLMapElement>("map");
/**
 * Creates OpNode HTML element <ins>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/ins}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <ins>
 */
export const ins = /*#__PURE__*/htmlElementFactory<HTMLModElementAttrs, HTMLModElement>("ins");
/**
 * Creates OpNode HTML element <kbd>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/kbd}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <kbd>
 */
export const kbd = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("kbd");
/**
 * Creates OpNode HTML element <label>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/label}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <label>
 */
export const label = /*#__PURE__*/htmlElementFactory<HTMLLabelElementAttrs, HTMLLabelElement>("label");
/**
 * Creates OpNode HTML element <legend>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/legend}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <legend>
 */
export const legend = /*#__PURE__*/htmlElementFactory<HTMLLegendElementAttrs, HTMLLegendElement>("legend");
/**
 * Creates OpNode HTML element <li>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/li}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <li>
 */
export const li = /*#__PURE__*/htmlElementFactory<HTMLLIElementAttrs, HTMLLIElement>("li");
/**
 * Creates OpNode HTML element <link>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/link}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <link>
 */
export const link = /*#__PURE__*/htmlElementFactory<HTMLLinkElementAttrs, HTMLLinkElement>("link");
/**
 * Creates OpNode HTML element <main>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/main}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <main>
 */
export const main = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("main");
/**
 * Creates OpNode HTML element <mark>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/mark}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <mark>
 */
export const mark = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("mark");
/**
 * Creates OpNode HTML element <menu>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/menu}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <menu>
 */
export const menu = /*#__PURE__*/htmlElementFactory<HTMLMenuElementAttrs, HTMLMenuElement>("menu");
/**
 * Creates OpNode HTML element <meta>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/meta}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <meta>
 */
export const meta = /*#__PURE__*/htmlElementFactory<HTMLMetaElementAttrs, HTMLMetaElement>("meta");
/**
 * Creates OpNode HTML element <meter>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/meter}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <meter>
 */
export const meter = /*#__PURE__*/htmlElementFactory<HTMLMeterElementAttrs, HTMLMeterElement>("meter");
/**
 * Creates OpNode HTML element <nav>.
 *
 * {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/nav}
 *
 * @param className Class name.
 * @param attrs Attributes.
 * @param children Children nodes.
 * @returns OpNode HTML element <nav>
 */
export const nav = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("nav");
/**
* Creates OpNode HTML element <noscript>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/noscript}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <noscript>
*/
export const noscript = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("noscript");
/**
* Creates OpNode HTML element <ol>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/ol}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <ol>
*/
export const ol = /*#__PURE__*/htmlElementFactory<HTMLOListElementAttrs, HTMLOListElement>("ol");
/**
* Creates OpNode HTML element <optgroup>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/optgroup}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <optgroup>
*/
export const optgroup = /*#__PURE__*/htmlElementFactory<HTMLOptGroupElementAttrs, HTMLOptGroupElement>("optgroup");
/**
* Creates OpNode HTML element <option>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/option}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <option>
*/
export const option = /*#__PURE__*/htmlElementFactory<HTMLOptionElementAttrs, HTMLOptionElement>("option");
/**
* Creates OpNode HTML element <p>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/p}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <p>
*/
export const p = /*#__PURE__*/htmlElementFactory<HTMLParagraphElementAttrs, HTMLParagraphElement>("p");
/**
* Creates OpNode HTML element <picture>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/picture}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <picture>
*/
export const picture = /*#__PURE__*/htmlElementFactory<HTMLPictureElementAttrs, HTMLPictureElement>("picture");
/**
* Creates OpNode HTML element <pre>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/pre}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <pre>
*/
export const pre = /*#__PURE__*/htmlElementFactory<HTMLPreElementAttrs, HTMLPreElement>("pre");
/**
* Creates OpNode HTML element <progress>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/progress}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <progress>
*/
export const progress = /*#__PURE__*/htmlElementFactory<HTMLProgressElementAttrs, HTMLProgressElement>("progress");
/**
* Creates OpNode HTML element <q>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/q}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <q>
*/
export const q = /*#__PURE__*/htmlElementFactory<HTMLQuoteElementAttrs, HTMLQuoteElement>("q");
/**
* Creates OpNode HTML element <rt>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/rt}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <rt>
*/
export const rt = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("rt");
/**
* Creates OpNode HTML element <ruby>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/ruby}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <ruby>
*/
export const ruby = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("ruby");
/**
* Creates OpNode HTML element <s>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/s}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <s>
*/
export const s = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("s");
/**
* Creates OpNode HTML element <samp>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/samp}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <samp>
*/
export const samp = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("samp");
/**
* Creates OpNode HTML element <script>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/script}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <script>
*/
export const script = /*#__PURE__*/htmlElementFactory<HTMLScriptElementAttrs, HTMLScriptElement>("script");
/**
* Creates OpNode HTML element <section>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/section}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <section>
*/
export const section = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("section");
/**
* Creates OpNode HTML element <select>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/select}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <select>
*/
export const select = /*#__PURE__*/htmlElementFactory<HTMLSelectElementAttrs, HTMLSelectElement>("select");
/**
* Creates OpNode HTML element <source>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/source}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <source>
*/
export const source = /*#__PURE__*/htmlElementFactory<HTMLSourceElementAttrs, HTMLSourceElement>("source");
/**
* Creates OpNode HTML element <span>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/span}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <span>
*/
export const span = /*#__PURE__*/htmlElementFactory<HTMLSpanElementAttrs, HTMLSpanElement>("span");
/**
* Creates OpNode HTML element <strong>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/strong}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <strong>
*/
export const strong = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("strong");
/**
* Creates OpNode HTML element <style>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/style}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <style>
*/
export const style = /*#__PURE__*/htmlElementFactory<HTMLStyleElementAttrs, HTMLStyleElement>("style");
/**
* Creates OpNode HTML element <sub>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/sub}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <sub>
*/
export const sub = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("sub");
/**
* Creates OpNode HTML element <sup>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/sup}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <sup>
*/
export const sup = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("sup");
/**
* Creates OpNode HTML element <table>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/table}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <table>
*/
export const table = /*#__PURE__*/htmlElementFactory<HTMLTableElementAttrs, HTMLTableElement>("table");
/**
* Creates OpNode HTML element <tbody>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/tbody}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <tbody>
*/
export const tbody = /*#__PURE__*/htmlElementFactory<HTMLTableSectionElementAttrs, HTMLTableSectionElement>("tbody");
/**
* Creates OpNode HTML element <td>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/td}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <td>
*/
export const td = /*#__PURE__*/htmlElementFactory<HTMLTableDataCellElementAttrs, HTMLTableDataCellElement>("td");
/**
* Creates OpNode HTML element <template>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/template}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <template>
*/
export const template = /*#__PURE__*/htmlElementFactory<HTMLTemplateElementAttrs, HTMLTemplateElement>("template");
/**
* Creates OpNode HTML element <tfoot>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/tfoot}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <tfoot>
*/
export const tfoot = /*#__PURE__*/htmlElementFactory<HTMLTableSectionElementAttrs, HTMLTableSectionElement>("tfoot");
/**
* Creates OpNode HTML element <th>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/th}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <th>
*/
export const th = /*#__PURE__*/htmlElementFactory<HTMLTableHeaderCellElementAttrs, HTMLTableHeaderCellElement>("th");
/**
* Creates OpNode HTML element <thead>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/thead}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <thead>
*/
export const thead = /*#__PURE__*/htmlElementFactory<HTMLTableSectionElementAttrs, HTMLTableSectionElement>("thead");
/**
* Creates OpNode HTML element <time>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/time}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <time>
*/
export const time = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("time");
/**
* Creates OpNode HTML element <title>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/title}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <title>
*/
export const title = /*#__PURE__*/htmlElementFactory<HTMLTitleElementAttrs, HTMLTitleElement>("title");
/**
* Creates OpNode HTML element <tr>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/tr}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <tr>
*/
export const tr = /*#__PURE__*/htmlElementFactory<HTMLTableRowElementAttrs, HTMLTableRowElement>("tr");
/**
* Creates OpNode HTML element <track>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/track}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <track>
*/
export const track = /*#__PURE__*/htmlElementFactory<HTMLTrackElementAttrs, HTMLTrackElement>("track");
/**
* Creates OpNode HTML element <u>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/u}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <u>
*/
export const u = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("u");
/**
* Creates OpNode HTML element <ul>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/ul}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <ul>
*/
export const ul = /*#__PURE__*/htmlElementFactory<HTMLUListElementAttrs, HTMLUListElement>("ul");
/**
* Creates OpNode HTML element <wbr>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/wbr}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <wbr>
*/
export const wbr = /*#__PURE__*/htmlElementFactory<HTMLElementAttrs, HTMLElement>("wbr");
/**
* Creates OpNode HTML element <textarea>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/textarea}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <textarea>
*/
export const textarea = /*#__PURE__*/htmlElementFactory<HTMLTextAreaElementAttrs, HTMLTextAreaElement>("textarea");
/**
* Creates OpNode HTML element <input>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <input>
*/
export const input = /*#__PURE__*/htmlElementFactory<HTMLInputElementAttrs, HTMLInputElement>("input");
/**
* Creates OpNode HTML element <audio>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/audio}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <audio>
*/
export const audio = /*#__PURE__*/htmlElementFactory<HTMLAudioElementAttrs, HTMLAudioElement>("audio");
/**
* Creates OpNode HTML element <video>.
*
* {@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/video}
*
* @param className Class name.
* @param attrs Attributes.
* @param children Children nodes.
* @returns OpNode HTML element <video>
*/
export const video = /*#__PURE__*/htmlElementFactory<HTMLVideoElementAttrs, HTMLVideoElement>("video"); | the_stack |
import {HttpClient} from "@angular/common/http";
import {Component, EventEmitter, Inject, Input, OnChanges, OnDestroy, OnInit, Output, SimpleChanges, ViewChild, ViewContainerRef} from "@angular/core";
import {FormControl, FormGroup} from "@angular/forms";
import {MatDialogConfig} from "@angular/material/dialog";
import {MatSnackBar} from "@angular/material/snack-bar";
import {MatStepper} from "@angular/material/stepper";
import {TdDialogService} from "@covalent/core/dialogs";
import {TdLoadingService} from "@covalent/core/loading";
import "rxjs/add/observable/from";
import 'rxjs/add/observable/fromPromise';
import 'rxjs/add/observable/of';
import 'rxjs/add/operator/debounceTime';
import 'rxjs/add/operator/do';
import "rxjs/add/operator/filter";
import 'rxjs/add/operator/map';
import "rxjs/add/operator/switchMap";
import 'rxjs/add/operator/take';
import 'rxjs/add/operator/toPromise';
import {Observable} from "rxjs/Observable";
import {catchError, debounceTime, finalize, map, switchMap, tap} from 'rxjs/operators';
import {Subject} from "rxjs/Subject";
import {ISubscription} from "rxjs/Subscription";
import * as _ from "underscore";
import {SideNavService} from "../../../services/SideNavService";
import {DatasetPreviewStepperDialogComponent, DatasetPreviewStepperDialogData} from "../../catalog-dataset-preview/preview-stepper/dataset-preview-stepper-dialog.component";
import {DatasetPreviewStepperSavedEvent} from "../../catalog-dataset-preview/preview-stepper/dataset-preview-stepper.component";
import {DatasetTable} from "../../catalog/api/models/dataset-table";
import {DataSource} from "../../catalog/api/models/datasource";
import {CatalogService} from "../../catalog/api/services/catalog.service";
import {TableColumn} from "../../catalog/datasource/preview-schema/model/table-view-model";
import {FeedDataTransformation} from "../../model/feed-data-transformation";
import {SparkDataSet} from "../../model/spark-data-set.model";
import {UserDatasource} from "../../model/user-datasource";
import {DatasourcesService} from "../../services/DatasourcesService";
import {HiveService} from "../../services/HiveService";
import {VisualQueryService} from "../../services/VisualQueryService";
import {DatasourcesServiceStatic, SchemaField, TableSchema} from "../wrangler";
import {QueryEngine} from "../wrangler/query-engine";
import {ConnectionDialog, ConnectionDialogConfig, ConnectionDialogResponse, ConnectionDialogResponseStatus} from "./connection-dialog/connection-dialog.component";
import {FlowChartComponent} from "./flow-chart/flow-chart.component";
import {FlowChart} from "./flow-chart/model/flow-chart.model";
// Legacy KeyboardEvent.keyCode values consumed by the _keydown/_keyup host
// listeners below. (keyCode is deprecated in favor of KeyboardEvent.key, but
// this component still binds against the numeric codes.)
/**
 * Code for the delete key.
 */
const DELETE_KEY_CODE = 46;
/**
 * Code for control key (tracked to detect Ctrl+A).
 */
const CTRL_KEY_CODE = 17;
/**
 * Code for A key (select-all when combined with Ctrl).
 */
const A_KEY_CODE = 65;
/**
 * Code for esc key (clears the chart selection).
 */
const ESC_KEY_CODE = 27;
/**
* Displays the Build Query step of the Visual Query page.
*
* There are two modes for how the user may build their query:
*
* - Visual Mode - (default) A {@code QueryEngine} is used to retrieve a list of tables and the schema is displayed in a flow chart. The nodes in the flow chart can be connected to create joins
* between tables.
*
* - Advanced Mode - A textarea is provided for the user to input their query.
*/
@Component({
selector: 'build-query-ng2',
styleUrls: ["./build-query-ng2.component.css"],
templateUrl: "./build-query-ng2.component.html",
host: {
'(document:keyup)': '_keyup($event)',
'(document:keydown)': '_keydown($event)',
}
})
export class BuildQueryComponent implements OnDestroy, OnChanges, OnInit {
/**
* Query engine for determining capabilities.
*/
@Input()
engine: QueryEngine<any>;
/**
* Data transformation model
*/
@Input()
model: FeedDataTransformation;
/**
* Flag to show the datasource drop down for autocomplete
*/
@Input()
showDatasources?: boolean = true;
@Input()
showCancel?:boolean = true;
@Input()
stepper: MatStepper;
/**
* The form for the page
*/
@Input()
form: FormGroup;
/**
* Indicates if query execution failed
*/
@Input()
queryExecutionFailure:boolean;
/**
* Event emitted to return to the previous step
*/
@Output()
back = new EventEmitter<void>();
/**
* Event emitted to advance to the next step
*/
@Output()
next = new EventEmitter<void>();
/**
* Event emitted to indicate sql was changed
*/
@Output()
sqlChange = new EventEmitter<string>();
/**
* Indicates if the UI is in advanced mode
*/
advancedMode: boolean = false;
/**
* Text indicating which node to switch to
*/
advancedModeText: string;
/**
* List of data sources to display.
*/
allDatasources: UserDatasource[] = [];
/**
* List of data sources to display.
*/
availableDatasources: UserDatasource[] = [];
availableSQLDatasources: UserDatasource[] = [];
/**
*
* @type {any[]}
*/
availableCatalogSQLDataSources: DataSource[] = [];
availableCatalogSqlDataSourceIds:string[];
/**
* the id representing the Hive datasourceIds
*/
hiveCatalogDataSourceIds:string[];
/**
* Model for the chart.
*/
chartViewModel: any = {data: {nodes: []}, nodes: [], connections: []}
/**
* Indicates that there was an error retrieving the list of tables.
* @type {boolean} true if there was an error or false otherwise
*/
databaseConnectionError: boolean = false;
/**
* Error message to be displayed.
*/
error: string;
/**
* Height offset from the top of the page.
*/
@Input()
heightOffset: number;
/**
* Indicates if the model is valid.
*/
isValid: boolean = false;
/**
* Indicates that the page is being loaded.
*/
loadingPage: boolean = true;
/**
* Indicates that a table schema is being loaded.
*/
loadingSchema: boolean = false;
/**
* Next node id.
*/
nextNodeID = 10;
/**
* List of the data sources used in model.
* @type {Array.<string>}
*/
selectedDatasourceIds: string[] = [];
/**
* holds the metadata about each column and table that is used to build the SQL str in the getSQLModel() method
*/
selectedColumnsAndTables: any = [];
/**
* list of catalog datasources used in this model
* @type {any[]}
*/
selectedCatalogDatsSourceIds: string[] = []
@ViewChild("flowChart")
flowChart: FlowChartComponent;
selectedTable: string;
/**
* Aysnc autocomplete list of tables
*/
public filteredTables: DatasourcesServiceStatic.TableReference[] =[];
/**
* List of native data sources to exclude from the model.
*/
private nativeDataSourceIds: string[] = [];
private fileDataSource: UserDatasource = {id: "FILE", name: "Local File", description: "Local File", type: "File"}
/**
* flag to indicate the ctrl key is pressed
*/
ctrlDown: boolean;
//callbacks
onCreateConnectionSubscription: ISubscription;
onEditConnectionSubscription: ISubscription;
onDeleteConnectionSubscription: ISubscription;
autocompleteLoading:boolean = false;
autocompleteNoDataFound:boolean = false;
/**
* flag when the query engine is loaded
*/
engineLoaded:boolean = false;
/**
 * Constructs a {@code BuildQueryComponent}.
 *
 * The legacy AngularJS-era services (HiveService, SideNavService,
 * VisualQueryService, DatasourcesService) are injected by string token via
 * {@code @Inject}. The side nav is hidden immediately so the flow chart can
 * use the full width.
 */
constructor(private _dialogService: TdDialogService,
            private viewContainerRef: ViewContainerRef,
            private _loadingService: TdLoadingService,
            @Inject("HiveService") private hiveService: HiveService,
            @Inject("SideNavService") private sideNavService: SideNavService,
            @Inject("VisualQueryService") private visualQueryService: VisualQueryService,
            @Inject("DatasourcesService") private datasourcesService: DatasourcesService,
            private catalogService: CatalogService,
            private http: HttpClient,
            private snackBar: MatSnackBar) {
    // Setup environment
    this.sideNavService.hideSideNav();
}
/**
 * Builds the reactive form controls used by this step.
 *
 * Adds a "datasource" control whose value changes update the model's catalog
 * data source id, and a "tableAutocomplete" control wired to a debounced
 * async table search. The search pipeline: debounce 300ms, reset the status
 * flags, switchMap into onAutocompleteQuerySearch (so stale requests are
 * cancelled), map lookup failures to an empty list while flagging
 * databaseConnectionError, and clear the loading flag via finalize.
 */
private initFormComponents() {
    if (this.form == undefined) {
        this.form = new FormGroup({});
    }
    if (this.showDatasources) {
        let datasource = new FormControl();
        this.form.addControl("datasource", datasource);
        datasource.valueChanges.subscribe((datasourceId: string) => {
            this.model.$catalogDataSourceId = datasourceId;
            this.onDatasourceChange();
        });
        let tableAutocomplete = new FormControl();
        this.form.addControl("tableAutocomplete", tableAutocomplete);
        // Captures the last submitted search term so the subscriber can tell
        // an empty result for a real query from a cleared input.
        let searchTerm = "";
        tableAutocomplete.valueChanges
            .pipe(
                debounceTime(300),
                tap(() => {
                    this.autocompleteLoading = true;
                    this.databaseConnectionError = false;
                    this.autocompleteNoDataFound = false;
                }),
                switchMap(text => {
                    searchTerm = text;
                    return this.onAutocompleteQuerySearch(text)
                        .pipe(
                            // Surface lookup failures as a connection error and
                            // keep the stream alive with an empty result list.
                            catchError(() => {
                                this.databaseConnectionError = true
                                return Observable.of([])
                            }),
                            finalize(() => this.autocompleteLoading = false))
                }
                )).subscribe((results: DatasourcesServiceStatic.TableReference[]) => {
                    this.filteredTables = results
                    // Only show the "no data" hint when a non-empty term found nothing.
                    if (searchTerm && searchTerm != "" && this.filteredTables.length == 0) {
                        this.autocompleteNoDataFound = true;
                    }
                    else {
                        this.autocompleteNoDataFound = false;
                    }
                });
    }
}
/**
 * Verifies the user selected a valid table from the autocomplete.
 *
 * Only options chosen from the suggestion list (which carry a "fullName"
 * property) are accepted; manually typed names are currently not validated
 * (see the disabled branch below).
 *
 * NOTE(review): the accepted option object is assigned to
 * {@code selectedTable}, which is declared as a string — confirm whether the
 * field's type or this assignment is the intended one.
 */
checkValidSelection(): void {
    this.selectedTable = null;
    let table = this.selectedTableOption();
    if (table) {
        // User selected option from list so must be valid
        if (table.hasOwnProperty("fullName")) {
            this.selectedTable = table;
        } else {
            /*
            if (table.indexOf(".") > -1) {
            // User may have manually typed the full name so need to validate
            let tableParts = table.split(".");
            let schemaPart = tableParts[0];
            let tablePart = tableParts[1];
            this.onAutocompleteQuerySearch(tablePart).then((tables:any[]) => {
            if (tables.length > 0) {
            this.selectedTable = tables.find((v:any)=>{
            return (v.fullName == table);
            });
            }
            });
            }
            */
        }
    }
}
/**
 * Display function for the table autocomplete: renders a matched table
 * reference as its full name.
 *
 * @param table the matched table reference, if any
 * @returns the table's full name, or undefined when nothing is matched
 */
tableAutocompleteDisplay(table?: DatasourcesServiceStatic.TableReference): string | undefined {
    if (!table) {
        return undefined;
    }
    return table.fullName;
}
/**
 * Gets or sets the SQL for the advanced mode.
 *
 * When called with a non-null {@code sql}, stores it on the model,
 * re-validates, and emits {@code sqlChange}. When called with no argument it
 * acts purely as a getter.
 *
 * @param sql the new SQL statement, or null to only read the current value
 * @returns the current SQL statement
 */
advancedModeSql(sql: string = null) {
    if (sql !== null) {
        this.model.sql = sql;
        this.validate();
        // Fix: emit only when a new value is actually set. The previous
        // implementation emitted unconditionally, firing sqlChange with null
        // on every getter call.
        this.sqlChange.emit(sql);
    }
    return this.model.sql;
}
/**
 * Indicates if the active datasource can be changed: there must be no error,
 * and either the engine allows multiple data sources or none is selected yet.
 */
canChangeDatasource(): boolean {
    if (this.error != null) {
        return false;
    }
    return this.engine.allowMultipleDataSources || this.selectedDatasourceIds.length === 0;
}
/**
 * Returns the current value of the table autocomplete control, or undefined
 * when the control has not been registered on the form.
 */
selectedTableOption(): any {
    if (!this.form.contains('tableAutocomplete')) {
        return undefined;
    }
    return this.form.get('tableAutocomplete').value;
}
/**
 * Adds the currently selected table to the FlowChart, then clears the
 * autocomplete input and the selection.
 */
onAddTable() {
    this.sideNavService.hideSideNav();
    const pendingTable = this.selectedTable;
    if (!pendingTable) {
        return;
    }
    this.onTableClick(pendingTable);
    this.form.get('tableAutocomplete').reset('');
    this.selectedTable = null;
}
/**
 * Loads the catalog data sources for the "hive" and "jdbc" plugins, caches
 * them (and their distinct ids) on this component, and returns them.
 */
private fetchCatalogDataSources(): Observable<DataSource[]> {
    return this.catalogService.getDataSourcesForPluginIds(["hive", "jdbc"])
        .pipe(map(datasources => {
            this.availableCatalogSqlDataSourceIds = [];
            if (!datasources || datasources.length == 0) {
                this.availableCatalogSQLDataSources = [];
                return this.availableCatalogSQLDataSources;
            }
            // Sort by title, then (stable sort) by the connector's plugin id.
            this.availableCatalogSQLDataSources = _(datasources).chain()
                .sortBy((ds: DataSource) => ds.title)
                .sortBy((ds: DataSource) => ds.connector.pluginId)
                .value();
            for (const ds of this.availableCatalogSQLDataSources) {
                if (this.availableCatalogSqlDataSourceIds.indexOf(ds.id) < 0) {
                    this.availableCatalogSqlDataSourceIds.push(ds.id);
                }
            }
            return this.availableCatalogSQLDataSources;
        }));
}
/**
 * Initialize state from services.
 *
 * Combines the engine's native data sources, the legacy user data sources,
 * and the catalog (hive/jdbc) data sources, keeps those the engine supports,
 * and publishes them through the returned observable. Failures are logged,
 * stored on {@code error}, and propagated to subscribers; {@code loadingPage}
 * is cleared in either case.
 */
private init(): Observable<UserDatasource[]> {
    let datasources$ = new Subject<UserDatasource[]>();
    // Get the list of data sources
    Promise.all([this.engine.getNativeDataSources(), this.datasourcesService.findAll(), this.fetchCatalogDataSources().toPromise()])
        .then(resultList => {
            // Remember which ids are native so they can be treated specially.
            this.nativeDataSourceIds = resultList[0].map((dataSource: UserDatasource): string => dataSource.id);
            const supportedDatasources = resultList[0].concat(resultList[1]).filter(this.engine.supportsDataSource);
            if (supportedDatasources.length > 0) {
                return supportedDatasources;
            } else {
                // Build a helpful message listing the engine's supported types.
                const supportedNames = ((supportedNameList) => {
                    if (supportedNameList.length === 0) {
                        return "";
                    } else if (supportedNameList.length === 1) {
                        return `Please create a ${supportedNameList[0]} data source and try again.`;
                    } else {
                        return `Please create one of the following data sources and try again: ${supportedNameList.join(", ")}`;
                    }
                })(this.engine.getSupportedDataSourceNames());
                throw new Error("No supported data sources were found. " + supportedNames);
            }
        })
        .then((datasources: UserDatasource[]) => {
            this.updateAvailableDatasources(datasources);
            this.allDatasources = datasources;
            /*
            if (this.model.$selectedDatasourceId == null) {
            this.model.$selectedDatasourceId = datasources[0].id;
            this.form.get("datasource").setValue(this.model.$selectedDatasourceId);
            }
            */
            this.validate();
            datasources$.next(datasources);
        })
        .catch((err: any) => {
            console.error(err);
            this.error = err;
            datasources$.error(err);
        })
        .then(() => {
            // Runs after success or failure (the catch above recovers the chain).
            this.loadingPage = false;
        });
    return datasources$.asObservable();
}
/**
 * Replaces the list of available data sources, ignoring empty/undefined input.
 */
private updateAvailableDatasources(datasources?: UserDatasource[]) {
    if (!datasources) {
        return;
    }
    this.availableDatasources = datasources;
}
/**
 * Tracks Ctrl key presses so Ctrl+A can be detected in {@code _keyup}.
 * Consumes the event when Ctrl is pressed.
 */
private _keydown(evt: KeyboardEvent) {
    if (evt.keyCode !== CTRL_KEY_CODE) {
        return;
    }
    this.ctrlDown = true;
    evt.stopPropagation();
    evt.preventDefault();
}
/**
 * Handles chart keyboard shortcuts: Delete removes the selection, Ctrl+A
 * selects everything, Escape clears the selection, and releasing Ctrl resets
 * the ctrlDown flag.
 */
private _keyup(evt: KeyboardEvent) {
    switch (evt.keyCode) {
        case DELETE_KEY_CODE:
            // Delete removes the selected nodes/connections and re-validates.
            this.chartViewModel.deleteSelected();
            this.validate();
            break;
        case A_KEY_CODE:
            // Ctrl + A selects everything on the chart.
            if (this.ctrlDown) {
                this.chartViewModel.selectAll();
            }
            break;
        case ESC_KEY_CODE:
            // Escape clears the selection.
            this.chartViewModel.deselectAll();
            break;
        case CTRL_KEY_CODE:
            this.ctrlDown = false;
            evt.stopPropagation();
            evt.preventDefault();
            break;
    }
}
/**
 * Adds any datasets already present on the model (e.g. from the preview step)
 * to the canvas.
 */
addPreviewDataSets() {
    const existing = this.model.datasets;
    if (existing && existing.length > 0) {
        this.addSparkDataSets(existing);
    }
}
/**
 * Checks whether the given dataset references one of the known legacy user
 * data sources.
 *
 * @param dataSet the dataset to check; may be undefined
 * @returns true when the dataset's data source id matches a known user data source
 */
isDataSetUserDataSource(dataSet: SparkDataSet) {
    // Fix: also guard against a dataset without a dataSource reference; the
    // previous implementation dereferenced dataSet.dataSource.id directly.
    if (dataSet == undefined || dataSet.dataSource == undefined) {
        return false;
    }
    return this.allDatasources.some(ds => ds.id == dataSet.dataSource.id);
}
/**
 * Delegates to the catalog service to make sure the given dataset has an id
 * assigned.
 *
 * @param dataset the dataset to resolve
 * @returns an observable emitting the dataset with its id populated
 */
ensureDataSetId(dataset: SparkDataSet): Observable<SparkDataSet> {
    return this.catalogService.ensureDataSetId(dataset);
}
/**
 * Ensures ids for every dataset that carries preview data.
 *
 * @param datasets candidate datasets
 * @returns one id-resolution observable per previewed dataset
 */
ensureDataSetIds(datasets: SparkDataSet[]): Observable<SparkDataSet>[] {
    const previewed = datasets.filter(dataset => typeof dataset.preview !== "undefined");
    return previewed.map(dataset => this.ensureDataSetId(dataset));
}
/**
 * Strips the detail portion from complex type names, e.g. "array<int>"
 * becomes "array" and "struct<a:int>" becomes "struct"; simple type names
 * are returned unchanged.
 *
 * @param dataType the native data type name
 * @returns the data type without its element/field detail
 */
shortenComplex(dataType: string) {
    // Fixes: use the primitive 'string' type instead of the boxed 'String'
    // wrapper, and 'substring' instead of the deprecated 'substr'.
    const idx = dataType.indexOf("<");
    return (idx > -1 ? dataType.substring(0, idx) : dataType);
}
/**
 * Adds the given Spark datasets to the flow chart canvas.
 *
 * Waits for every dataset's id to be ensured, converts each dataset with a
 * schema into an ad-hoc table-schema object built from its column metadata,
 * and adds it as a canvas node. Datasets without a schema are counted as
 * errors and reported through a snackbar once all datasets are processed.
 */
addSparkDataSets(datasets: SparkDataSet[]) {
    if (datasets && datasets.length > 0) {
        Observable.forkJoin(this.ensureDataSetIds(datasets)).subscribe((dataSets: SparkDataSet[]) => {
            let error = false;
            dataSets.forEach((dataset: SparkDataSet) => {
                let tableSchema: any = {};
                tableSchema.schemaName = dataset.getSchemaName();
                tableSchema.tableName = dataset.getTableName();
                tableSchema.name = dataset.getTableName();
                if (dataset.schema) {
                    tableSchema.fields = dataset.schema.map(tableColumn => {
                        let field: any = {};
                        field.name = tableColumn.name;
                        field.description = null;
                        field.nativeDataType = tableColumn.dataType;
                        field.derivedDataType = tableColumn.dataType;
                        // Complex types (array<...>, struct<...>) are shortened for display.
                        field.dataTypeWithPrecisionAndScale = this.shortenComplex(tableColumn.dataType);
                        return field;
                    });
                    let nodeName = dataset.getDisplayIdentifier();
                    this.addDataSetToCanvas(null, nodeName, tableSchema, dataset); //dataset.dataSource.id
                } else {
                    error = true;
                }
            });
            this.loadingPage = false;
            if (error) {
                this.snackBar.open("Failed to load schema. Please try again.", "OK", {duration: 5000});
            }
        });
    }
}
/**
 * Initialize the model for the FlowChart.
 *
 * Loads the saved chart model (or creates an empty one), migrates
 * pre-catalog nodes by attaching their matching dataset, prepares each node
 * with its utility functions, tracks the next free node id, and subscribes
 * to the view model's connection create/edit/delete events.
 */
setupFlowChartModel() {
    // Load data model
    let chartDataModel: FlowChart.ChartDataModel;
    if (this.model.chartViewModel != null) {
        chartDataModel = this.model.chartViewModel;
    } else {
        chartDataModel = new FlowChart.ChartDataModel();
    }
    // Prepare nodes
    _.each(chartDataModel.nodes, (node: any) => {
        // Update to catalog data sources: attach the dataset whose data source
        // and dbtable option match this (non-HIVE) node.
        if (node.datasourceId !== "HIVE" && typeof (node.dataset) === "undefined") {
            node.dataset = this.model.datasets
                .find(dataSet => dataSet.dataSource.id == node.datasourceId && dataSet.options != null && dataSet.options.dbtable != null && dataSet.options.dbtable == node.name);
        }
        // Add utility functions
        this.prepareNode(node);
        // Determine next node ID (one past the highest id seen so far)
        this.nextNodeID = Math.max(node.id + 1, this.nextNodeID);
    });
    this.ensureConnectionKeys(chartDataModel);
    // Create view model and wire up connection lifecycle callbacks.
    this.chartViewModel = new FlowChart.ChartViewModel(chartDataModel);
    this.onCreateConnectionSubscription = this.chartViewModel.onCreateConnection$.subscribe(this.onCreateConnectionCallback.bind(this));
    this.onEditConnectionSubscription = this.chartViewModel.onEditConnection$.subscribe(this.onEditConnectionCallback.bind(this));
    this.onDeleteConnectionSubscription = this.chartViewModel.onDeleteSelected$.subscribe(this.onDeleteSelectedCallback.bind(this))
}
/**
 * Resets table-selection state after the user switches data sources:
 * clears the autocomplete control and drops any chosen sample file.
 */
onDatasourceChange() {
    this.form.get('tableAutocomplete').reset('');
    this.engine.setSampleFile(null);
    this.model.sampleFile = null;
}
/**
 * Called after a user adds a table to fetch its columns and data types.
 * Clears the loading flag once the schema arrives (NOTE(review): the flag is
 * not cleared on rejection — confirm callers handle that).
 *
 * @param schema - the schema name
 * @param table - the table name
 * @returns a promise resolving to the table schema
 */
private getTableSchema(schema: string, table: string): Promise<TableSchema> {
    const datasourceId = this.model.$selectedDatasourceId;
    return this.engine.getTableSchema(schema, table, datasourceId)
        .then((result: TableSchema) => {
            this.loadingSchema = false;
            return result;
        });
}
/**
 * Collects the catalog (JDBC) data sources referenced by the model.
 * Two modes: when no explicit catalog datasource id is set, every unique
 * JDBC datasource used by the model's datasets is returned; in advanced
 * mode with an explicit id, only that matching JDBC datasource is returned.
 *
 * @returns the deduplicated catalog data sources (possibly empty)
 */
private getCatalogDataSources():DataSource[] {
const datasourceIds:string[] = [];
const $catalogDataSources:DataSource[] = [];
//save the catalog datasource ids????
if(this.model.$catalogDataSourceId == undefined && this.availableCatalogSQLDataSources && this.availableCatalogSQLDataSources.length){
if(this.model.datasets) {
// Gather the unique JDBC data sources backing the model's datasets.
this.model.datasets
.filter(dataSet => dataSet.dataSource && dataSet.dataSource.connector && dataSet.dataSource.connector.pluginId == "jdbc")
.map(dataSet => dataSet.dataSource)
.forEach(dataSource => {
if(datasourceIds.indexOf(dataSource.id) <0) {
datasourceIds.push(dataSource.id);
$catalogDataSources.push(dataSource);
}
})
}
} else if(this.advancedMode && this.model.$catalogDataSourceId != undefined && this.availableCatalogSQLDataSources && this.availableCatalogSQLDataSources.length) {
// Advanced mode: only the explicitly selected catalog datasource counts.
const ds = this.availableCatalogSQLDataSources.find(ds => ds.id == this.model.$catalogDataSourceId);
if(ds != null && ds != undefined) {
if(ds.connector.pluginId == "jdbc" && datasourceIds.indexOf(ds.id) <0) {
datasourceIds.push(ds.id);
$catalogDataSources.push(ds);
}
}
}
return $catalogDataSources;
}
/**
 * Validate the canvas.
 * If there is at least one table defined, it is valid.
 * Advanced (SQL) mode is valid when the SQL text is non-empty; FILE mode
 * when a sample file is present; visual mode when at least one node exists.
 * As a side effect this synchronizes the model's derived fields
 * (datasourceIds, $datasources, $catalogDataSources, sql, chartViewModel).
 * TODO enhance to check if there are any tables without connections
 */
private validate() {
if (this.advancedMode) {
let sql = this.advancedModeSql();
this.isValid = (typeof(sql) !== "undefined" && sql.length > 0);
// Visual-editor artifacts do not apply in SQL mode.
this.model.$selectedColumnsAndTables = null;
this.model.chartViewModel = null;
// Only record the selected datasource when it is not a native (e.g. HIVE) source.
this.model.datasourceIds = this.model.$selectedDatasourceId != undefined && this.nativeDataSourceIds.indexOf(this.model.$selectedDatasourceId.toUpperCase()) < 0 ? [this.model.$selectedDatasourceId] : [];
this.model.$datasources = this.datasourcesService.filterArrayByIds(this.model.$selectedDatasourceId, this.availableDatasources);
this.model.$catalogDataSources = this.getCatalogDataSources();
if(this.model.$catalogDataSourceId == undefined){
// Pick a default catalog datasource: prefer the model's first saved id
// when it is still available, otherwise the first available id.
if(this.availableCatalogSqlDataSourceIds != undefined
&& this.availableCatalogSqlDataSourceIds.length >0
&& this.model.catalogDataSourceIds
&& this.model.catalogDataSourceIds.length >0
&& this.availableCatalogSqlDataSourceIds.indexOf(this.model.catalogDataSourceIds[0]) >=0){
this.model.$catalogDataSourceId = this.model.catalogDataSourceIds[0];
}
else if(this.availableCatalogSqlDataSourceIds && this.availableCatalogSqlDataSourceIds.length >0){
this.model.$catalogDataSourceId = this.availableCatalogSqlDataSourceIds[0];
}
if(this.model.$catalogDataSourceId){
this.form.get("datasource").setValue(this.model.$catalogDataSourceId);
}
}
} else if (this.model.$selectedDatasourceId == 'FILE') {
this.isValid = this.model.sampleFile != undefined;
} else if (this.chartViewModel.nodes != null && this.chartViewModel.nodes.length > 0) {
this.isValid = true;
// Persist the current canvas state and derived SQL onto the model.
this.model.chartViewModel = this.chartViewModel.data;
this.model.sql = this.getSQLModel();
this.model.$selectedColumnsAndTables = this.selectedColumnsAndTables;
//mark the datasourceId if its not a catalog datasource
this.model.datasourceIds = this.selectedDatasourceIds.filter(id => this.nativeDataSourceIds.indexOf(id.toUpperCase()) < 0 && (this.availableCatalogSqlDataSourceIds != undefined && this.availableCatalogSqlDataSourceIds.indexOf(id) <0));
this.model.catalogDataSourceIds = this.selectedCatalogDatsSourceIds;
this.model.$catalogDataSources = this.getCatalogDataSources();
this.model.$datasources = this.datasourcesService.filterArrayByIds(this.selectedDatasourceIds, this.availableDatasources);
} else {
this.isValid = false;
}
}
/**
 * Computes the x/y coordinate for a newly added table node.
 * Tries to place the node on the top row (y <= 150) just to the right of
 * the right-most table there; falls back to (20, 20) on an empty canvas.
 *
 * @returns the {x, y} placement for the next node
 */
private getNewXYCoord() {
let coord = {x: 20, y: 20};
//attempt to align it on the top
if (this.chartViewModel.data.nodes.length > 0) {
//constants
let yThreshold = 150;
let tableWidth = 250;
//reduce the set to just show those in the top row
let tables = _.filter(this.chartViewModel.data.nodes, (table: any) => {
return table.y <= yThreshold;
});
//sort by x then y (underscore sort is reverse thinking)
tables = _.chain(tables).sortBy('y').sortBy('x').value();
let lastX = coord.x;
// Walk left-to-right until a gap wide enough for a new table is found.
_.some(tables, (table: any) => {
//if this table is within the top row
//move over to find the next X position on the top row that is open
if (table.x < lastX + tableWidth) {
lastX = table.x + table.width;
}
else {
//break out (returning true stops _.some early)
return true;
}
});
if (lastX > 20) {
//add padding
lastX += 20;
}
coord.x = lastX;
}
return coord;
}
/**
 * Turn on SQL mode.
 * Switching visual -> advanced is one-way, so when the canvas already has
 * nodes the user must confirm first; switching back simply clears the SQL
 * and returns to visual mode.
 */
toggleAdvancedMode() {
if (this.advancedMode === false) {
// Deferred so it can run either immediately or after dialog confirmation.
let goAdvanced = () => {
this.advancedMode = true;
this.advancedModeText = "Visual Mode";
this.updateAvailableDatasources();
this.validate();
};
if (this.chartViewModel.nodes.length > 0) {
this._dialogService.openConfirm({
message: 'If you switch to the advanced SQL editor then you will no longer be able to return to this visual editor. Are you sure you want to continue?',
disableClose: true,
viewContainerRef: this.viewContainerRef, //OPTIONAL
title: 'Switch to advanced mode', //OPTIONAL, hides if not provided
cancelButton: 'Cancel', //OPTIONAL, defaults to 'CANCEL'
acceptButton: 'Continue', //OPTIONAL, defaults to 'ACCEPT'
width: '500px', //OPTIONAL, defaults to 400px
}).afterClosed().subscribe((accept: boolean) => {
if (accept) {
goAdvanced();
} else {
//nada
}
});
} else {
goAdvanced();
}
} else {
// Leaving advanced mode discards the SQL text.
this.advancedMode = false;
this.model.sql = "";
this.advancedModeText = "Advanced Mode";
this.updateAvailableDatasources();
}
};
/**
 * Repairs join keys on persisted connections: when a connection's
 * sourceKey/destKey do not match their own node's attribute names but DO
 * match when swapped, the keys are flipped. Errors during the scan are
 * logged and ignored so a bad connection cannot break chart setup.
 *
 * @param chartDataModel - the chart data model whose connections are checked
 */
ensureConnectionKeys(chartDataModel:FlowChart.ChartDataModel) {
if (chartDataModel.nodes) {
// Index nodes by id for O(1) lookup per connection.
let nodeMap = {}
chartDataModel.nodes.forEach(node => nodeMap[node.id] = node);
if (chartDataModel.connections && chartDataModel.connections.length > 0) {
try {
chartDataModel.connections.forEach(connection => {
let sourceKey = connection["joinKeys"]["sourceKey"]
let destKey = connection["joinKeys"]["destKey"]
let sourceId = connection["source"]["nodeID"];
let destId = connection["dest"]["nodeID"];
let sourceNode = nodeMap[sourceId];
let destNode = nodeMap[destId];
let srcFields: string[] = <string[]>(<any[]>sourceNode["nodeAttributes"]["attributes"]).map(attr => attr["name"])
let destFields: string[] = <string[]>(<any[]>destNode["nodeAttributes"]["attributes"]).map(attr => attr["name"])
let validSource = srcFields.indexOf(sourceKey) >= 0;
let validDest = destFields.indexOf(destKey) >= 0;
if (!validSource && !validDest) {
// Neither key matched its own node; try the swapped assignment.
validSource = srcFields.indexOf(destKey) >= 0;
validDest = destFields.indexOf(sourceKey) >= 0;
if (validSource && validDest) {
//flip them
connection["joinKeys"]["sourceKey"] = destKey;
connection["joinKeys"]["destKey"] = sourceKey;
}
}
})
}catch(err){
console.error("error assessing connection keys ",err)
}
}
}
}
/**
 * Adds utility functions to a node data model.
 *
 * The attached functions use `function` expressions (not arrows) on purpose:
 * `this` must be the nodeAttributes object, while `self` closes over the
 * component so each mutation can trigger validate().
 *
 * @param node - the node data model
 */
prepareNode(node: any) {
const self = this;
if(node.name){
//strip any "`" chars from the node name
node.name = node.name.replace(/`/g,"");
}
/**
 * Indicates if all of the attributes are selected.
 *
 * @returns {@code true} if all attributes are selected, or {@code false} otherwise
 */
node.nodeAttributes.hasAllSelected = function (): boolean {
return _.every(this.attributes, function (attr: any) {
return attr.selected
});
};
/**
 * Selects the specified attribute.
 *
 * @param attr - the attribute to be selected
 */
node.nodeAttributes.select = function (attr: any): void {
attr.selected = true;
this.selected.push(attr);
self.validate();
};
/**
 * Selects all attributes.
 */
node.nodeAttributes.selectAll = function (): void {
let selected: any = [];
_.each(this.attributes, (attr: any) => {
attr.selected = true;
selected.push(attr);
});
this.selected = selected;
self.validate();
};
/**
 * Deselects the specified attribute.
 *
 * @param attr - the attribute to be deselected
 */
node.nodeAttributes.deselect = function (attr: any): void {
attr.selected = false;
let idx = this.selected.indexOf(attr);
if (idx > -1) {
this.selected.splice(idx, 1);
}
self.validate();
};
/**
 * Deselects all attributes.
 */
node.nodeAttributes.deselectAll = function (): void {
_.each(this.attributes, (attr: any) => {
attr.selected = false;
});
this.selected = [];
self.validate();
};
};
//
// Add a new node to the chart.
//
/**
 * Handles a table selection from the catalog: creates a JDBC table dataset
 * for it, converts the schema fields to TableColumns, wraps everything in a
 * SparkDataSet, and drops it onto the canvas. Clears the loading flag on
 * both success and failure.
 *
 * @param table - the selected table (expects .schema and .tableName)
 */
onTableClick(table: any) {
this.loadingPage = true;
//get attributes for table
const datasourceId = this.model.$catalogDataSourceId;
this.catalogService.createJdbcTableDataSet(datasourceId,table.schema,table.tableName).subscribe( (ds:DatasetTable) => {
// Fall back to schema.name when the dataset has no title.
let nodeName = ds.dataSet.title;
if(nodeName == null || nodeName == undefined){
nodeName = ds.tableSchema.schemaName+"."+ds.tableSchema.name;
}
let columns:TableColumn[] = ds.tableSchema.fields.map((schemaField:SchemaField)=> {
return {name:schemaField.name,label:schemaField.name,dataType:schemaField.derivedDataType,derivedDataType:schemaField.derivedDataType, nativeDataType:schemaField.nativeDataType}
});
const format = ds.dataSet.format;
let paths = [nodeName];
let dataSet = new SparkDataSet({id:ds.dataSet.id,dataSource:ds.dataSet.dataSource,title:ds.dataSet.title,format:format,schema:columns,options:{"dbtable":nodeName}, paths:paths});
this.addDataSetToCanvas(null, nodeName, ds.tableSchema, dataSet);
this.loadingPage = false;
}, error1 => {
this.loadingPage = false;
});
};
/**
 * Creates a flow-chart node for the given table schema and adds it to the
 * canvas. All fields start selected; the dataset (if any) is registered on
 * the model; the canvas is re-validated afterwards.
 *
 * @param datasourceId - the originating datasource id (may be null for catalog datasets)
 * @param nodeName - display name for the node (back-ticks are stripped)
 * @param tableSchema - the table's schema (fields are mutated: selected flag, display type)
 * @param dataset - optional Spark dataset to associate with the node
 */
private addDataSetToCanvas(datasourceId: string, nodeName: string, tableSchema: TableSchema, dataset?: SparkDataSet) {
const coord = this.getNewXYCoord();
_.each(tableSchema.fields, (field: SchemaField) => {
field.selected = true;
if (this.engine.useNativeDataType) {
field.dataTypeWithPrecisionAndScale = field.nativeDataType.toLowerCase();
}
});
//strip any "`" chars from the node name
nodeName = nodeName.replace(/`/g,"");
const newNodeDataModel: any = {
name: nodeName,
id: this.nextNodeID++,
datasourceId: datasourceId,
dataset: dataset,
datasetMatchesUserDataSource: dataset == undefined || this.isDataSetUserDataSource(dataset),
x: coord.x,
y: coord.y,
nodeAttributes: {
attributes: tableSchema.fields,
reference: [tableSchema.schemaName, tableSchema.name],
selected: []
},
connectors: {
top: {},
bottom: {},
left: {},
right: {}
},
inputConnectors: [
{
name: ""
}
],
outputConnectors: [
{
name: ""
}
]
};
//ensure the dataset is part of the model
if(dataset){
if(_.isUndefined(this.model.datasets) || this.model.datasets === null){
this.model.datasets = [];
}
// Avoid duplicate registration when the same dataset is added twice.
if(this.model.datasets.find(ds => ds.id == dataset.id) == undefined){
this.model.datasets.push(dataset);
}
}
this.prepareNode(newNodeDataModel);
this.chartViewModel.addNode(newNodeDataModel);
this.validate();
}
/**
 * Parses the tables on the canvas into a SQL string and captures the
 * builder's selection metadata (columns/tables and datasource ids) on this
 * component for later use by validate().
 *
 * @returns the SQL string or null if multiple data sources are used
 */
getSQLModel(): string | null {
    const sqlBuilder = VisualQueryService.sqlBuilder(this.chartViewModel.data, this.engine.sqlDialect);
    const generatedSql = sqlBuilder.build();
    this.selectedColumnsAndTables = sqlBuilder.getSelectedColumnsAndTables();
    this.selectedDatasourceIds = sqlBuilder.getDatasourceIds();
    this.selectedCatalogDatsSourceIds = sqlBuilder.getCatalogDataSourceIds();
    return generatedSql;
}
/**
 * When a connection is edited: reopens the join dialog pre-populated with
 * the existing connection's data.
 */
onEditConnectionCallback(response: FlowChart.ConnectionCallbackResponse) {
    const { connectionViewModel, connectionDataModel, src, dest } = response;
    this.showConnectionDialog(false, connectionViewModel, connectionDataModel, src, dest);
};
/**
 * When a connection is created.
 * Rejects duplicate joins (same pair of tables in either direction) by
 * deleting the new connection and alerting the user; otherwise opens the
 * join dialog for the new connection.
 */
onCreateConnectionCallback(response: FlowChart.ConnectionCallbackResponse) {
// Ensure connection is unique
let newDestID = response.dest.data.id;
let newSourceID = response.src.data.id;
// NOTE(review): the "- 1" bound presumably skips the just-created connection
// at the end of the array — confirm against FlowChart's ordering guarantee.
for (let i = 0; i < this.chartViewModel.data.connections.length - 1; ++i) {
let oldDestID = this.chartViewModel.data.connections[i].dest.nodeID;
let oldSourceID = this.chartViewModel.data.connections[i].source.nodeID;
if ((oldDestID === newDestID && oldSourceID === newSourceID) || (oldDestID === newSourceID && oldSourceID === newDestID)) {
// Delete connection
this.chartViewModel.deselectAll();
response.connectionViewModel.select();
this.chartViewModel.deleteSelected();
// Display error message
this._dialogService.openAlert({
message: 'There is already a join between those two tables. Please edit the existing join or switch to advanced mode.',
viewContainerRef: this.viewContainerRef,
title: 'Duplicate join',
width: '500px',
});
return;
}
}
// Add connection
this.showConnectionDialog(true, response.connectionViewModel, response.connectionDataModel, response.src, response.dest);
this.validate();
};
/**
 * Called when the current selection is deleted.
 * Re-validates the canvas and rebuilds the model's dataset list from the
 * datasets still referenced by the remaining nodes (deduplicated by id).
 */
onDeleteSelectedCallback() {
    this.validate();
    const remaining: SparkDataSet[] = [];
    this.chartViewModel.data.nodes.forEach((node: any) => {
        const nodeDataset = node.dataset;
        if (nodeDataset && !remaining.some(ds => ds.id == nodeDataset.id)) {
            remaining.push(<SparkDataSet>nodeDataset);
        }
    });
    this.model.datasets = remaining;
};
/**
 * Opens the connection (join) dialog and applies the user's choice:
 * DELETE (or CANCEL on a brand-new connection) removes the connection;
 * SAVE copies the dialog's join type, name, and keys onto the connection.
 * Re-validates the canvas whenever a response is received.
 *
 * @param isNew - whether the connection was just created
 * @param connectionViewModel - the connection's view model
 * @param connectionDataModel - the connection's data model
 * @param source - the source connector
 * @param dest - the destination connector
 * @returns the dialog subscription
 */
showConnectionDialog(isNew: any, connectionViewModel: any, connectionDataModel: any, source: any, dest: any) {
this.chartViewModel.deselectAll();
let config: ConnectionDialogConfig = {
isNew: isNew,
connectionViewModel: connectionViewModel,
connectionDataModel: connectionDataModel,
source: source,
dest: dest
};
return this._dialogService.open(ConnectionDialog, {data: config})
.afterClosed().subscribe((response: ConnectionDialogResponse) => {
if(response) {
// Cancelling a brand-new connection is treated the same as deleting it.
if (response.status == ConnectionDialogResponseStatus.DELETE || isNew && response.status == ConnectionDialogResponseStatus.CANCEL) {
connectionViewModel.select();
this.chartViewModel.deleteSelected();
}
else if (response.status == ConnectionDialogResponseStatus.SAVE) {
// connectionDataModel = response.connectionDataModel;
let viewConnection = this.chartViewModel.findConnection(response.id);
viewConnection.data.joinType = response.joinType;
viewConnection.data.name = response.connectionName;
viewConnection.data.joinKeys.sourceKey = response.source;
viewConnection.data.joinKeys.destKey = response.dest;
}
this.validate()
}
})
}
/**
 * Callback after a user selects a file from the local file system;
 * advances the wizard to the next step.
 */
onFileUploaded() {
this.next.emit();
}
// -----------------
// Angular Callbacks
// -----------------
/**
* Cleanup environment when this directive is destroyed.
*/
ngOnDestroy(): void {
//cancel subscriptions
if (this.onCreateConnectionSubscription) {
this.onCreateConnectionSubscription.unsubscribe();
}
if (this.onEditConnectionSubscription) {
this.onEditConnectionSubscription.unsubscribe();
}
if (this.onDeleteConnectionSubscription) {
this.onDeleteConnectionSubscription.unsubscribe();
}
}
ngOnChanges(changes: SimpleChanges): void {
if (changes.model && !changes.model.firstChange && this.engineLoaded) {
this.initModelAttributes();
// Setup the flowchart Model
this.setupFlowChartModel();
this.addPreviewDataSets();
// Validate when the page loads
this.validate();
}
}
/**
 * Derives the initial datasource selection and editor mode from the model:
 * defaults the selected datasource to the model's first id, and enters
 * advanced (SQL) mode when the model carries SQL but no chart or sample file.
 */
private initModelAttributes() {
    const model = this.model;
    if (model.$selectedDatasourceId == null && model.datasourceIds && model.datasourceIds.length > 0) {
        model.$selectedDatasourceId = model.datasourceIds[0];
    }
    // Allow for SQL editing
    const hasSql = typeof model.sql !== "undefined" && model.sql !== null;
    const noSampleFile = _.isUndefined(model.sampleFile) || model.sampleFile == null;
    if (model.chartViewModel == null && hasSql && noSampleFile) {
        this.advancedMode = true;
        this.advancedModeText = "Visual Mode";
    } else {
        this.advancedMode = false;
        this.advancedModeText = "Advanced Mode";
    }
}
/**
 * Finish initializing after data-bound properties are initialized.
 * Waits for the query engine (which may arrive as a Promise) before
 * building the flow chart and validating; may auto-open the catalog
 * browser for script-only templates.
 */
ngOnInit(): void {
//init the form objects
this.initFormComponents();
this.initModelAttributes();
this.autoCompleteEnabledCheck();
// Wait for query engine to load
const onLoad = () => {
// Initialize state
this.init().subscribe( (datasources:UserDatasource[]) => {
// Setup the flowchart Model
this.setupFlowChartModel();
this.addPreviewDataSets();
// Validate when the page loads
this.validate();
this.engineLoaded = true;
// Display Catalog for templates only using a sample file
if (typeof this.model.dataTransformScript === "string" && this.model.dataTransformScript.length > 0 && this.model.sql === "") {
this.openCatalogBrowser();
}
});
};
// The engine binding may be the engine itself or a promise of it.
if (this.engine instanceof Promise) {
this.engine.then(queryEngine => {
this.engine = queryEngine;
onLoad();
});
} else {
onLoad();
}
}
/**
 * Search the list of table names for the autocomplete control.
 *
 * @param txt - the current autocomplete value; a string while the user is
 *              typing, or the selected table object once a choice is made
 * @returns an observable of matching table references, an empty list when
 *          there is nothing to search, or the selection wrapped in a list
 */
onAutocompleteQuerySearch(txt: any): any {
    // Removed: an unused local `promise` and a dead commented-out legacy
    // search path that referenced it.
    if (txt == undefined) {
        txt = "";
    }
    if (typeof txt == 'string') {
        // No query text or no catalog datasource selected -> nothing to search.
        if (txt == "" || this.model.$catalogDataSourceId == undefined) {
            return Observable.of([]);
        }
        else {
            return <Observable<DatasourcesServiceStatic.TableReference[]>> this.catalogService.listTables(this.model.$catalogDataSourceId, txt);
        }
    }
    else {
        // A table object was selected from the dropdown.
        this.checkValidSelection();
        return Observable.of([txt]);
    }
}
/**
 * Forces a refresh of the Hive table-name cache backing the autocomplete.
 */
onAutocompleteRefreshCache() {
this.hiveService.refreshTableCache();
}
/**
 * Opens the dataset-preview stepper dialog and, when the user confirms a
 * selection, converts the previews to Spark datasets and adds them to the
 * canvas. A dismissed dialog (undefined result) is ignored.
 */
openCatalogBrowser(){
let data = new DatasetPreviewStepperDialogData(true,"Add");
let dialogConfig:MatDialogConfig = DatasetPreviewStepperDialogComponent.DIALOG_CONFIG()
dialogConfig.data = data;
dialogConfig.viewContainerRef = this.viewContainerRef;
this._dialogService.open(DatasetPreviewStepperDialogComponent,dialogConfig)
.afterClosed()
.filter(value => typeof value !== "undefined").subscribe( (response:DatasetPreviewStepperSavedEvent) => {
//add these to the canvas
this.loadingPage = true;
let sparkDataSets =response.previews.map(ds => ds.toSparkDataSet())
this.addSparkDataSets(sparkDataSets);
});
}
/**
 * Asks the server whether table auto-complete is enabled and toggles the
 * datasource UI accordingly.
 */
autoCompleteEnabledCheck() {
    const url = "/api/v1/ui/wrangler/table-auto-complete-enabled";
    this.http.get(url, {responseType: 'text'}).subscribe((enabled: string | boolean) => {
        // The endpoint may answer with a boolean or with the text "true".
        this.showDatasources = (enabled == true || enabled == "true");
    });
}
} | the_stack |
import { showMsg } from '@/common/js/util';
import Monaco from '@/components/monaco';
import { BKHttpResponse } from '@/common/js/BKHttpResponse';
import { bkCascade } from 'bk-magic-vue';
import { Component, Inject, Prop, Vue, Watch } from 'vue-property-decorator';
import { getFieldTypeConfig, getMasterTableInfo, sqlFuncs } from '../../../Api/index';
import { IDataModelManage } from '../../../Interface/index';
import { IFieldTypeListData, IFieldTypeListRes, ISqlFuncs, ISqlFuncsData } from '../../../Interface/indexDesign';
/**
 * Form component for creating/editing a "statistical caliber" (calculation
 * atom): name, field type, and an aggregation logic that is either built
 * from a field + aggregation function (TABLE mode) or written as raw SQL
 * in a Monaco editor (SQL mode).
 */
@Component({
components: { Monaco, bkCascade },
})
export default class IndexStatisticalCaliber extends Vue {
@Inject('activeTabItem')
// NOTE(review): lowercase 'function' is not a valid TypeScript type — this
// likely should be 'Function' or a typed signature; confirm before changing.
public activeTabItem!: function;
@Prop() public isLoading: boolean;
@Prop({ default: false }) public isRestrictedEdit!: boolean;
@Prop({ default: () => ({}) }) public calculationAtomDetailData: Object;
// Mode: 'create' or 'edit'
@Prop({ default: 'create' }) public mode: string;
// Aggregation-logic expressions already defined on the current model.
@Prop({ default: () => [] }) public aggregationLogicList: string[];
public isFieldTypeLoading = false;
public isSqlFieldLoading = false;
// Configuration for the embedded Monaco SQL editor.
public monacoSetting = {
tools: {
guidUrl: this.$store.getters['docs/getPaths'].realtimeSqlRule,
toolList: {
font_size: true,
full_screen: true,
event_fullscreen_default: true,
editor_fold: false,
format_sql: false,
},
title: this.$t('SQL编辑器'),
},
options: {
fontSize: '14px',
readOnly: this.isRestrictedEdit,
},
};
// Form validation rules (messages are user-facing runtime strings).
public rules = {
'calculation_content.content.calculation_function': [
{
required: true,
message: '必填项',
trigger: 'blur',
},
],
'calculation_content.content.calculation_field': [
{
required: true,
message: '必填项',
trigger: 'blur',
},
],
field_type: [
{
required: true,
message: '必填项',
trigger: 'blur',
},
],
calculation_atom_name: [
{
required: true,
message: '必填项',
trigger: 'blur',
},
{
max: 32,
message: '不能多于32个字符',
trigger: 'blur',
},
{
regex: /^[a-zA-Z][a-zA-Z0-9_]*$/,
message: '只能是英文字母、下划线和数字组成,且字母开头',
trigger: 'blur',
},
],
calculation_atom_alias: [
{
required: true,
message: '必填项',
trigger: 'blur',
},
{
max: 50,
message: '不能多于50个字符',
trigger: 'blur',
},
],
description: [
{
max: 100,
message: '不能多于100个字符',
trigger: 'blur',
},
],
};
public fieldTypeList: IFieldTypeListData[] = [];
// true = selector (TABLE) mode; false = raw SQL mode.
public isSelectorModel = true;
// Current contents of the SQL editor.
public code = '';
public params = {
// Field type
field_type: '',
description: '',
// English name
calculation_atom_name: '',
// Chinese display name
calculation_atom_alias: '',
// Aggregation logic
calculation_content: {
option: 'TABLE',
content: {
// field to aggregate
calculation_field: '',
// aggregation function
calculation_function: '',
// raw sql
calculation_formula: '',
},
},
};
/** Master table data */
public masterTableInfo: IDataModelManage.IMasterTableInfo;
public fieldNameList: object[] = [];
// List of available SQL aggregation functions
public sqlFuncsList: ISqlFuncsData[] = [];
public isSqlFuncLoading = false;
// Field options for the cascade selector, grouped into measures/dimensions.
public fieldNameGroupList = [
{
id: 1,
enName: 'measure',
name: $t('度量'),
children: [],
},
{
id: 2,
enName: 'dimension',
name: $t('维度'),
children: [],
},
];
public isSameAggregationLogic = false;
// Initial value of the aggregation logic when editing
public initCode = '';
// SQL comment template pre-filled into the editor (runtime string — do not translate)
public commentCode =
'-- 只需要字段的计算逻辑,不需要 SELECT、FROM 关键字\n-- 示例:人均道具销售额\n-- sum(price)/count(distinct(uin))\n\n';
// True once the function list is loaded and a function has been picked.
get isGetSqlList() {
return this.sqlFuncsList.length && this.params.calculation_content.content.calculation_function;
}
// "name(displayName)" label for the active tab, or '' when not injected.
get activeTabName() {
return this.activeTabItem ? `${this.activeTabItem().name}(${this.activeTabItem().displayName})` : '';
}
// For each aggregation function, the fields already used (and thus to be disabled).
get sqlFilterFieldMap() {
const globalPatten = /^(\w+)\((\w|\s)+\)$/;
const spacialPatten = /^count\(distinct/;
const result = {};
this.aggregationLogicList.forEach((item: string) => {
if (globalPatten.test(item)) {
let [sqlFunc, sqlField] = item.substring(0, item.length - 1).split('(');
// Special case: count(distinct ...) must be detected separately,
// e.g. count(distinct price)
if (spacialPatten.test(item)) {
sqlFunc = 'count_distinct';
sqlField = item.substring(0, item.length - 1).split(' ')[1];
}
if (!result[sqlFunc]) {
result[sqlFunc] = [sqlField];
} else {
result[sqlFunc].push(sqlField);
}
}
});
return result;
}
// Existing aggregation expressions with all whitespace stripped, for duplicate detection.
get existSqlFieldList() {
return this.aggregationLogicList.map(item => this.dropSpace(item));
}
// Reload the selectable fields when (in edit mode) the function list becomes usable.
@Watch('isGetSqlList')
public onIsGetSqlListChanged(val: boolean) {
if (this.mode === 'create') {
return;
}
if (val) {
this.getMasterTableInfo(
this.sqlFuncsList.find(
item => item.functionName === this.params.calculation_content.content.calculation_function
)?.allowFieldType
);
}
}
// Sync incoming detail data into the local params and editor state.
@Watch('calculationAtomDetailData', { immediate: true, deep: true })
public onCalculationAtomDetailDataChanged(val) {
this.params = Object.assign({}, this.params, val);
this.isSelectorModel = this.params?.calculation_content?.option === 'TABLE';
this.$nextTick(() => {
if (!this.isSelectorModel) {
this.code = this.params?.calculation_content?.content?.calculation_formula;
if (!this.initCode) {
this.initCode = this.code;
}
}
});
}
public mounted() {
this.getFieldTypeConfig();
this.sqlFuncs();
}
// Remove all whitespace from a string.
public dropSpace(str: string) {
return str.replace(/\s+/g, '');
}
// Reset the chosen field and reload selectable fields when the function changes.
public sqlFuncChange(sqlName: string, sqlData: ISqlFuncsData) {
this.params.calculation_content.content.calculation_field = '';
this.getMasterTableInfo(sqlData.allowFieldType);
}
// Warn when the field dropdown is opened before a function is chosen.
public selectToggle(status: boolean) {
if (status && !this.params.calculation_content.content.calculation_function) {
showMsg('请先选择聚合函数!', 'warning', { delay: 5000 });
}
}
// Fetch the list of SQL aggregation functions.
public sqlFuncs() {
this.isSqlFuncLoading = true;
sqlFuncs()
.then(res => {
if (res.validateResult()) {
const instance = new BKHttpResponse<ISqlFuncs>(res);
instance.setData(this, 'sqlFuncsList');
}
})
.finally(() => {
this.isSqlFuncLoading = false;
});
}
// Fetch the selectable field types.
public getFieldTypeConfig() {
this.isFieldTypeLoading = true;
getFieldTypeConfig('string')
.then(res => {
if (res.validateResult()) {
const instance = new BKHttpResponse<IFieldTypeListRes>(res);
instance.setData(this, 'fieldTypeList');
}
})
.finally(() => {
this.isFieldTypeLoading = false;
});
}
/**
 * Loads the master-table fields allowed for the chosen function and rebuilds
 * the grouped (measure/dimension) field options; fields already used by an
 * existing aggregation with the same function are disabled.
 */
public getMasterTableInfo(allowFieldType: string[]) {
this.isSqlFieldLoading = true;
this.fieldNameGroupList = [
{
id: 1,
enName: 'measure',
name: $t('度量'),
children: [],
},
{
id: 2,
enName: 'dimension',
name: $t('维度'),
children: [],
},
];
getMasterTableInfo(this.$route.params.modelId, false, allowFieldType)
.then(res => {
if (res.validateResult()) {
const instance = new BKHttpResponse<IDataModelManage.IMasterTableInfo>(res);
instance.setData(this, 'masterTableInfo');
this.masterTableInfo.fields.forEach(item => {
const calcFunc = this.params.calculation_content.content.calculation_function;
this.fieldNameGroupList
.find(child => child.enName === item.fieldCategory)
?.children.push({
id: item.fieldName,
name: `${item.fieldName}(${item.fieldAlias})`,
disabled: this.sqlFilterFieldMap[calcFunc]
? this.sqlFilterFieldMap[calcFunc].includes(
item.fieldName
)
: false,
});
});
}
})
.finally(() => {
this.isSqlFieldLoading = false;
});
}
// Validate the form and emit 'submit' with the assembled params.
public handleSubmit() {
if (!this.isSelectorModel) {
// Block submission of an aggregation that already exists on the model.
if (this.isSameAggregationLogic) {
showMsg('聚合逻辑在当前模型下已存在,请重新选择!', 'warning', { delay: 5000 });
return;
}
}
this.$refs.dimensionForm.validate().then(res => {
if (!this.isSelectorModel) {
// SQL mode: submit the raw formula instead of field + function.
this.params.calculation_content = {
option: 'SQL',
content: {
calculation_formula: this.code,
},
};
}
this.$emit('submit', this.params);
});
}
// Track editor changes and warn on duplicate aggregation logic.
public handleChangeCode(content: string) {
this.code = content;
if (this.code === this.initCode) {
return;
} // In edit mode, skip the editor's initial assignment to avoid a false warning
this.isSameAggregationLogic = this.existSqlFieldList.includes(this.dropSpace(this.code));
if (this.isSameAggregationLogic) {
showMsg('聚合逻辑在当前模型下已存在,请重新选择!', 'warning', { delay: 5000 });
}
}
/**
 * Switches to SQL mode, seeding the editor with the comment template plus
 * the SQL equivalent of the current field/function selection (if any).
 */
public handleChangeModel() {
this.isSelectorModel = false;
if (
!this.params.calculation_content.content.calculation_function
&& !this.params.calculation_content.content.calculation_field
) {
this.code = this.commentCode;
return;
}
if (this.params.calculation_content.content.calculation_function === 'count_distinct') {
// Special case for distinct: rendered as 'count(distinct {})'
this.code = this.commentCode
+ `count(distinct \`${this.params.calculation_content.content.calculation_field}\`)`;
} else {
this.code = this.commentCode
+ `${this.params.calculation_content.content.calculation_function}
(\`${this.params.calculation_content.content.calculation_field}\`)`;
}
}
}
jest.mock('../helpers');
import { createPublishBitbucketCloudAction } from './bitbucketCloud';
import { rest } from 'msw';
import { setupServer } from 'msw/node';
import { setupRequestMockHandlers } from '@backstage/backend-test-utils';
import { ScmIntegrations } from '@backstage/integration';
import { ConfigReader } from '@backstage/config';
import { getVoidLogger } from '@backstage/backend-common';
import { PassThrough } from 'stream';
import { initRepoAndPush } from '../helpers';
describe('publish:bitbucketCloud', () => {
const config = new ConfigReader({
integrations: {
bitbucketCloud: [
{
username: 'u',
appPassword: 'p',
},
],
},
});
const integrations = ScmIntegrations.fromConfig(config);
const action = createPublishBitbucketCloudAction({ integrations, config });
const mockContext = {
input: {
repoUrl: 'bitbucket.org?workspace=workspace&project=project&repo=repo',
repoVisibility: 'private' as const,
},
workspacePath: 'wsp',
logger: getVoidLogger(),
logStream: new PassThrough(),
output: jest.fn(),
createTemporaryDirectory: jest.fn(),
};
const server = setupServer();
setupRequestMockHandlers(server);
beforeEach(() => {
jest.resetAllMocks();
});
it('should throw an error when the repoUrl is not well formed', async () => {
await expect(
action.handler({
...mockContext,
input: {
...mockContext.input,
repoUrl: 'bitbucket.org?project=project&repo=repo',
},
}),
).rejects.toThrow(/missing workspace/);
await expect(
action.handler({
...mockContext,
input: {
...mockContext.input,
repoUrl: 'bitbucket.org?workspace=workspace&repo=repo',
},
}),
).rejects.toThrow(/missing project/);
await expect(
action.handler({
...mockContext,
input: {
...mockContext.input,
repoUrl: 'bitbucket.org?workspace=workspace&project=project',
},
}),
).rejects.toThrow(/missing repo/);
});
it('should throw if there is no integration config provided', async () => {
await expect(
action.handler({
...mockContext,
input: {
...mockContext.input,
repoUrl: 'missing.com?workspace=workspace&project=project&repo=repo',
},
}),
).rejects.toThrow(/No matching integration configuration/);
});
it('should throw if there is no token in the integration config that is returned', async () => {
const configNoCreds = new ConfigReader({
integrations: {
bitbucketCloud: [],
},
});
const integrationsNoCreds = ScmIntegrations.fromConfig(configNoCreds);
const actionNoCreds = createPublishBitbucketCloudAction({
integrations: integrationsNoCreds,
config: configNoCreds,
});
await expect(actionNoCreds.handler(mockContext)).rejects.toThrow(
/Authorization has not been provided for Bitbucket Cloud/,
);
});
it('should call the correct APIs', async () => {
expect.assertions(2);
server.use(
rest.post(
'https://api.bitbucket.org/2.0/repositories/workspace/repo',
(req, res, ctx) => {
expect(req.headers.get('Authorization')).toBe('Basic dTpw');
expect(req.body).toEqual({
is_private: true,
scm: 'git',
project: { key: 'project' },
});
return res(
ctx.status(200),
ctx.set('Content-Type', 'application/json'),
ctx.json({
links: {
html: {
href: 'https://bitbucket.org/workspace/repo',
},
clone: [
{
name: 'https',
href: 'https://bitbucket.org/workspace/repo',
},
],
},
}),
);
},
),
);
await action.handler(mockContext);
});
it('should work if the token is provided through ctx.input', async () => {
expect.assertions(2);
const token = 'user-token';
server.use(
rest.post(
'https://api.bitbucket.org/2.0/repositories/workspace/repo',
(req, res, ctx) => {
expect(req.headers.get('Authorization')).toBe(`Bearer ${token}`);
expect(req.body).toEqual({
is_private: true,
scm: 'git',
project: { key: 'project' },
});
return res(
ctx.status(200),
ctx.set('Content-Type', 'application/json'),
ctx.json({
links: {
html: {
href: 'https://bitbucket.org/workspace/repo',
},
clone: [
{
name: 'https',
href: 'https://bitbucket.org/workspace/repo',
},
],
},
}),
);
},
),
);
await action.handler({
...mockContext,
input: {
...mockContext.input,
token: token,
},
});
});
it('should call initAndPush with the correct values', async () => {
server.use(
rest.post(
'https://api.bitbucket.org/2.0/repositories/workspace/repo',
(_, res, ctx) =>
res(
ctx.status(200),
ctx.set('Content-Type', 'application/json'),
ctx.json({
links: {
html: {
href: 'https://bitbucket.org/workspace/repo',
},
clone: [
{
name: 'https',
href: 'https://bitbucket.org/workspace/cloneurl',
},
],
},
}),
),
),
);
await action.handler(mockContext);
expect(initRepoAndPush).toHaveBeenCalledWith({
dir: mockContext.workspacePath,
remoteUrl: 'https://bitbucket.org/workspace/cloneurl',
defaultBranch: 'master',
auth: { username: 'u', password: 'p' },
logger: mockContext.logger,
gitAuthorInfo: {},
});
});
it('should call initAndPush with the correct default branch', async () => {
server.use(
rest.post(
'https://api.bitbucket.org/2.0/repositories/workspace/repo',
(_, res, ctx) =>
res(
ctx.status(200),
ctx.set('Content-Type', 'application/json'),
ctx.json({
links: {
html: {
href: 'https://bitbucket.org/workspace/repo',
},
clone: [
{
name: 'https',
href: 'https://bitbucket.org/workspace/cloneurl',
},
],
},
}),
),
),
);
await action.handler({
...mockContext,
input: {
...mockContext.input,
defaultBranch: 'main',
},
});
expect(initRepoAndPush).toHaveBeenCalledWith({
dir: mockContext.workspacePath,
remoteUrl: 'https://bitbucket.org/workspace/cloneurl',
defaultBranch: 'main',
auth: { username: 'u', password: 'p' },
logger: mockContext.logger,
gitAuthorInfo: {},
});
});
  it('should call initAndPush with the configured defaultAuthor', async () => {
    // Build a fresh action whose scaffolder config sets `defaultAuthor`,
    // so the push should carry that author's name and email.
    const customAuthorConfig = new ConfigReader({
      integrations: {
        bitbucketCloud: [
          {
            username: 'u',
            appPassword: 'p',
          },
        ],
      },
      scaffolder: {
        defaultAuthor: {
          name: 'Test',
          email: 'example@example.com',
        },
      },
    });
    const customAuthorIntegrations =
      ScmIntegrations.fromConfig(customAuthorConfig);
    const customAuthorAction = createPublishBitbucketCloudAction({
      integrations: customAuthorIntegrations,
      config: customAuthorConfig,
    });
    server.use(
      rest.post(
        'https://api.bitbucket.org/2.0/repositories/workspace/repo',
        (_, res, ctx) =>
          res(
            ctx.status(200),
            ctx.set('Content-Type', 'application/json'),
            ctx.json({
              links: {
                html: {
                  href: 'https://bitbucket.org/workspace/repo',
                },
                clone: [
                  {
                    name: 'https',
                    href: 'https://bitbucket.org/workspace/cloneurl',
                  },
                ],
              },
            }),
          ),
      ),
    );
    await customAuthorAction.handler(mockContext);
    expect(initRepoAndPush).toHaveBeenCalledWith({
      dir: mockContext.workspacePath,
      remoteUrl: 'https://bitbucket.org/workspace/cloneurl',
      auth: { username: 'u', password: 'p' },
      logger: mockContext.logger,
      defaultBranch: 'master',
      gitAuthorInfo: { name: 'Test', email: 'example@example.com' },
    });
  });
it('should call initAndPush with the configured defaultCommitMessage', async () => {
const customAuthorConfig = new ConfigReader({
integrations: {
bitbucketCloud: [
{
username: 'u',
appPassword: 'p',
},
],
},
scaffolder: {
defaultCommitMessage: 'Test commit message',
},
});
const customAuthorIntegrations =
ScmIntegrations.fromConfig(customAuthorConfig);
const customAuthorAction = createPublishBitbucketCloudAction({
integrations: customAuthorIntegrations,
config: customAuthorConfig,
});
server.use(
rest.post(
'https://api.bitbucket.org/2.0/repositories/workspace/repo',
(_, res, ctx) =>
res(
ctx.status(200),
ctx.set('Content-Type', 'application/json'),
ctx.json({
links: {
html: {
href: 'https://bitbucket.org/workspace/repo',
},
clone: [
{
name: 'https',
href: 'https://bitbucket.org/workspace/cloneurl',
},
],
},
}),
),
),
);
await customAuthorAction.handler(mockContext);
expect(initRepoAndPush).toHaveBeenCalledWith({
dir: mockContext.workspacePath,
remoteUrl: 'https://bitbucket.org/workspace/cloneurl',
auth: { username: 'u', password: 'p' },
logger: mockContext.logger,
defaultBranch: 'master',
commitMessage: 'Test commit message',
gitAuthorInfo: { email: undefined, name: undefined },
});
});
  it('should call outputs with the correct urls', async () => {
    // The action must emit the clone URL and a contents URL derived from the
    // html link plus the (implicit 'master') default branch.
    server.use(
      rest.post(
        'https://api.bitbucket.org/2.0/repositories/workspace/repo',
        (_, res, ctx) =>
          res(
            ctx.status(200),
            ctx.set('Content-Type', 'application/json'),
            ctx.json({
              links: {
                html: {
                  href: 'https://bitbucket.org/workspace/repo',
                },
                clone: [
                  {
                    name: 'https',
                    href: 'https://bitbucket.org/workspace/cloneurl',
                  },
                ],
              },
            }),
          ),
      ),
    );
    await action.handler(mockContext);
    expect(mockContext.output).toHaveBeenCalledWith(
      'remoteUrl',
      'https://bitbucket.org/workspace/cloneurl',
    );
    expect(mockContext.output).toHaveBeenCalledWith(
      'repoContentsUrl',
      'https://bitbucket.org/workspace/repo/src/master',
    );
  });
  it('should call outputs with the correct urls with correct default branch', async () => {
    // As above, but a custom defaultBranch must show up in repoContentsUrl.
    server.use(
      rest.post(
        'https://api.bitbucket.org/2.0/repositories/workspace/repo',
        (_, res, ctx) =>
          res(
            ctx.status(200),
            ctx.set('Content-Type', 'application/json'),
            ctx.json({
              links: {
                html: {
                  href: 'https://bitbucket.org/workspace/repo',
                },
                clone: [
                  {
                    name: 'https',
                    href: 'https://bitbucket.org/workspace/cloneurl',
                  },
                ],
              },
            }),
          ),
      ),
    );
    await action.handler({
      ...mockContext,
      input: {
        ...mockContext.input,
        defaultBranch: 'main',
      },
    });
    expect(mockContext.output).toHaveBeenCalledWith(
      'remoteUrl',
      'https://bitbucket.org/workspace/cloneurl',
    );
    expect(mockContext.output).toHaveBeenCalledWith(
      'repoContentsUrl',
      'https://bitbucket.org/workspace/repo/src/main',
    );
  });
}); | the_stack |
import log from 'electron-log';
import { delay, eventChannel, SagaIterator } from 'redux-saga';
import { all, call, put, select } from 'redux-saga/effects';
import {
askEnableNotifications,
disableNotifications,
enableNotifications,
navigateToApplicationTabAutomatically,
} from '../../app/applications/duck';
import { MAIN_APP_READY } from '../app/duck';
import { ASK_ENABLE_NOTIFICATIONS, TOGGLE_NOTIFICATIONS } from '../applications/duck';
import { getNotificationsEnabled } from '../applications/selectors';
import { addNotification, clearNotifications, removeNotification, RequestForApplicationNotificationsStep } from '../notifications/duck';
import {
getNotificationBody,
getNotificationIcon,
getNotificationOptions,
getNotificationTabId,
getNotificationTitle,
getNotificationWebContentsId,
} from '../notifications/get';
import { getNotificationById } from '../notifications/selectors';
import { getProvider } from '../plugins';
import DeprecatedSDKProvider from '../plugins/SDKProvider';
import { observer } from '../services/lib/helpers';
import { NewNotificationProps } from '../services/services/tab-webcontents/interface';
import { ATTACH_WEBCONTENTS_TO_TAB } from '../tab-webcontents/duck';
import { getWebcontentsIdForTabId } from '../tab-webcontents/selectors';
import { getTabApplicationId } from '../tabs/get';
import { getTabById } from '../tabs/selectors';
import {
callService,
createWebContentsServiceObserverChannel,
takeEveryWithAck,
takeEveryWitness,
takeLatestWitness,
} from '../utils/sagas';
import { showOSNotification } from './api';
import { INFINITE, SYNC_WITH_OS } from './constants';
import {
appendNotification,
AskEnableNotificationsAction,
MARK_ALL_AS_READ,
MARK_AS_READ,
markAsRead,
MarkAsReadAction,
NEW_NOTIFICATION,
newNotification,
NewNotificationAction,
NOTIFICATION_CLICK,
notificationClick,
NotificationClickAction,
removeAllNotifications,
removeNotification as removeNotificationFromNotificationCenter,
RESET_SNOOZE_DURATION,
resetSnoozeDuration,
ResetSnoozeDurationAction,
resetSnoozeStartedOn,
SET_SNOOZE_DURATION,
setSnoozeDuration,
SetSnoozeDurationAction,
setSnoozeStartedOn,
setVisibility,
SHOW_NOTIFICATION,
showNotification,
ShowNotificationAction,
TOGGLE_VISIBILITY,
} from './duck';
import ElectronNotificationStatePoller from './lib/ElectronNotificationStatePoller';
import { getSnoozeDuration, isVisible } from './selectors';
import { RPC } from '../services/lib/types';
import { OSNotification } from '../services/services/os-notification/interface';
const ms = require('ms');
/**
 * Starts a snooze period: records the start date, notifies SDK snooze
 * listeners, then — for finite durations — waits and automatically resets.
 * Watched via takeLatest, so a newer SET_SNOOZE_DURATION cancels the delay.
 */
function* sagaSnooze(action: SetSnoozeDurationAction): SagaIterator {
  const { snooze }: DeprecatedSDKProvider = yield call(getProvider);
  const { via, duration } = action;
  // Update start date
  yield put(setSnoozeStartedOn(Date.now()));
  snooze.triggerSet(duration);
  if (duration === SYNC_WITH_OS) {
    // Mirrors the OS Do-Not-Disturb state; no timer of our own.
    log.debug('snooze is synced with OS');
    return;
  }
  if (duration === INFINITE) {
    // Stays snoozed until an explicit reset.
    log.debug('snooze started for an infinite time');
    return;
  }
  // `duration` is an ms-style string (e.g. '30m') for finite snoozes.
  const durationInMs: number = ms(duration);
  log.debug(`snooze started: ${duration}`);
  yield call(delay, durationInMs);
  yield put(resetSnoozeDuration(via));
  log.debug('snooze finish');
}
/**
 * Stops any active snooze: notifies SDK snooze listeners and clears the
 * recorded snooze start date. The action payload is not needed, so the
 * parameter is underscore-prefixed to mark it as intentionally unused.
 */
function* sagaResetSnooze(_action: ResetSnoozeDurationAction): SagaIterator {
  const { snooze }: DeprecatedSDKProvider = yield call(getProvider);
  snooze.triggerReset();
  // Clear start date
  yield put(resetSnoozeStartedOn());
}
/**
 * Handles NEW_NOTIFICATION: gives SDK listeners a chance to inspect, mutate,
 * or cancel the action, then stores the (possibly tampered) notification and
 * requests that it be shown.
 */
function* sagaNewNotification(action: NewNotificationAction): SagaIterator {
  // Call listeners
  const { notifications }: DeprecatedSDKProvider = yield call(getProvider);
  // `callNew` returns a synthetic event plus the action as seen by listeners.
  const [e, tamperedAction] = yield call([notifications, notifications.callNew], action);
  // A listener vetoed the notification: drop it entirely.
  if (e.isDefaultPrevented()) return;
  const { applicationId, tabId, notificationId, options, props: { title, timestamp, body, icon } } = tamperedAction;
  yield put(addNotification(
    notificationId,
    { applicationId, tabId, title, timestamp, body, icon,
      full: options.full, silent: options.silent, webContentsId: options.webContentsId }
  ));
  yield put(appendNotification(notificationId));
  yield put(showNotification(notificationId));
}
/**
 * Shows an OS-level notification for a stored notification, unless a snooze
 * is active. Clicks on the OS pop-up are forwarded as NOTIFICATION_CLICK.
 */
function* sagaShowNotification(action: ShowNotificationAction): SagaIterator {
  const snooze = yield select(getSnoozeDuration);
  const { notificationId } = action;
  const notificationState = yield select(getNotificationById, notificationId);
  // Skip unknown notifications and anything arriving while snoozed.
  if (!notificationState || snooze) {
    return;
  }
  const notif = yield call(showOSNotification, {
    title: getNotificationTitle(notificationState),
    body: getNotificationBody(notificationState),
    imageURL: getNotificationIcon(notificationState),
    silent: getNotificationOptions(notificationState).silent,
  });
  // Relay pop-up clicks into the store, tagging their origin.
  yield takeEveryWitness(notificationClickChannel(notif), function* handle() {
    yield put(notificationClick(notificationId, 'pop_up'));
  });
}
// Event channel that emits once per click on the given OS notification.
// NOTE(review): the unsubscribe function is a no-op — presumably the observer
// is released together with the notification itself; confirm there's no leak.
const notificationClickChannel = (notif: RPC.Node<OSNotification>) => eventChannel(emit => {
  notif.addObserver(observer({
    onClick: () => emit({}),
  }));
  return () => { };
});
/**
 * Handles a notification click: marks it read, focuses the owning tab when
 * the originating web contents are unknown, then relays the click to the
 * web contents so the page's Notification `click` handler fires.
 */
function* sagaNotificationClick(action: NotificationClickAction): SagaIterator {
  const { notificationId } = action;
  // Mark notification as read
  yield put(markAsRead(notificationId));
  // NOTE(review): MARK_AS_READ removes the notification from the store; this
  // select may already come back empty depending on saga scheduling — verify
  // click-through still navigates as intended.
  const notification = yield select(getNotificationById, notificationId);
  if (!notification) return;
  let webcontentsId = getNotificationWebContentsId(notification);
  if (!webcontentsId) {
    // Fall back to the tab: bring it to front, then resolve its web contents.
    const tabId = getNotificationTabId(notification);
    if (!tabId) return;
    yield put(navigateToApplicationTabAutomatically(tabId));
    webcontentsId = yield select(getWebcontentsIdForTabId, tabId);
    if (!webcontentsId) return;
  }
  yield callService('osNotification', 'triggerClick', webcontentsId, notificationId);
}
/**
 * Drops a read notification from both the notification-center list and the
 * notifications store.
 */
function* sagaMarkAsRead({ notificationId }: MarkAsReadAction): SagaIterator {
  yield put(removeNotificationFromNotificationCenter(notificationId));
  yield put(removeNotification(notificationId));
}
// Clears the notification center and the notifications store in one go.
function* sagaMarkAllAsRead(): SagaIterator {
  yield put(removeAllNotifications());
  yield put(clearNotifications());
}
/**
 * Once web contents are attached to a tab, observe its HTML5 Notification
 * traffic and translate it into NEW_NOTIFICATION actions. The first time a
 * notification is seen for an application, the user is asked whether to
 * enable notifications for it.
 */
function* interceptNotificationEventsFromWebContents({ webcontentsId, tabId }: { webcontentsId: number, tabId: string }) {
  const tab = yield select(getTabById, tabId);
  if (!tab) return;
  const applicationId = getTabApplicationId(tab);
  const newNotificationChannel = createWebContentsServiceObserverChannel(
    webcontentsId, 'addNotificationsObserver', 'onNewNotification', 'intercept-notif-open');
  yield takeEveryWitness(newNotificationChannel, function* handle(props: NewNotificationProps) {
    const isNotifEnabled = yield select(getNotificationsEnabled, applicationId);
    // User explicitly disabled notifications for this application: drop it.
    if (isNotifEnabled === false) return;
    // Undefined means the user was never asked: prompt before delivering.
    if (isNotifEnabled === undefined) {
      yield put(askEnableNotifications({
        applicationId,
        tabId,
        notificationId: props.id,
        props,
        step: RequestForApplicationNotificationsStep.ASK,
      }));
    }
    yield put(newNotification(applicationId, tabId, props.id, props));
  });
  const notificationCloseChannel = createWebContentsServiceObserverChannel(
    webcontentsId, 'addNotificationsObserver', 'onNotificationClose', 'intercept-notif-close');
  yield takeEveryWitness(notificationCloseChannel, function* handle(/*notificationId: string*/) {
    // AL: From my understanding of the [specs](https://notifications.spec.whatwg.org/#dom-notification-close)
    // calling Notification#close should remove the notification from the notification center
    // however, most apps (like Slack) misinterpreted the `#close` and use it to make sure a
    // Notification does not stay on screen.
    // Therefore, when `#close` is called, by default, we do not remove the notification
    // — unless `removeNotificationOnClose` is set in service data
    // TODO fetch flag from manifest?
    // yield put(removeNotificationFromNotificationCenter(notificationId));
    // yield put(removeNotification(notificationId));
  });
}
function* sagaToggleVisibility(): SagaIterator {
const visible = yield select(isVisible);
yield put(setVisibility(!visible));
}
/**
 * Event channel bridging the Electron DND-state poller into redux-saga.
 * Emits the OS "do not disturb" state each time the poller reports it;
 * closing the channel stops the poller.
 */
function pollerEmitterChannel() {
  return eventChannel((emitter: any) => {
    const poller = new ElectronNotificationStatePoller();
    poller.on('os-dnd-state', (event: any) =>
      emitter(event)
    );
    return () => {
      log.debug('[POLLER EMITTER CHANNEL] Channel Off');
      poller.stop();
    };
  });
}
/**
 * Started on MAIN_APP_READY: keeps the snooze state in sync with the OS
 * Do-Not-Disturb setting reported by the poller channel.
 */
function* electronNotificationStatePoller(): SagaIterator {
  const chan = yield call(pollerEmitterChannel);
  yield takeEveryWitness(chan, function* handle(state: any) {
    log.debug('[NOTIFICATION STATE POLLER] event is', state);
    if (state) {
      // OS entered DND: snooze indefinitely, tied to the OS state.
      yield put(setSnoozeDuration('os', SYNC_WITH_OS));
    } else {
      yield put(resetSnoozeDuration('os'));
    }
  });
}
/**
 * Drives the enable/disable-notifications dialog flow for an application.
 * On an ENABLE or DISABLE step it persists the user's choice and advances
 * the dialog to the FINISH step; any other step is handled elsewhere.
 * (The two previously duplicated branches are merged — the effect sequence
 * is unchanged.)
 */
function* askEnableNotificationsFlow({ applicationId, tabId, notificationId, props, step }: AskEnableNotificationsAction): SagaIterator {
  const isEnable = step === RequestForApplicationNotificationsStep.ENABLE;
  const isDisable = step === RequestForApplicationNotificationsStep.DISABLE;
  if (!isEnable && !isDisable) return;
  // Persist the user's choice...
  yield put(isEnable ? enableNotifications(applicationId) : disableNotifications(applicationId));
  // ...then advance the dialog to its final step.
  yield put(askEnableNotifications({
    applicationId,
    tabId,
    notificationId,
    props,
    step: RequestForApplicationNotificationsStep.FINISH,
  }));
}
// Toggle an application's notifications-enabled setting.
function* toggleAppNotifications({ applicationId }: { applicationId: string }): SagaIterator {
  const enabled = yield select(getNotificationsEnabled, applicationId);
  const nextAction = enabled ? disableNotifications : enableNotifications;
  yield put(nextAction(applicationId));
}
/**
 * Root saga of the notifications domain: wires every watcher.
 */
export default function* main(): SagaIterator {
  yield all([
    takeEveryWitness(ATTACH_WEBCONTENTS_TO_TAB, interceptNotificationEventsFromWebContents),
    // takeLatest: a new snooze/reset supersedes (cancels) a pending one.
    takeLatestWitness(SET_SNOOZE_DURATION, sagaSnooze),
    takeLatestWitness(RESET_SNOOZE_DURATION, sagaResetSnooze),
    takeEveryWithAck(NEW_NOTIFICATION, sagaNewNotification),
    takeEveryWitness(NOTIFICATION_CLICK, sagaNotificationClick),
    takeEveryWitness(SHOW_NOTIFICATION, sagaShowNotification),
    takeEveryWitness(MARK_AS_READ, sagaMarkAsRead),
    takeEveryWitness(MARK_ALL_AS_READ, sagaMarkAllAsRead),
    takeEveryWitness(TOGGLE_VISIBILITY, sagaToggleVisibility),
    takeEveryWitness(MAIN_APP_READY, electronNotificationStatePoller),
    takeEveryWitness(ASK_ENABLE_NOTIFICATIONS, askEnableNotificationsFlow),
    takeEveryWitness(TOGGLE_NOTIFICATIONS, toggleAppNotifications),
  ]);
}
import React, { Component, createRef } from 'react';
import { Animated, LayoutChangeEvent, View, ViewProps } from 'react-native';
import Thumb, { NullableThumb, ThumbGestureCallback } from '../internal/Thumb';
import { getTheme } from '../theme';
import { NumRange } from '../types';
// The max scale of the thumb (applied while it is pressed)
const THUMB_SCALE_RATIO = 1.3;
// Width of the thumb border
const THUMB_BORDER_WIDTH = 2;
// extra spacing enlarging the touchable area
const TRACK_EXTRA_MARGIN_V = 5;
const TRACK_EXTRA_MARGIN_H = 5;
/** Props of {@link RangeSlider} */
export interface RangeSliderProps extends ViewProps {
  /**
   * Lower limit of the range, default is `0`
   * @defaultValue `0`
   */
  min: number;
  /**
   * Upper limit of the range, default is `100`
   * @defaultValue `100`
   */
  max: number;
  /** Initial value of range */
  range?: NumRange;
  /** The thickness of the RangeSlider track */
  trackSize?: number;
  /** Radius of the thumb of the RangeSlider */
  thumbRadius?: number;
  /** Padding for the hitSlop on the RangeSlider thumb */
  thumbPadding?: number;
  /** Color of the lower part of the track, it's also the color of the thumb */
  lowerTrackColor?: any;
  /** Color of the upper part of the track */
  upperTrackColor?: any;
  /** Callback when drag gesture begins */
  onStart?: ThumbGestureCallback;
  /** Callback when value changed */
  onChange?: (range: NumRange) => void;
  /** Callback when the value is confirmed */
  onConfirm?: (range: NumRange) => void;
  /** Step value of the RangeSlider, must be a divisor of the range (max - min) */
  step?: number;
}
// Zeroed range used whenever no `range` prop is supplied.
const emptyRange = { min: 0, max: 0 };
/** Default props of {@link RangeSlider}, see {@link RangeSliderProps} */
const defaultProps: RangeSliderProps = {
  thumbPadding: 0,
  thumbRadius: 6,
  trackSize: 2,
  min: 0,
  max: 100,
  range: emptyRange,
  step: 1,
};
/**
 * The `RangeSlider` component.
 *
 * @remarks
 * See {@link RangeSliderProps} for the available props.
 * Refer to {@link https://material.io/design/components/sliders.html | Guideline} or {@link http://www.getmdl.io/components/index.html#sliders-section | MDL implementation}
 */
export default class RangeSlider extends Component<RangeSliderProps> {
  /** Defaults, see {@link defaultProps} */
  static defaultProps = defaultProps;
  /** Reference to App's {@link Theme} */
  private theme = getTheme();
  private minThumbRef = createRef<Thumb>();
  private maxThumbRef = createRef<Thumb>();
  private trackRef = createRef<View>();
  // Thumb that temporarily takes over a gesture when both thumbs overlap
  // and the drag started on the "wrong" one; undefined otherwise.
  private overrideThumb: NullableThumb;
  // Current range in track-pixel coordinates (see _toPixelScale).
  private _range: NumRange = { min: 0, max: 0 };
  private _trackTotalLength = 0;
  private _trackMarginH = 0;
  private _trackMarginV = 0;
  private _thumbRadiiWithBorder = 0;
  private readonly _lowerTrackLength: Animated.Value;
  private readonly _lowerTrackMin: Animated.Value;
  constructor(props: RangeSliderProps) {
    super(props);
    this.overrideThumb = undefined;
    this._lowerTrackLength = new Animated.Value(this._range.max - this._range.min);
    this._lowerTrackMin = new Animated.Value(this._range.min);
  }
  /** Public api to update the current `minValue` */
  set minValue(value: number) {
    const range = this._setRange({
      min: value,
      max: this._toSliderScale(this._range.max),
    });
    this._updateValue(range);
    this._emitChange();
  }
  /** Retrieve the current `minValue` */
  get minValue(): number {
    return this._toSliderScale(this._range.min);
  }
  /** Public api to update the current `maxValue` */
  set maxValue(value: number) {
    const range = this._setRange({
      min: this._toSliderScale(this._range.min),
      max: value,
    });
    this._updateValue(range);
    this._emitChange();
  }
  /** Retrieve the current `maxValue` */
  get maxValue(): number {
    return this._toSliderScale(this._range.max);
  }
  private get minThumb(): NullableThumb {
    return this.minThumbRef && this.minThumbRef.current;
  }
  private get maxThumb(): NullableThumb {
    return this.maxThumbRef && this.maxThumbRef.current;
  }
  // NOTE(review): legacy lifecycle methods; consider migrating to
  // getDerivedStateFromProps/componentDidUpdate when React is upgraded.
  UNSAFE_componentWillMount() {
    this._onThumbRadiiUpdate(this.props);
  }
  UNSAFE_componentWillReceiveProps(nextProps: RangeSliderProps) {
    this._onThumbRadiiUpdate(nextProps);
    this._setRange(nextProps.range || emptyRange);
    this._updateValue(this._range);
  }
  render() {
    this._verifyStep();
    // making room for the Thumb, cause's Android doesn't support `overflow: visible`
    // - @see http://bit.ly/1Fzr5SE
    const trackMargin = {
      marginLeft: this._trackMarginH,
      marginRight: this._trackMarginH,
      marginTop: this._trackMarginV,
      marginBottom: this._trackMarginV,
    };
    const sliderStyle = this.theme.sliderStyle;
    // @ts-ignore
    const lowerTrackColor = this.props.lowerTrackColor || sliderStyle.lowerTrackColor;
    // @ts-ignore
    const upperTrackColor = this.props.upperTrackColor || sliderStyle.upperTrackColor;
    return (
      <View
        style={[
          this.props.style,
          {
            padding: 0,
            paddingTop: 0,
            paddingBottom: 0,
            paddingLeft: 0,
            paddingRight: 0,
          },
        ]}
      >
        <View
          ref={this.trackRef}
          style={{
            height: this.props.trackSize,
            backgroundColor: upperTrackColor,
            ...trackMargin,
          }}
          onLayout={this._onTrackLayout}
        >
          <Animated.View
            style={{
              position: 'absolute',
              left: this._lowerTrackMin,
              width: this._lowerTrackLength,
              height: this.props.trackSize,
              backgroundColor: lowerTrackColor,
            }}
          />
        </View>
        <Thumb
          ref={this.minThumbRef}
          radius={this.props.thumbRadius}
          enabledColor={lowerTrackColor}
          disabledColor={upperTrackColor}
          onGrant={this._beginMove}
          onMove={this._updateValueByTouch}
          onEnd={this._endMove}
          touchPadding={this.props.thumbPadding}
          style={{
            top: this._thumbRadiiWithBorder * (THUMB_SCALE_RATIO - 1) + TRACK_EXTRA_MARGIN_V,
          }}
        />
        <Thumb
          ref={this.maxThumbRef}
          radius={this.props.thumbRadius}
          enabledColor={lowerTrackColor}
          disabledColor={upperTrackColor}
          onGrant={this._beginMove}
          onMove={this._updateValueByTouch}
          onEnd={this._endMove}
          touchPadding={this.props.thumbPadding}
          style={{
            top: this._thumbRadiiWithBorder * (THUMB_SCALE_RATIO - 1) + TRACK_EXTRA_MARGIN_V,
          }}
        />
      </View>
    );
  }
  // region private property initializers
  /** callback when the Track component's layout changes */
  private _onTrackLayout = ({
    nativeEvent: {
      layout: { width },
    },
  }: LayoutChangeEvent) => {
    if (this._trackTotalLength !== width) {
      this._trackTotalLength = width;
      this._setRange(this.props.range || emptyRange);
      this._updateValue(this._range);
    }
  };
  // Respond to Grant gestures
  private _beginMove: ThumbGestureCallback = (ref, evt) => {
    this.props.onStart && this.props.onStart(ref, evt);
    this._updateValueByTouch(ref, evt);
  };
  // Respond to both cancelled and finished gestures
  private _endMove: ThumbGestureCallback = (ref, evt) => {
    const thumb = this.overrideThumb ? this.overrideThumb : ref;
    const dx = evt.nativeEvent.pageX;
    this.trackRef.current &&
      this.trackRef.current.measure((fx, fy, width, height, px) => {
        this._validateMove(dx, px, width);
        thumb.confirmMoveTo();
        this.overrideThumb = undefined;
      });
    // NOTE(review): `measure` invokes its callback asynchronously, so this
    // confirm may fire before the final position is settled — verify.
    this._emitConfirm();
  };
  // Respond to Move touch gestures
  private _updateValueByTouch: ThumbGestureCallback = (ref, evt) => {
    const thumb = this.overrideThumb ? this.overrideThumb : ref;
    const dx = evt.nativeEvent.pageX;
    this.trackRef.current &&
      this.trackRef.current.measure((fx, fy, width, height, px) => {
        const { currThumb, x } = this._validateMove(dx, px, width, thumb);
        this._internalSetValue(currThumb, x);
        this._moveThumb(currThumb, x);
      });
  };
  // Snap thumb by step, default step = 1
  private _snap = (val: number, inc = this._defaultStepIncrement()) => {
    const current = Math.round(val);
    const half = inc * 0.5;
    const diff = current % inc;
    if (diff >= half) {
      const multiplier = Math.round(current / inc);
      return inc * multiplier;
    }
    return current - diff;
  };
  // Size of one step, in track pixels.
  private _defaultStepIncrement = () =>
    this._toPixelScale(this.props.max) / ((this.props.max - this.props.min) / (this.props.step || 1));
  // endregion
  // Validate and store a new range, converted to pixel scale.
  // Throws if the preset range is inverted or out of bounds.
  private _setRange({ min, max }: NumRange): NumRange {
    const min2Scale = this._toPixelScale(min || 0);
    const max2Scale = this._toPixelScale(max || 0);
    const minBounds = this._toPixelScale(this.props.min);
    const maxBounds = this._toPixelScale(this.props.max);
    if (min2Scale > max2Scale) {
      throw new Error(`Minimum slider value: ${min} is greater than max value: ${max}`);
    }
    if (min2Scale < minBounds || min2Scale > maxBounds) {
      throw new Error(`Minimum slider value: ${min} exceeds bounds:
      ${this.props.min} - ${this.props.max}`);
    }
    if (max2Scale < minBounds || max2Scale > maxBounds) {
      throw new Error(`Maximum slider value: ${max} exceeds bounds:
      ${this.props.min} - ${this.props.max}`);
    }
    // (leftover debug console.log removed)
    this._range = {
      min: min2Scale,
      max: max2Scale,
    };
    return this._range;
  }
  // Scale global xy coordinate values to track values
  private _toSliderScale(value: number): number {
    const trackToRange = (this.props.max - this.props.min) / this._trackTotalLength;
    return value * trackToRange + this.props.min;
  }
  // Scale track values to global xy coordinate system
  private _toPixelScale(value: number) {
    const rangeToTrack = this._trackTotalLength / (this.props.max - this.props.min);
    return (value - this.props.min) * rangeToTrack;
  }
  // Set values for thumb components for user touch events
  private _internalSetValue(ref?: Thumb, value?: number) {
    const target = ref === this.minThumb ? 'min' : 'max';
    this._range[target] = value || 0;
    this._emitChange();
  }
  // Send changed values to onChange callback
  private _emitChange() {
    this.props.onChange &&
      this.props.onChange({
        min: this._toSliderScale(this._range.min),
        max: this._toSliderScale(this._range.max),
      });
  }
  // Send confirmed values to onConfirm callback
  private _emitConfirm() {
    this.props.onConfirm &&
      this.props.onConfirm({
        min: this._toSliderScale(this._range.min),
        max: this._toSliderScale(this._range.max),
      });
  }
  // Internal update of ranges. Values should be to "Pixel Scale"
  private _updateValue(values: NumRange) {
    if (!this._trackTotalLength) {
      return;
    }
    const lthumb = this.minThumb;
    const rthumb = this.maxThumb;
    this._moveThumb(lthumb, values.min);
    lthumb && lthumb.confirmMoveTo();
    this._moveThumb(rthumb, values.max);
    rthumb && rthumb.confirmMoveTo();
  }
  // Ensure thumbs do not cross each other or track boundaries
  private _validateMove(
    dx: number,
    trackOriginX: number,
    trackWidth: number,
    thumb?: Thumb
  ): { currThumb?: Thumb; x?: number } {
    const x = dx - trackOriginX;
    // Clamp a track-relative x into [0, trackWidth].
    const onTrack = (relX: number) => {
      const upperBound = relX >= trackWidth ? trackWidth : relX;
      return relX <= 0 ? 0 : upperBound;
    };
    if (!thumb) {
      return {};
    }
    const minThumb = this.minThumb;
    const maxThumb = this.maxThumb;
    if (!minThumb || !maxThumb) {
      return {};
    }
    let currThumb = thumb;
    // When both thumbs overlap, hand the gesture to whichever thumb the
    // drag direction implies, remembering it for the rest of the gesture.
    if (minThumb.x === maxThumb.x) {
      if (x > maxThumb.x) {
        currThumb = this.overrideThumb = maxThumb;
        thumb && thumb.confirmMoveTo();
      } else if (x < minThumb.x) {
        currThumb = this.overrideThumb = minThumb;
        thumb && thumb.confirmMoveTo();
      }
    }
    let valX = 0;
    if (currThumb === minThumb) {
      valX = x >= maxThumb.x ? maxThumb.x : onTrack(x);
    } else if (currThumb === maxThumb) {
      valX = x <= minThumb.x ? minThumb.x : onTrack(x);
    }
    return { currThumb, x: this._snap(valX) };
  }
  // Induce smooth animation to move each thumb component
  private _moveThumb(thumb?: NullableThumb, x?: number) {
    // `x == null` (not `!x`) so a thumb can still be moved to position 0.
    if (!thumb || x == null) {
      return;
    }
    thumb.moveTo(x);
    Animated.parallel([
      Animated.timing(this._lowerTrackMin, {
        toValue: this._range.min,
        duration: 0,
      }),
      Animated.timing(this._lowerTrackLength, {
        toValue: this._range.max - this._range.min,
        duration: 0,
      }),
    ]).start();
  }
  // when thumb radii updated, re-calc the dimensions
  private _onThumbRadiiUpdate(props: RangeSliderProps) {
    const thumbRadii = props.thumbRadius || 0;
    this._thumbRadiiWithBorder = thumbRadii + THUMB_BORDER_WIDTH;
    this._trackMarginV =
      this._thumbRadiiWithBorder * THUMB_SCALE_RATIO + TRACK_EXTRA_MARGIN_V - (this.props.trackSize || 0) / 2;
    this._trackMarginH = this._thumbRadiiWithBorder * THUMB_SCALE_RATIO + TRACK_EXTRA_MARGIN_H;
  }
  // Step must be a divisor of the value range (max - min)
  private _verifyStep() {
    const divisor = (this.props.max - this.props.min) / (this.props.step || 1);
    if (divisor % 1 !== 0) {
      throw new Error(`Given step ( ${this.props.step} ) must be \
      a divisor of the range ( ${this.props.max} - ${this.props.min} )`);
    }
  }
}
import { ProposedOrPrevious } from "../../src/chrono/Effect.js"
import { Base } from "../../src/class/Base.js"
import { CalculationIterator } from "../../src/primitives/Calculation.js"
import { calculate, Entity, field } from "../../src/replica/Entity.js"
import { Replica } from "../../src/replica/Replica.js"
import { Schema } from "../../src/schema/Schema.js"
import { delay } from "../../src/util/Helpers.js"
declare const StartTest : any
StartTest(t => {
    // A generator-based calculated field resolves its dependencies via `yield`
    // and recomputes when a dependency changes.
    t.it('Replica', async t => {
        const schema = Schema.new({ name : 'Cool data schema' })
        const entity = schema.getEntityDecorator()
        @entity
        class Author extends Entity.mix(Base) {
            @field()
            id : string
            @field()
            firstName : string
            @field()
            lastName : string
            @field()
            fullName : string
            @calculate('fullName')
            * calculateFullName () : CalculationIterator<string> {
                return (yield this.$.firstName) + ' ' + (yield this.$.lastName)
            }
        }
        @entity
        class Book extends Entity.mix(Base) {
            @field()
            name : string
            @field()
            writtenBy : Author
        }
        t.ok(schema.hasEntity('Author'), 'Entity added to schema')
        t.ok(schema.hasEntity('Book'), 'Entity added to schema')
        const replica1 = Replica.new({ schema : schema })
        const markTwain = Author.new({ firstName : 'Mark', lastName : 'Twain' })
        const tomSoyer = Book.new({ name : 'Tom Soyer', writtenBy : markTwain })
        replica1.addEntity(markTwain)
        replica1.addEntity(tomSoyer)
        t.is(markTwain.fullName, 'Mark Twain', 'Correct name calculated')
        markTwain.firstName = 'MARK'
        t.is(markTwain.fullName, 'MARK Twain', 'Correct name calculated')
    })
    // A plain (non-generator) calculation method may read dependencies
    // directly off `this` instead of yielding them.
    t.it('Simplified calculation methods', async t => {
        const schema = Schema.new({ name : 'Cool data schema' })
        const entity = schema.getEntityDecorator()
        @entity
        class Author extends Entity.mix(Base) {
            @field()
            firstName : string
            @field()
            lastName : string
            @field()
            fullName : string
            @calculate('fullName')
            calculateFullName () : string {
                return this.firstName + ' ' + this.lastName
            }
        }
        const replica1 = Replica.new({ schema : schema })
        const markTwain = Author.new({ firstName : 'Mark', lastName : 'Twain' })
        replica1.addEntity(markTwain)
        t.is(markTwain.fullName, 'Mark Twain', 'Correct name calculated')
        markTwain.firstName = 'MARK'
        t.is(markTwain.fullName, 'MARK Twain', 'Correct name calculated')
    })
    // Generator helper methods (not bound to a field) can be invoked with
    // `entity.run(name, ...args)` and may yield fields like calculations do.
    t.it('Helper methods', async t => {
        class Author extends Entity.mix(Base) {
            @field()
            firstName : string
            @field()
            lastName : string
            @field()
            fullName : string
            @calculate('fullName')
            * calculateFullName () : CalculationIterator<string> {
                return (yield this.$.firstName) + ' ' + (yield this.$.lastName)
            }
            * helperMethod (prefix : string) : CalculationIterator<string> {
                return prefix + (yield this.$.fullName)
            }
        }
        const replica1 = Replica.new()
        const markTwain = Author.new({ firstName : 'Mark', lastName : 'Twain' })
        replica1.addEntity(markTwain)
        t.is(markTwain.fullName, 'Mark Twain', 'Correct name calculated')
        const result = markTwain.run('helperMethod', 'Mr. ')
        t.is(result, 'Mr. Mark Twain', 'Correct result from helper method')
    })
    // Fields that were never assigned become `null` (not `undefined`) once
    // the entity joins a replica; assigned fields keep their config value.
    t.it('Should set the variable fields to `null`', async t => {
        const schema = Schema.new({ name : 'Cool data schema' })
        const entity = schema.getEntityDecorator()
        @entity
        class Author extends Entity.mix(Base) {
            @field()
            firstName : string
            @field()
            lastName : string
        }
        const replica1 = Replica.new({ schema : schema })
        const markTwain = Author.new({ lastName : 'Twain' })
        replica1.addEntity(markTwain)
        t.isStrict(markTwain.firstName, null, 'Correctly set uninitialized field to `null`')
        t.isStrict(markTwain.lastName, 'Twain', 'Correctly set config value')
    })
    // An uninitialized field is calculated once (to `null`) and must not be
    // recalculated on a later, unrelated commit — verified via a spy.
    t.it('Should set the uninitialized fields to `null` without recomputing them on next propagation', async t => {
        const schema = Schema.new({ name : 'Cool data schema' })
        const entity = schema.getEntityDecorator()
        @entity
        class Author extends Entity.mix(Base) {
            @field()
            firstName : string
            @field()
            lastName : string
            @calculate('firstName')
            calculateFirstName (Y) : string {
                return Y(ProposedOrPrevious)
            }
            @calculate('lastName')
            calculateLastName (Y) : string {
                return Y(ProposedOrPrevious)
            }
        }
        const replica1 = Replica.new({ schema : schema })
        const markTwain = Author.new()
        replica1.addEntity(markTwain)
        //------------------
        const spy = t.spyOn(markTwain.$.firstName, 'calculation')
        replica1.commit()
        t.expect(spy).toHaveBeenCalled(1)
        t.isStrict(markTwain.firstName, null, 'Correctly set uninitialized field to `null`')
        //------------------
        spy.reset()
        markTwain.lastName = 'Twain'
        replica1.commit()
        t.expect(spy).toHaveBeenCalled(0)
    })
    // A `sync : false` field is resolved asynchronously: reading it returns a
    // Promise that settles once the (auto-)commit completes.
    t.it('Replica async', async t => {
        const schema = Schema.new({ name : 'Cool data schema' })
        const entity = schema.getEntityDecorator()
        @entity
        class Author extends Entity.mix(Base) {
            @field()
            id : string
            @field()
            firstName : string
            @field()
            lastName : string
            @field({ sync : false })
            fullName : string
            @calculate('fullName')
            * calculateFullName () : CalculationIterator<string> {
                yield delay(10)
                return (yield this.$.firstName) + ' ' + (yield this.$.lastName)
            }
        }
        const replica1 = Replica.new({ schema : schema, autoCommit : true, autoCommitMode : 'async', onWriteDuringCommit : 'ignore' })
        const markTwain = Author.new({ firstName : 'Mark', lastName : 'Twain' })
        replica1.addEntity(markTwain)
        const fullName1 = markTwain.fullName
        t.isInstanceOf(fullName1, Promise)
        t.is(await markTwain.fullName, 'Mark Twain', 'Correct name calculated')
        // this write actually happens during the auto-commit, scheduled by the entity addition
        markTwain.firstName = 'MARK'
        t.is(await markTwain.fullName, 'MARK Twain', 'Correct name calculated')
    })
    // Removing an entity and re-adding it within the same transaction must
    // preserve its field data at every step.
    t.it('Should be able to add the removed entity back during the same transaction', async t => {
        const schema = Schema.new({ name : 'Cool data schema' })
        const entity = schema.getEntityDecorator()
        @entity
        class Author extends Entity.mix(Base) {
            @field()
            firstName : string
            @field()
            lastName : string
        }
        const replica1 = Replica.new({ schema : schema })
        const markTwain = Author.new({ firstName : 'Mark', lastName : 'Twain' })
        t.is(markTwain.firstName, 'Mark', 'Correct data after creation')
        t.is(markTwain.lastName, 'Twain')
        replica1.addEntity(markTwain)
        t.is(markTwain.firstName, 'Mark', 'Correct data after add to graph')
        t.is(markTwain.lastName, 'Twain')
        replica1.commit()
        t.is(markTwain.firstName, 'Mark', 'Correct data after commit')
        t.is(markTwain.lastName, 'Twain')
        replica1.removeEntity(markTwain)
        t.is(markTwain.firstName, 'Mark', 'Correct data after remove')
        t.is(markTwain.lastName, 'Twain')
        replica1.addEntity(markTwain)
        t.is(markTwain.firstName, 'Mark', 'Correct data after add back')
        t.is(markTwain.lastName, 'Twain')
    })
// Regression test: declaring a @calculate method on a subclass must register the
// calculation for that subclass only - superclass instances keep plain field behavior.
t.it('Should not leak fields/calculation functions to super class', async t => {
    const schema = Schema.new({ name : 'Cool data schema' })
    const entity = schema.getEntityDecorator()
    // Base entity: `fullName` is a regular stored field here (no calculation attached).
    @entity
    class Author1 extends Entity.mix(Base) {
        @field()
        firstName : string
        @field()
        lastName : string
        @field()
        fullName : string
        @field()
        someField : string
        @calculate('someField')
        * calculateSomeField () : CalculationIterator<string> {
            return 'someField'
        }
    }
    // Subclass attaches a calculation to `fullName`; if registration leaked to Author1,
    // this generator would also run for `author1` and the in-body assertion would fail.
    class Author2 extends Author1 {
        @calculate('fullName')
        * calculateFullName () : CalculationIterator<string> {
            // `author2` is closed over from below; the calculation only runs after it exists.
            t.is(this, author2, 'Should not call this method on `author1` instance')
            return (yield this.$.firstName) + ' ' + (yield this.$.lastName)
        }
    }
    const replica2 = Replica.new({ schema : schema })
    const author2 = Author2.new({ firstName : 'Mark', lastName : 'Twain', fullName : "some name" })
    replica2.addEntity(author2)
    const replica1 = Replica.new({ schema : schema })
    const author1 = Author1.new({ firstName : 'Mark', lastName : 'Twain', fullName : "some name" })
    replica1.addEntity(author1)
    // Stored value wins on the superclass; calculated value wins on the subclass.
    t.is(author1.fullName, 'some name', 'Correct name calculated')
    t.is(author2.fullName, 'Mark Twain', 'Correct name calculated')
})
}) | the_stack |
import { GaxiosPromise } from 'gaxios';
import { Compute, JWT, OAuth2Client, UserRefreshClient } from 'google-auth-library';
import { APIRequestContext, BodyResponseCallback, GlobalOptions, GoogleConfigurable, MethodOptions } from 'googleapis-common';
export declare namespace groupssettings_v1 {
/**
 * Options for constructing the groupssettings endpoint; only API version 'v1' exists.
 */
interface Options extends GlobalOptions {
    version: 'v1';
}
/**
 * Standard query parameters accepted by every method of this API.
 */
interface StandardParameters {
    /**
     * Data format for the response.
     */
    alt?: string;
    /**
     * Selector specifying which fields to include in a partial response.
     */
    fields?: string;
    /**
     * API key. Your API key identifies your project and provides you with API
     * access, quota, and reports. Required unless you provide an OAuth 2.0
     * token.
     */
    key?: string;
    /**
     * OAuth 2.0 token for the current user.
     */
    oauth_token?: string;
    /**
     * Returns response with indentations and line breaks.
     */
    prettyPrint?: boolean;
    /**
     * An opaque string that represents a user for quota purposes. Must not
     * exceed 40 characters.
     */
    quotaUser?: string;
    /**
     * Deprecated. Please use quotaUser instead.
     */
    userIp?: string;
}
/**
 * Groups Settings API
 *
 * Manages permission levels and related settings of a group.
 *
 * @example
 * const {google} = require('googleapis');
 * const groupssettings = google.groupssettings('v1');
 *
 * @namespace groupssettings
 * @type {Function}
 * @version v1
 * @variation v1
 * @param {object=} options Options for Groupssettings
 */
class Groupssettings {
    /** Request context (auth, root URL) shared by all resources of this API instance. */
    context: APIRequestContext;
    /** The `groups` resource, exposing the get / patch / update methods. */
    groups: Resource$Groups;
    constructor(options: GlobalOptions, google?: GoogleConfigurable);
}
/**
 * JSON template for Group resource.
 *
 * Note: most fields are stringly-typed booleans/enums ('true'/'false', enum
 * names) as returned by the API, not native TypeScript booleans.
 */
interface Schema$Groups {
    /**
     * Are external members allowed to join the group.
     */
    allowExternalMembers?: string;
    /**
     * Is google allowed to contact admins.
     */
    allowGoogleCommunication?: string;
    /**
     * If posting from web is allowed.
     */
    allowWebPosting?: string;
    /**
     * If the group is archive only.
     */
    archiveOnly?: string;
    /**
     * Custom footer text.
     */
    customFooterText?: string;
    /**
     * Default email to which reply to any message should go.
     */
    customReplyTo?: string;
    /**
     * If any of the settings that will be merged have custom roles which is
     * anything other than owners, managers, or group scopes.
     */
    customRolesEnabledForSettingsToBeMerged?: string;
    /**
     * Default message deny notification message
     */
    defaultMessageDenyNotificationText?: string;
    /**
     * Description of the group
     */
    description?: string;
    /**
     * Email id of the group
     */
    email?: string;
    /**
     * If a primary Collab Inbox feature is enabled.
     */
    enableCollaborativeInbox?: string;
    /**
     * If favorite replies should be displayed above other replies.
     */
    favoriteRepliesOnTop?: string;
    /**
     * Whether to include custom footer.
     */
    includeCustomFooter?: string;
    /**
     * If this group should be included in global address list or not.
     */
    includeInGlobalAddressList?: string;
    /**
     * If the contents of the group are archived.
     */
    isArchived?: string;
    /**
     * The type of the resource.
     */
    kind?: string;
    /**
     * Maximum message size allowed, in bytes.
     */
    maxMessageBytes?: number;
    /**
     * Can members post using the group email address.
     */
    membersCanPostAsTheGroup?: string;
    /**
     * Default message display font. Possible values are: DEFAULT_FONT
     * FIXED_WIDTH_FONT
     */
    messageDisplayFont?: string;
    /**
     * Moderation level for messages. Possible values are: MODERATE_ALL_MESSAGES
     * MODERATE_NON_MEMBERS MODERATE_NEW_MEMBERS MODERATE_NONE
     */
    messageModerationLevel?: string;
    /**
     * Name of the Group
     */
    name?: string;
    /**
     * Primary language for the group.
     */
    primaryLanguage?: string;
    /**
     * Whom should the default reply to a message go to. Possible values are:
     * REPLY_TO_CUSTOM REPLY_TO_SENDER REPLY_TO_LIST REPLY_TO_OWNER
     * REPLY_TO_IGNORE REPLY_TO_MANAGERS
     */
    replyTo?: string;
    /**
     * Should the member be notified if their message is denied by owner.
     */
    sendMessageDenyNotification?: string;
    /**
     * Is the group listed in groups directory
     */
    showInGroupDirectory?: string;
    /**
     * Moderation level for messages detected as spam. Possible values are:
     * ALLOW MODERATE SILENTLY_MODERATE REJECT
     */
    spamModerationLevel?: string;
    /**
     * Permissions to add members. Possible values are: ALL_MANAGERS_CAN_ADD
     * ALL_OWNERS_CAN_ADD ALL_MEMBERS_CAN_ADD NONE_CAN_ADD
     */
    whoCanAdd?: string;
    /**
     * Permission to add references to a topic. Possible values are: NONE
     * OWNERS_ONLY MANAGERS_ONLY OWNERS_AND_MANAGERS ALL_MEMBERS
     */
    whoCanAddReferences?: string;
    /**
     * Permission to approve members. Possible values are:
     * ALL_OWNERS_CAN_APPROVE ALL_MANAGERS_CAN_APPROVE ALL_MEMBERS_CAN_APPROVE
     * NONE_CAN_APPROVE
     */
    whoCanApproveMembers?: string;
    /**
     * Permission to approve pending messages in the moderation queue. Possible
     * values are: NONE OWNERS_ONLY OWNERS_AND_MANAGERS ALL_MEMBERS
     */
    whoCanApproveMessages?: string;
    /**
     * Permission to assign topics in a forum to another user. Possible values
     * are: NONE OWNERS_ONLY MANAGERS_ONLY OWNERS_AND_MANAGERS ALL_MEMBERS
     */
    whoCanAssignTopics?: string;
    /**
     * Permission for content assistants. Possible values are: Possible values
     * are: NONE OWNERS_ONLY MANAGERS_ONLY OWNERS_AND_MANAGERS ALL_MEMBERS
     */
    whoCanAssistContent?: string;
    /**
     * Permission to ban users. Possible values are: NONE OWNERS_ONLY
     * OWNERS_AND_MANAGERS ALL_MEMBERS
     */
    whoCanBanUsers?: string;
    /**
     * Permission to contact owner of the group via web UI. Possible values are:
     * ANYONE_CAN_CONTACT ALL_IN_DOMAIN_CAN_CONTACT ALL_MEMBERS_CAN_CONTACT
     * ALL_MANAGERS_CAN_CONTACT
     */
    whoCanContactOwner?: string;
    /**
     * Permission to delete replies to topics. Possible values are: NONE
     * OWNERS_ONLY OWNERS_AND_MANAGERS ALL_MEMBERS
     */
    whoCanDeleteAnyPost?: string;
    /**
     * Permission to delete topics. Possible values are: NONE OWNERS_ONLY
     * OWNERS_AND_MANAGERS ALL_MEMBERS
     */
    whoCanDeleteTopics?: string;
    /**
     * Permission for who can discover the group. Possible values are:
     * ALL_MEMBERS_CAN_DISCOVER ALL_IN_DOMAIN_CAN_DISCOVER ANYONE_CAN_DISCOVER
     */
    whoCanDiscoverGroup?: string;
    /**
     * Permission to enter free form tags for topics in a forum. Possible values
     * are: NONE OWNERS_ONLY MANAGERS_ONLY OWNERS_AND_MANAGERS ALL_MEMBERS
     */
    whoCanEnterFreeFormTags?: string;
    /**
     * Permission to hide posts by reporting them as abuse. Possible values are:
     * NONE OWNERS_ONLY MANAGERS_ONLY OWNERS_AND_MANAGERS ALL_MEMBERS
     */
    whoCanHideAbuse?: string;
    /**
     * Permissions to invite members. Possible values are:
     * ALL_MEMBERS_CAN_INVITE ALL_MANAGERS_CAN_INVITE ALL_OWNERS_CAN_INVITE
     * NONE_CAN_INVITE
     */
    whoCanInvite?: string;
    /**
     * Permissions to join the group. Possible values are: ANYONE_CAN_JOIN
     * ALL_IN_DOMAIN_CAN_JOIN INVITED_CAN_JOIN CAN_REQUEST_TO_JOIN
     */
    whoCanJoin?: string;
    /**
     * Permission to leave the group. Possible values are:
     * ALL_MANAGERS_CAN_LEAVE ALL_OWNERS_CAN_LEAVE ALL_MEMBERS_CAN_LEAVE
     * NONE_CAN_LEAVE
     */
    whoCanLeaveGroup?: string;
    /**
     * Permission to lock topics. Possible values are: NONE OWNERS_ONLY
     * OWNERS_AND_MANAGERS ALL_MEMBERS
     */
    whoCanLockTopics?: string;
    /**
     * Permission to make topics appear at the top of the topic list. Possible
     * values are: NONE OWNERS_ONLY MANAGERS_ONLY OWNERS_AND_MANAGERS
     * ALL_MEMBERS
     */
    whoCanMakeTopicsSticky?: string;
    /**
     * Permission to mark a topic as a duplicate of another topic. Possible
     * values are: NONE OWNERS_ONLY MANAGERS_ONLY OWNERS_AND_MANAGERS
     * ALL_MEMBERS
     */
    whoCanMarkDuplicate?: string;
    /**
     * Permission to mark any other user's post as a favorite reply.
     * Possible values are: NONE OWNERS_ONLY MANAGERS_ONLY OWNERS_AND_MANAGERS
     * ALL_MEMBERS
     */
    whoCanMarkFavoriteReplyOnAnyTopic?: string;
    /**
     * Permission to mark a post for a topic they started as a favorite reply.
     * Possible values are: NONE OWNERS_ONLY MANAGERS_ONLY OWNERS_AND_MANAGERS
     * ALL_MEMBERS
     */
    whoCanMarkFavoriteReplyOnOwnTopic?: string;
    /**
     * Permission to mark a topic as not needing a response. Possible values
     * are: NONE OWNERS_ONLY MANAGERS_ONLY OWNERS_AND_MANAGERS ALL_MEMBERS
     */
    whoCanMarkNoResponseNeeded?: string;
    /**
     * Permission for content moderation. Possible values are: NONE OWNERS_ONLY
     * OWNERS_AND_MANAGERS ALL_MEMBERS
     */
    whoCanModerateContent?: string;
    /**
     * Permission for membership moderation. Possible values are: NONE
     * OWNERS_ONLY OWNERS_AND_MANAGERS ALL_MEMBERS
     */
    whoCanModerateMembers?: string;
    /**
     * Permission to modify members (change member roles). Possible values are:
     * NONE OWNERS_ONLY OWNERS_AND_MANAGERS ALL_MEMBERS
     */
    whoCanModifyMembers?: string;
    /**
     * Permission to change tags and categories. Possible values are: NONE
     * OWNERS_ONLY MANAGERS_ONLY OWNERS_AND_MANAGERS ALL_MEMBERS
     */
    whoCanModifyTagsAndCategories?: string;
    /**
     * Permission to move topics into the group or forum. Possible values are:
     * NONE OWNERS_ONLY OWNERS_AND_MANAGERS ALL_MEMBERS
     */
    whoCanMoveTopicsIn?: string;
    /**
     * Permission to move topics out of the group or forum. Possible values are:
     * NONE OWNERS_ONLY OWNERS_AND_MANAGERS ALL_MEMBERS
     */
    whoCanMoveTopicsOut?: string;
    /**
     * Permission to post announcements, a special topic type. Possible values
     * are: NONE OWNERS_ONLY OWNERS_AND_MANAGERS ALL_MEMBERS
     */
    whoCanPostAnnouncements?: string;
    /**
     * Permissions to post messages to the group. Possible values are:
     * NONE_CAN_POST ALL_MANAGERS_CAN_POST ALL_MEMBERS_CAN_POST
     * ALL_OWNERS_CAN_POST ALL_IN_DOMAIN_CAN_POST ANYONE_CAN_POST
     */
    whoCanPostMessage?: string;
    /**
     * Permission to take topics in a forum. Possible values are: NONE
     * OWNERS_ONLY MANAGERS_ONLY OWNERS_AND_MANAGERS ALL_MEMBERS
     */
    whoCanTakeTopics?: string;
    /**
     * Permission to unassign any topic in a forum. Possible values are: NONE
     * OWNERS_ONLY MANAGERS_ONLY OWNERS_AND_MANAGERS ALL_MEMBERS
     */
    whoCanUnassignTopic?: string;
    /**
     * Permission to unmark any post from a favorite reply. Possible values are:
     * NONE OWNERS_ONLY MANAGERS_ONLY OWNERS_AND_MANAGERS ALL_MEMBERS
     */
    whoCanUnmarkFavoriteReplyOnAnyTopic?: string;
    /**
     * Permissions to view group. Possible values are: ANYONE_CAN_VIEW
     * ALL_IN_DOMAIN_CAN_VIEW ALL_MEMBERS_CAN_VIEW ALL_MANAGERS_CAN_VIEW
     * ALL_OWNERS_CAN_VIEW
     */
    whoCanViewGroup?: string;
    /**
     * Permissions to view membership. Possible values are:
     * ALL_IN_DOMAIN_CAN_VIEW ALL_MEMBERS_CAN_VIEW ALL_MANAGERS_CAN_VIEW
     * ALL_OWNERS_CAN_VIEW
     */
    whoCanViewMembership?: string;
}
/**
 * The `groups` resource: get, patch and update of a group's settings.
 * Each method is overloaded in the usual googleapis style: promise form
 * (params/options only) and callback forms.
 */
class Resource$Groups {
    /** Request context supplied by the owning Groupssettings instance. */
    context: APIRequestContext;
    constructor(context: APIRequestContext);
    /**
     * groupsSettings.groups.get
     * @desc Gets one resource by id.
     * @alias groupsSettings.groups.get
     * @memberOf! ()
     *
     * @param {object} params Parameters for request
     * @param {string} params.groupUniqueId The resource ID
     * @param {object} [options] Optionally override request options, such as `url`, `method`, and `encoding`.
     * @param {callback} callback The callback that handles the response.
     * @return {object} Request object
     */
    get(params?: Params$Resource$Groups$Get, options?: MethodOptions): GaxiosPromise<Schema$Groups>;
    get(params: Params$Resource$Groups$Get, options: MethodOptions | BodyResponseCallback<Schema$Groups>, callback: BodyResponseCallback<Schema$Groups>): void;
    get(params: Params$Resource$Groups$Get, callback: BodyResponseCallback<Schema$Groups>): void;
    get(callback: BodyResponseCallback<Schema$Groups>): void;
    /**
     * groupsSettings.groups.patch
     * @desc Updates an existing resource. This method supports patch semantics.
     * @alias groupsSettings.groups.patch
     * @memberOf! ()
     *
     * @param {object} params Parameters for request
     * @param {string} params.groupUniqueId The resource ID
     * @param {().Groups} params.resource Request body data
     * @param {object} [options] Optionally override request options, such as `url`, `method`, and `encoding`.
     * @param {callback} callback The callback that handles the response.
     * @return {object} Request object
     */
    patch(params?: Params$Resource$Groups$Patch, options?: MethodOptions): GaxiosPromise<Schema$Groups>;
    patch(params: Params$Resource$Groups$Patch, options: MethodOptions | BodyResponseCallback<Schema$Groups>, callback: BodyResponseCallback<Schema$Groups>): void;
    patch(params: Params$Resource$Groups$Patch, callback: BodyResponseCallback<Schema$Groups>): void;
    patch(callback: BodyResponseCallback<Schema$Groups>): void;
    /**
     * groupsSettings.groups.update
     * @desc Updates an existing resource.
     * @alias groupsSettings.groups.update
     * @memberOf! ()
     *
     * @param {object} params Parameters for request
     * @param {string} params.groupUniqueId The resource ID
     * @param {().Groups} params.resource Request body data
     * @param {object} [options] Optionally override request options, such as `url`, `method`, and `encoding`.
     * @param {callback} callback The callback that handles the response.
     * @return {object} Request object
     */
    update(params?: Params$Resource$Groups$Update, options?: MethodOptions): GaxiosPromise<Schema$Groups>;
    update(params: Params$Resource$Groups$Update, options: MethodOptions | BodyResponseCallback<Schema$Groups>, callback: BodyResponseCallback<Schema$Groups>): void;
    update(params: Params$Resource$Groups$Update, callback: BodyResponseCallback<Schema$Groups>): void;
    update(callback: BodyResponseCallback<Schema$Groups>): void;
}
/**
 * Request parameters for `groups.get`.
 */
interface Params$Resource$Groups$Get extends StandardParameters {
    /**
     * Auth client or API Key for the request
     */
    auth?: string | OAuth2Client | JWT | Compute | UserRefreshClient;
    /**
     * The resource ID
     */
    groupUniqueId?: string;
}
/**
 * Request parameters for `groups.patch`.
 */
interface Params$Resource$Groups$Patch extends StandardParameters {
    /**
     * Auth client or API Key for the request
     */
    auth?: string | OAuth2Client | JWT | Compute | UserRefreshClient;
    /**
     * The resource ID
     */
    groupUniqueId?: string;
    /**
     * Request body metadata
     */
    requestBody?: Schema$Groups;
}
/**
 * Request parameters for `groups.update`.
 */
interface Params$Resource$Groups$Update extends StandardParameters {
    /**
     * Auth client or API Key for the request
     */
    auth?: string | OAuth2Client | JWT | Compute | UserRefreshClient;
    /**
     * The resource ID
     */
    groupUniqueId?: string;
    /**
     * Request body metadata
     */
    requestBody?: Schema$Groups;
}
} | the_stack |
import * as pulumi from "@pulumi/pulumi";
import * as utilities from "../utilities";
import {ARN} from "..";
/**
 * Manages an EC2 Transit Gateway.
 *
 * ## Example Usage
 *
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as aws from "@pulumi/aws";
 *
 * const example = new aws.ec2transitgateway.TransitGateway("example", {
 *     description: "example",
 * });
 * ```
 *
 * ## Import
 *
 * `aws_ec2_transit_gateway` can be imported by using the EC2 Transit Gateway identifier, e.g.
 *
 * ```sh
 *  $ pulumi import aws:ec2transitgateway/transitGateway:TransitGateway example tgw-12345678
 * ```
 */
export class TransitGateway extends pulumi.CustomResource {
    /**
     * Looks up an existing TransitGateway resource by name and provider ID,
     * optionally seeding the result with extra state properties.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    public static get(name: string, id: pulumi.Input<pulumi.ID>, state?: TransitGatewayState, opts?: pulumi.CustomResourceOptions): TransitGateway {
        return new TransitGateway(name, state as any, { ...opts, id });
    }

    /** @internal */
    public static readonly __pulumiType = 'aws:ec2transitgateway/transitGateway:TransitGateway';

    /**
     * Returns true if the given object is an instance of TransitGateway. Works even
     * when multiple copies of the Pulumi SDK have been loaded into the same process,
     * because it compares the type token rather than using `instanceof`.
     */
    public static isInstance(obj: any): obj is TransitGateway {
        return obj != null && obj['__pulumiType'] === TransitGateway.__pulumiType;
    }

    /**
     * Private Autonomous System Number (ASN) for the Amazon side of a BGP session. The range is `64512` to `65534` for 16-bit ASNs and `4200000000` to `4294967294` for 32-bit ASNs. Default value: `64512`.
     */
    public readonly amazonSideAsn!: pulumi.Output<number | undefined>;
    /**
     * EC2 Transit Gateway Amazon Resource Name (ARN).
     */
    public /*out*/ readonly arn!: pulumi.Output<ARN>;
    /**
     * Identifier of the default association route table.
     */
    public /*out*/ readonly associationDefaultRouteTableId!: pulumi.Output<string>;
    /**
     * Whether resource attachment requests are automatically accepted. Valid values: `disable`, `enable`. Default value: `disable`.
     */
    public readonly autoAcceptSharedAttachments!: pulumi.Output<string | undefined>;
    /**
     * Whether resource attachments are automatically associated with the default association route table. Valid values: `disable`, `enable`. Default value: `enable`.
     */
    public readonly defaultRouteTableAssociation!: pulumi.Output<string | undefined>;
    /**
     * Whether resource attachments automatically propagate routes to the default propagation route table. Valid values: `disable`, `enable`. Default value: `enable`.
     */
    public readonly defaultRouteTablePropagation!: pulumi.Output<string | undefined>;
    /**
     * Description of the EC2 Transit Gateway.
     */
    public readonly description!: pulumi.Output<string | undefined>;
    /**
     * Whether DNS support is enabled. Valid values: `disable`, `enable`. Default value: `enable`.
     */
    public readonly dnsSupport!: pulumi.Output<string | undefined>;
    /**
     * Identifier of the AWS account that owns the EC2 Transit Gateway.
     */
    public /*out*/ readonly ownerId!: pulumi.Output<string>;
    /**
     * Identifier of the default propagation route table.
     */
    public /*out*/ readonly propagationDefaultRouteTableId!: pulumi.Output<string>;
    public readonly tags!: pulumi.Output<{[key: string]: string} | undefined>;
    public /*out*/ readonly tagsAll!: pulumi.Output<{[key: string]: string}>;
    /**
     * Whether VPN Equal Cost Multipath Protocol support is enabled. Valid values: `disable`, `enable`. Default value: `enable`.
     */
    public readonly vpnEcmpSupport!: pulumi.Output<string | undefined>;

    /**
     * Create a TransitGateway resource with the given unique name, arguments, and options.
     *
     * @param name The _unique_ name of the resource.
     * @param args The arguments to use to populate this resource's properties.
     * @param opts A bag of options that control this resource's behavior.
     */
    constructor(name: string, args?: TransitGatewayArgs, opts?: pulumi.CustomResourceOptions)
    constructor(name: string, argsOrState?: TransitGatewayArgs | TransitGatewayState, opts?: pulumi.CustomResourceOptions) {
        const inputs: pulumi.Inputs = {};
        opts = opts || {};
        if (opts.id) {
            // An ID was supplied: this is a lookup, so populate inputs from recorded state.
            const state = argsOrState as TransitGatewayState | undefined;
            inputs["amazonSideAsn"] = state?.amazonSideAsn;
            inputs["arn"] = state?.arn;
            inputs["associationDefaultRouteTableId"] = state?.associationDefaultRouteTableId;
            inputs["autoAcceptSharedAttachments"] = state?.autoAcceptSharedAttachments;
            inputs["defaultRouteTableAssociation"] = state?.defaultRouteTableAssociation;
            inputs["defaultRouteTablePropagation"] = state?.defaultRouteTablePropagation;
            inputs["description"] = state?.description;
            inputs["dnsSupport"] = state?.dnsSupport;
            inputs["ownerId"] = state?.ownerId;
            inputs["propagationDefaultRouteTableId"] = state?.propagationDefaultRouteTableId;
            inputs["tags"] = state?.tags;
            inputs["tagsAll"] = state?.tagsAll;
            inputs["vpnEcmpSupport"] = state?.vpnEcmpSupport;
        } else {
            // No ID: this is a creation, so populate inputs from the user-supplied args.
            const args = argsOrState as TransitGatewayArgs | undefined;
            inputs["amazonSideAsn"] = args?.amazonSideAsn;
            inputs["autoAcceptSharedAttachments"] = args?.autoAcceptSharedAttachments;
            inputs["defaultRouteTableAssociation"] = args?.defaultRouteTableAssociation;
            inputs["defaultRouteTablePropagation"] = args?.defaultRouteTablePropagation;
            inputs["description"] = args?.description;
            inputs["dnsSupport"] = args?.dnsSupport;
            inputs["tags"] = args?.tags;
            inputs["vpnEcmpSupport"] = args?.vpnEcmpSupport;
            // Output-only properties: registered as keys but filled in by the provider.
            inputs["arn"] = undefined /*out*/;
            inputs["associationDefaultRouteTableId"] = undefined /*out*/;
            inputs["ownerId"] = undefined /*out*/;
            inputs["propagationDefaultRouteTableId"] = undefined /*out*/;
            inputs["tagsAll"] = undefined /*out*/;
        }
        if (!opts.version) {
            opts = pulumi.mergeOptions(opts, { version: utilities.getVersion() });
        }
        super(TransitGateway.__pulumiType, name, inputs, opts);
    }
}
/**
 * Input properties used for looking up and filtering TransitGateway resources.
 */
export interface TransitGatewayState {
    /**
     * Private Autonomous System Number (ASN) for the Amazon side of a BGP session. The range is `64512` to `65534` for 16-bit ASNs and `4200000000` to `4294967294` for 32-bit ASNs. Default value: `64512`.
     */
    amazonSideAsn?: pulumi.Input<number>;
    /**
     * EC2 Transit Gateway Amazon Resource Name (ARN)
     */
    arn?: pulumi.Input<ARN>;
    /**
     * Identifier of the default association route table
     */
    associationDefaultRouteTableId?: pulumi.Input<string>;
    /**
     * Whether resource attachment requests are automatically accepted. Valid values: `disable`, `enable`. Default value: `disable`.
     */
    autoAcceptSharedAttachments?: pulumi.Input<string>;
    /**
     * Whether resource attachments are automatically associated with the default association route table. Valid values: `disable`, `enable`. Default value: `enable`.
     */
    defaultRouteTableAssociation?: pulumi.Input<string>;
    /**
     * Whether resource attachments automatically propagate routes to the default propagation route table. Valid values: `disable`, `enable`. Default value: `enable`.
     */
    defaultRouteTablePropagation?: pulumi.Input<string>;
    /**
     * Description of the EC2 Transit Gateway.
     */
    description?: pulumi.Input<string>;
    /**
     * Whether DNS support is enabled. Valid values: `disable`, `enable`. Default value: `enable`.
     */
    dnsSupport?: pulumi.Input<string>;
    /**
     * Identifier of the AWS account that owns the EC2 Transit Gateway
     */
    ownerId?: pulumi.Input<string>;
    /**
     * Identifier of the default propagation route table
     */
    propagationDefaultRouteTableId?: pulumi.Input<string>;
    /**
     * Key-value map of tags assigned to the resource.
     */
    tags?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * Map of all tags on the resource (presumably includes provider-level default
     * tags in addition to `tags` — confirm against the provider documentation).
     */
    tagsAll?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * Whether VPN Equal Cost Multipath Protocol support is enabled. Valid values: `disable`, `enable`. Default value: `enable`.
     */
    vpnEcmpSupport?: pulumi.Input<string>;
}
/**
 * The set of arguments for constructing a TransitGateway resource.
 */
export interface TransitGatewayArgs {
    /**
     * Private Autonomous System Number (ASN) for the Amazon side of a BGP session. The range is `64512` to `65534` for 16-bit ASNs and `4200000000` to `4294967294` for 32-bit ASNs. Default value: `64512`.
     */
    amazonSideAsn?: pulumi.Input<number>;
    /**
     * Whether resource attachment requests are automatically accepted. Valid values: `disable`, `enable`. Default value: `disable`.
     */
    autoAcceptSharedAttachments?: pulumi.Input<string>;
    /**
     * Whether resource attachments are automatically associated with the default association route table. Valid values: `disable`, `enable`. Default value: `enable`.
     */
    defaultRouteTableAssociation?: pulumi.Input<string>;
    /**
     * Whether resource attachments automatically propagate routes to the default propagation route table. Valid values: `disable`, `enable`. Default value: `enable`.
     */
    defaultRouteTablePropagation?: pulumi.Input<string>;
    /**
     * Description of the EC2 Transit Gateway.
     */
    description?: pulumi.Input<string>;
    /**
     * Whether DNS support is enabled. Valid values: `disable`, `enable`. Default value: `enable`.
     */
    dnsSupport?: pulumi.Input<string>;
    /**
     * Key-value map of tags to assign to the resource.
     */
    tags?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * Whether VPN Equal Cost Multipath Protocol support is enabled. Valid values: `disable`, `enable`. Default value: `enable`.
     */
    vpnEcmpSupport?: pulumi.Input<string>;
}
import {
Component,
OnInit,
Input,
ViewChild,
HostBinding,
Renderer2,
ElementRef,
ChangeDetectionStrategy,
OnDestroy,
ChangeDetectorRef,
NgZone,
Injector,
forwardRef,
OnChanges,
SimpleChanges,
AfterViewChecked,
DoCheck
} from '@angular/core';
import { NODE_TO_ELEMENT, IS_FOCUSED, EDITOR_TO_ELEMENT, ELEMENT_TO_NODE, IS_READONLY, EDITOR_TO_ON_CHANGE, EDITOR_TO_WINDOW } from '../../utils/weak-maps';
import { Text as SlateText, Element, Transforms, Editor, Range, Path, NodeEntry, Node, Descendant } from 'slate';
import getDirection from 'direction';
import { AngularEditor } from '../../plugins/angular-editor';
import {
DOMElement,
DOMNode,
isDOMNode,
DOMStaticRange,
DOMRange,
isDOMElement,
isPlainTextOnlyPaste,
DOMSelection,
getDefaultView
} from '../../utils/dom';
import { Subject, interval } from 'rxjs';
import { takeUntil, throttle } from 'rxjs/operators';
import { IS_FIREFOX, IS_SAFARI, IS_EDGE_LEGACY, IS_CHROME_LEGACY, IS_CHROME } from '../../utils/environment';
import Hotkeys from '../../utils/hotkeys';
import { BeforeInputEvent, extractBeforeInputEvent } from '../../custom-event/BeforeInputEventPlugin';
import { BEFORE_INPUT_EVENTS } from '../../custom-event/before-input-polyfill';
import { SlateErrorCode } from '../../types/error';
import Debug from 'debug';
import { SlateStringTemplateComponent } from '../string/template.component';
import { NG_VALUE_ACCESSOR } from '@angular/forms';
import { SlateChildrenContext, SlateViewContext } from '../../view/context';
import { ViewType } from '../../types/view';
import { HistoryEditor } from 'slate-history';
import { isDecoratorRangeListEqual } from '../../utils';
import { check, normalize } from '../../utils/global-normalize';
import { SlatePlaceholder } from '../../types/feature';
// Debug channel used for timing measurements throughout this component.
const timeDebug = Debug('slate-angular-time');

// COMPAT: Firefox/Edge Legacy don't support the `beforeinput` event
// Chrome Legacy doesn't support `beforeinput` correctly
const HAS_BEFORE_INPUT_SUPPORT =
    !IS_CHROME_LEGACY &&
    !IS_EDGE_LEGACY &&
    globalThis.InputEvent &&
    // @ts-ignore The `getTargetRanges` property isn't recognized.
    typeof globalThis.InputEvent.prototype.getTargetRanges === 'function';

// Safari does not expose correct clipboardData on `beforeinput`,
// so paste must always go through the `paste` event there.
const forceOnDOMPaste = IS_SAFARI;
@Component({
selector: 'slate-editable',
host: {
class: 'slate-editable-container',
'[attr.contenteditable]': 'readonly ? undefined : true',
'[attr.role]': `readonly ? undefined : 'textbox'`,
'[attr.spellCheck]': `!hasBeforeInputSupport ? false : spellCheck`,
'[attr.autoCorrect]': `!hasBeforeInputSupport ? 'false' : autoCorrect`,
'[attr.autoCapitalize]': `!hasBeforeInputSupport ? 'false' : autoCapitalize`
},
templateUrl: 'editable.component.html',
changeDetection: ChangeDetectionStrategy.OnPush,
providers: [{
provide: NG_VALUE_ACCESSOR,
useExisting: forwardRef(() => SlateEditableComponent),
multi: true
}]
})
export class SlateEditableComponent implements OnInit, OnChanges, OnDestroy, AfterViewChecked, DoCheck {
// View context shared with child views; built in initializeViewContext() during ngOnInit.
viewContext: SlateViewContext;
// Children rendering context; built in initializeContext() and rebuilt on writeValue().
context: SlateChildrenContext;
// Emits on ngOnDestroy to tear down subscriptions.
private destroy$ = new Subject();
// IME composition in progress flag.
isComposing = false;
// True while a drag that originated inside this editor is in flight.
isDraggingInternally = false;
// Guards against re-entrant native<->slate selection syncing.
isUpdatingSelection = false;
latestElement = null as DOMElement | null;
// Teardown functions for manually attached DOM listeners (see initialize()).
protected manualListeners: (() => void)[] = [];
// Set by initialize(); ngOnChanges is a no-op until then.
private initialized: boolean;
// ControlValueAccessor callbacks registered by the forms API.
private onTouchedCallback: () => void = () => { };
private onChangeCallback: (_: any) => void = () => { };
@Input() editor: AngularEditor;
// Optional custom renderers for elements / leaves / text nodes.
@Input() renderElement: (element: Element) => ViewType | null;
@Input() renderLeaf: (text: SlateText) => ViewType | null;
@Input() renderText: (text: SlateText) => ViewType | null;
// Produces decoration ranges for a node entry; defaults to no decorations.
@Input() decorate: (entry: NodeEntry) => Range[] = () => [];
// Default placeholder decoration: shown only when the document consists of a
// single empty text node.
@Input() placeholderDecorate: (editor: Editor) => SlatePlaceholder[] = (editor) => {
    if (this.placeholder &&
        editor.children.length === 1 &&
        Array.from(Node.texts(editor)).length === 1 &&
        Node.string(editor) === '') {
        const start = Editor.start(editor, []);
        return [{
            placeholder: this.placeholder,
            anchor: start,
            focus: start
        }];
    } else {
        return [];
    }
};
@Input() isStrictDecorate: boolean = true;
@Input() trackBy: (node: Element) => any = () => null;
@Input() readonly = false;
@Input() placeholder: string;
//#region input event handler
@Input() beforeInput: (event: Event) => void;
@Input() blur: (event: Event) => void;
@Input() click: (event: MouseEvent) => void;
@Input() compositionEnd: (event: CompositionEvent) => void;
@Input() compositionStart: (event: CompositionEvent) => void;
@Input() copy: (event: ClipboardEvent) => void;
@Input() cut: (event: ClipboardEvent) => void;
@Input() dragOver: (event: DragEvent) => void;
@Input() dragStart: (event: DragEvent) => void;
@Input() dragEnd: (event: DragEvent) => void;
@Input() drop: (event: DragEvent) => void;
@Input() focus: (event: Event) => void;
@Input() keydown: (event: KeyboardEvent) => void;
// NOTE(review): paste handlers receive ClipboardEvent at runtime, not KeyboardEvent —
// this type looks wrong, but changing it would alter the public input signature.
@Input() paste: (event: KeyboardEvent) => void;
//#endregion
//#region DOM attr
@Input() spellCheck = false;
@Input() autoCorrect = false;
@Input() autoCapitalize = false;
@HostBinding('attr.data-slate-editor') dataSlateEditor = true;
@HostBinding('attr.data-slate-node') dataSlateNode = 'value';
// Disables Grammarly injection into the editable host.
@HostBinding('attr.data-gramm') dataGramm = false;
// Exposed to the host bindings above to gate spellcheck/autocorrect attributes.
get hasBeforeInputSupport() {
    return HAS_BEFORE_INPUT_SUPPORT;
}
//#endregion
// Template holder: the component instance is kept, but its DOM is removed in ngOnInit.
@ViewChild('templateComponent', { static: true }) templateComponent: SlateStringTemplateComponent;
@ViewChild('templateComponent', { static: true, read: ElementRef }) templateElementRef: ElementRef<any>;
// Pure dependency injection; all setup happens in ngOnInit.
constructor(
    public elementRef: ElementRef,
    public renderer2: Renderer2,
    public cdr: ChangeDetectorRef,
    private ngZone: NgZone,
    private injector: Injector
) { }
ngOnInit() {
    // Expose the Angular injector to editor plugins running outside DI.
    this.editor.injector = this.injector;
    this.editor.children = [];
    let window = getDefaultView(this.elementRef.nativeElement);
    // Register the editor <-> DOM mappings the AngularEditor helpers rely on.
    EDITOR_TO_WINDOW.set(this.editor, window);
    EDITOR_TO_ELEMENT.set(this.editor, this.elementRef.nativeElement);
    NODE_TO_ELEMENT.set(this.editor, this.elementRef.nativeElement);
    ELEMENT_TO_NODE.set(this.elementRef.nativeElement, this.editor);
    IS_READONLY.set(this.editor, this.readonly);
    // Editor change notifications originate outside Angular, so re-enter the
    // zone before running the change pipeline.
    EDITOR_TO_ON_CHANGE.set(this.editor, () => {
        this.ngZone.run(() => {
            this.onChange();
        });
    });
    // DOM listeners are attached outside the zone to avoid a change-detection
    // cycle per raw DOM event.
    this.ngZone.runOutsideAngular(() => {
        this.initialize();
    });
    this.initializeViewContext();
    this.initializeContext();
    // remove unused DOM, just keep templateComponent instance
    this.templateElementRef.nativeElement.remove();
}
/**
 * Reacts to input changes after initialization: a new `decorate` forces a full
 * re-render, and a `readonly` flip updates the editor registry and re-syncs state.
 */
ngOnChanges(simpleChanges: SimpleChanges) {
    // Nothing is wired up before initialize(); ignore early change notifications.
    if (!this.initialized) {
        return;
    }
    if (simpleChanges['decorate']) {
        this.forceFlush();
    }
    if (simpleChanges['readonly']) {
        IS_READONLY.set(this.editor, this.readonly);
        this.detectContext();
        this.toSlateSelection();
    }
}
// ControlValueAccessor: store the callback that is notified with the
// editor's children whenever the document changes (see onChange()).
registerOnChange(fn: any) {
  this.onChangeCallback = fn;
}
// ControlValueAccessor: store the touched callback.
registerOnTouched(fn: any) {
  this.onTouchedCallback = fn;
}
/**
 * ControlValueAccessor: accept a new document from the bound form control.
 * Invalid documents are reported via editor.onError and normalized before
 * being applied. NOTE(review): an empty/null value is ignored rather than
 * clearing the editor — confirm this is intentional.
 */
writeValue(value: Element[]) {
  if (value && value.length) {
    if (check(value)) {
      this.editor.children = value;
    } else {
      this.editor.onError({
        code: SlateErrorCode.InvalidValueError,
        name: 'initialize invalid data',
        data: value
      });
      // fall back to a normalized copy so rendering can proceed
      this.editor.children = normalize(value);
    }
    this.initializeContext();
    this.cdr.markForCheck();
  }
}
/**
 * Attach all DOM event listeners (called once from ngOnInit, outside the
 * Angular zone). `selectionchange` is bound on the document because it
 * does not fire on the editable element itself.
 */
initialize() {
  this.initialized = true;
  const window = AngularEditor.getWindow(this.editor);
  this.addEventListener(
    'selectionchange',
    event => {
      this.toSlateSelection();
    },
    window.document
  );
  // Use the native beforeinput event when the browser supports it;
  // otherwise input intent is reconstructed in onFallbackBeforeInput.
  if (HAS_BEFORE_INPUT_SUPPORT) {
    this.addEventListener('beforeinput', this.onDOMBeforeInput.bind(this));
  }
  this.addEventListener('blur', this.onDOMBlur.bind(this));
  this.addEventListener('click', this.onDOMClick.bind(this));
  this.addEventListener('compositionend', this.onDOMCompositionEnd.bind(this));
  this.addEventListener('compositionstart', this.onDOMCompositionStart.bind(this));
  this.addEventListener('copy', this.onDOMCopy.bind(this));
  this.addEventListener('cut', this.onDOMCut.bind(this));
  this.addEventListener('dragover', this.onDOMDragOver.bind(this));
  this.addEventListener('dragstart', this.onDOMDragStart.bind(this));
  this.addEventListener('dragend', this.onDOMDragEnd.bind(this));
  this.addEventListener('drop', this.onDOMDrop.bind(this));
  this.addEventListener('focus', this.onDOMFocus.bind(this));
  this.addEventListener('keydown', this.onDOMKeydown.bind(this));
  this.addEventListener('paste', this.onDOMPaste.bind(this));
  // No-op listeners for every fallback beforeinput source event —
  // presumably so addEventListener's extractBeforeInputEvent hook still
  // runs for these types; TODO confirm.
  BEFORE_INPUT_EVENTS.forEach(event => {
    this.addEventListener(event.name, () => { });
  });
}
/**
 * Sync the editor's (slate) selection out to the native DOM selection.
 * Skipped while composing or unfocused, and when the DOM already matches.
 * Sets isUpdatingSelection so the resulting `selectionchange` event is not
 * fed back through toSlateSelection().
 */
toNativeSelection() {
  try {
    const { selection } = this.editor;
    const root = AngularEditor.findDocumentOrShadowRoot(this.editor)
    const domSelection = root.getSelection();
    if (this.isComposing || !domSelection || !AngularEditor.isFocused(this.editor)) {
      return;
    }
    const hasDomSelection = domSelection.type !== 'None';
    // If the DOM selection is properly unset, we're done.
    if (!selection && !hasDomSelection) {
      return;
    }
    // If the DOM selection is already correct, we're done.
    // verify that the dom selection is in the editor
    const editorElement = EDITOR_TO_ELEMENT.get(this.editor)!;
    let hasDomSelectionInEditor = false;
    if (editorElement.contains(domSelection.anchorNode) && editorElement.contains(domSelection.focusNode)) {
      hasDomSelectionInEditor = true;
    }
    // If the DOM selection is in the editor and the editor selection is already correct, we're done.
    if (
      hasDomSelection &&
      hasDomSelectionInEditor &&
      selection &&
      hasStringTarget(domSelection) &&
      Range.equals(AngularEditor.toSlateRange(this.editor, domSelection), selection)
    ) {
      return;
    }
    // when <Editable/> is being controlled through external value
    // then its children might just change - DOM responds to it on its own
    // but Slate's value is not being updated through any operation
    // and thus it doesn't transform selection on its own
    if (selection && !AngularEditor.hasRange(this.editor, selection)) {
      this.editor.selection = AngularEditor.toSlateRange(this.editor, domSelection);
      return
    }
    // Otherwise the DOM selection is out of sync, so update it.
    const el = AngularEditor.toDOMNode(this.editor, this.editor);
    this.isUpdatingSelection = true;
    const newDomRange = selection && AngularEditor.toDOMRange(this.editor, selection);
    if (newDomRange) {
      // COMPAT: Since the DOM range has no concept of backwards/forwards
      // we need to check and do the right thing here.
      if (Range.isBackward(selection)) {
        // eslint-disable-next-line max-len
        domSelection.setBaseAndExtent(
          newDomRange.endContainer,
          newDomRange.endOffset,
          newDomRange.startContainer,
          newDomRange.startOffset
        );
      } else {
        // eslint-disable-next-line max-len
        domSelection.setBaseAndExtent(
          newDomRange.startContainer,
          newDomRange.startOffset,
          newDomRange.endContainer,
          newDomRange.endOffset
        );
      }
    }
    // Release the re-entrancy guard on the next task, after the browser has
    // dispatched the selectionchange triggered above.
    setTimeout(() => {
      // COMPAT: In Firefox, it's not enough to create a range, you also need
      // to focus the contenteditable element too. (2016/11/16)
      if (newDomRange && IS_FIREFOX) {
        el.focus();
      }
      this.isUpdatingSelection = false;
    });
  } catch (error) {
    this.editor.onError({ code: SlateErrorCode.ToNativeSelectionError, nativeError: error })
  }
}
/**
 * Invoked by slate after every applied operation (via EDITOR_TO_ON_CHANGE):
 * re-render synchronously, then notify the bound form control.
 */
onChange() {
  this.forceFlush();
  this.onChangeCallback(this.editor.children);
}
ngAfterViewChecked() {
  // performance tracing only
  timeDebug('editable ngAfterViewChecked');
}
ngDoCheck() {
  // performance tracing only
  timeDebug('editable ngDoCheck');
}
/**
 * Synchronously re-render the document and push the slate selection back
 * into the DOM. Also repairs a stuck `isComposing` flag that can occur in
 * collaborative editing.
 */
forceFlush() {
  timeDebug('start data sync');
  this.detectContext();
  this.cdr.detectChanges();
  // repair collaborative editing when Chinese input is interrupted by other users' cursors
  // when the DOMElement where the selection is located is removed
  // the compositionupdate and compositionend events will no longer be fired
  // so isComposing needs to be corrected
  // need exec after this.cdr.detectChanges() to render HTML
  // need exec before this.toNativeSelection() to correct native selection
  if (this.isComposing) {
    // Composition text may not be rendered yet when the user composes over an
    // expanded selection; matching too early would wrongly reset isComposing.
    // So wait one task for the composition text to render before comparing.
    setTimeout(() => {
      const textNode = Node.get(this.editor, this.editor.selection.anchor.path);
      const textDOMNode = AngularEditor.toDOMNode(this.editor, textNode);
      let textContent = '';
      // skip decorate text
      textDOMNode.querySelectorAll('[editable-text]').forEach((stringDOMNode) => {
        let text = stringDOMNode.textContent;
        const zeroChar = '\uFEFF';
        // strip the zero-width (BOM) placeholder characters at either end
        if (text.startsWith(zeroChar)) {
          text = text.slice(1);
        }
        if (text.endsWith(zeroChar)) {
          text = text.slice(0, text.length - 1);
        }
        textContent += text;
      });
      // DOM text matches the slate text -> composition was already committed
      if (Node.string(textNode).endsWith(textContent)) {
        this.isComposing = false;
      }
    }, 0);
  }
  this.toNativeSelection();
  timeDebug('end data sync');
}
/** Build a fresh node context handed down to the child element/leaf views. */
initializeContext() {
  const editor = this.editor;
  this.context = {
    parent: editor,
    selection: editor.selection,
    decorations: this.generateDecorations(),
    decorate: this.decorate,
    readonly: this.readonly
  };
}
/** Build the per-editor view context (render callbacks + shared template). */
initializeViewContext() {
  const { renderElement, renderLeaf, renderText, trackBy, isStrictDecorate, templateComponent } = this;
  this.viewContext = {
    editor: this.editor,
    renderElement,
    renderLeaf,
    renderText,
    trackBy,
    isStrictDecorate,
    templateComponent
  };
}
/**
 * Rebuild the node context only when something it carries actually changed
 * (selection, decorate fn, readonly flag, or the decoration ranges) so child
 * views keep reference equality and skip re-rendering otherwise.
 */
detectContext() {
  const decorations = this.generateDecorations();
  const previous = this.context;
  const unchanged =
    previous.selection === this.editor.selection &&
    previous.decorate === this.decorate &&
    previous.readonly === this.readonly &&
    isDecoratorRangeListEqual(previous.decorations, decorations);
  if (!unchanged) {
    this.context = {
      parent: this.editor,
      selection: this.editor.selection,
      decorations,
      decorate: this.decorate,
      readonly: this.readonly
    };
  }
}
/**
 * Collect decorations for the whole document; placeholder decorations are
 * suppressed while an IME composition is in progress.
 */
generateDecorations() {
  const decorations = this.decorate([this.editor, []]);
  if (!this.isComposing) {
    decorations.push(...this.placeholderDecorate(this.editor));
  }
  return decorations;
}
//#region event proxy
/**
 * Register a DOM listener via Renderer2; the returned teardown is stored in
 * manualListeners for ngOnDestroy. Every event is first passed through
 * extractBeforeInputEvent, which may synthesize a fallback `beforeinput`
 * event for browsers without native support.
 */
private addEventListener(eventName: string, listener: EventListener, target: HTMLElement | Document = this.elementRef.nativeElement) {
  this.manualListeners.push(
    this.renderer2.listen(target, eventName, (event: Event) => {
      const beforeInputEvent = extractBeforeInputEvent(event.type, null, event, event.target);
      if (beforeInputEvent) {
        this.onFallbackBeforeInput(beforeInputEvent);
      }
      listener(event);
    })
  );
}
/**
 * Sync the native DOM selection into the editor's slate selection (inverse
 * of toNativeSelection). Suppressed while composing, while we ourselves are
 * updating the DOM selection, during internal drags, and in readonly mode.
 */
private toSlateSelection() {
  if (!this.readonly && !this.isComposing && !this.isUpdatingSelection && !this.isDraggingInternally) {
    try {
      const root = AngularEditor.findDocumentOrShadowRoot(this.editor)
      const { activeElement } = root;
      const el = AngularEditor.toDOMNode(this.editor, this.editor);
      const domSelection = root.getSelection();
      // track focus state from the document's active element
      if (activeElement === el || hasEditableTarget(this.editor, activeElement)) {
        this.latestElement = activeElement;
        IS_FOCUSED.set(this.editor, true);
      } else {
        IS_FOCUSED.delete(this.editor);
      }
      if (!domSelection) {
        return Transforms.deselect(this.editor);
      }
      const editorElement = EDITOR_TO_ELEMENT.get(this.editor);
      const hasDomSelectionInEditor = editorElement.contains(domSelection.anchorNode) && editorElement.contains(domSelection.focusNode);
      if (!hasDomSelectionInEditor) {
        Transforms.deselect(this.editor);
        return;
      }
      // try to get the selection directly, because some terrible case can be normalize for normalizeDOMPoint
      // for example, double-click the last cell of the table to select a non-editable DOM
      const range = AngularEditor.toSlateRange(this.editor, domSelection);
      if (this.editor.selection && Range.equals(range, this.editor.selection) && !hasStringTarget(domSelection)) {
        // force adjust DOMSelection
        this.toNativeSelection();
      } else {
        Transforms.select(this.editor, range);
      }
    } catch (error) {
      this.editor.onError({ code: SlateErrorCode.ToSlateSelectionError, nativeError: error })
    }
  }
}
/**
 * Native `beforeinput` handler: cancels the browser's default edit and
 * replays the intent (insert/delete variants keyed by inputType) through
 * slate's Editor API so the document stays the source of truth.
 */
private onDOMBeforeInput(
  event: Event & {
    inputType: string;
    isComposing: boolean;
    data: string | null;
    dataTransfer: DataTransfer | null;
    getTargetRanges(): DOMStaticRange[];
  }
) {
  const editor = this.editor;
  if (!this.readonly && hasEditableTarget(editor, event.target) && !this.isDOMEventHandled(event, this.beforeInput)) {
    try {
      const { selection } = editor;
      const { inputType: type } = event;
      const data = event.dataTransfer || event.data || undefined;
      event.preventDefault();
      // COMPAT: If the selection is expanded, even if the command seems like
      // a delete forward/backward command it should delete the selection.
      if (selection && Range.isExpanded(selection) && type.startsWith('delete')) {
        const direction = type.endsWith('Backward') ? 'backward' : 'forward';
        Editor.deleteFragment(editor, { direction });
        return;
      }
      switch (type) {
        case 'deleteByComposition':
        case 'deleteByCut':
        case 'deleteByDrag': {
          Editor.deleteFragment(editor);
          break;
        }
        case 'deleteContent':
        case 'deleteContentForward': {
          Editor.deleteForward(editor);
          break;
        }
        case 'deleteContentBackward': {
          Editor.deleteBackward(editor);
          break;
        }
        case 'deleteEntireSoftLine': {
          Editor.deleteBackward(editor, { unit: 'line' });
          Editor.deleteForward(editor, { unit: 'line' });
          break;
        }
        case 'deleteHardLineBackward': {
          Editor.deleteBackward(editor, { unit: 'block' });
          break;
        }
        case 'deleteSoftLineBackward': {
          Editor.deleteBackward(editor, { unit: 'line' });
          break;
        }
        case 'deleteHardLineForward': {
          Editor.deleteForward(editor, { unit: 'block' });
          break;
        }
        case 'deleteSoftLineForward': {
          Editor.deleteForward(editor, { unit: 'line' });
          break;
        }
        case 'deleteWordBackward': {
          Editor.deleteBackward(editor, { unit: 'word' });
          break;
        }
        case 'deleteWordForward': {
          Editor.deleteForward(editor, { unit: 'word' });
          break;
        }
        case 'insertLineBreak':
        case 'insertParagraph': {
          Editor.insertBreak(editor);
          break;
        }
        case 'insertFromComposition': {
          // COMPAT: in safari, `compositionend` event is dispatched after
          // the beforeinput event with the inputType "insertFromComposition" has been dispatched.
          // https://www.w3.org/TR/input-events-2/
          // so the following code is the right logic
          // because DOM selection in sync will be exec before `compositionend` event
          // isComposing is true will prevent DOM selection being update correctly.
          this.isComposing = false;
          preventInsertFromComposition(event, this.editor);
        }
        // falls through: the composed data is then inserted by the shared
        // insert* handling below (no `break` above is intentional).
        case 'insertFromDrop':
        case 'insertFromPaste':
        case 'insertFromYank':
        case 'insertReplacementText':
        case 'insertText': {
          if (data instanceof DataTransfer) {
            AngularEditor.insertData(editor, data);
          } else if (typeof data === 'string') {
            Editor.insertText(editor, data);
          }
          break;
        }
      }
    } catch (error) {
      this.editor.onError({ code: SlateErrorCode.OnDOMBeforeInputError, nativeError: error });
    }
  }
}
/**
 * Clear the editor's focused flag on blur, except in the several cases where
 * focus only appears to leave (tab switch, embedded editables, void spacers,
 * non-editable sections of non-void nodes).
 */
private onDOMBlur(event: FocusEvent) {
  if (
    this.readonly ||
    this.isUpdatingSelection ||
    !hasEditableTarget(this.editor, event.target) ||
    this.isDOMEventHandled(event, this.blur)
  ) {
    return;
  }
  const window = AngularEditor.getWindow(this.editor);
  // COMPAT: If the current `activeElement` is still the previous
  // one, this is due to the window being blurred when the tab
  // itself becomes unfocused, so we want to abort early to allow to
  // editor to stay focused when the tab becomes focused again.
  const root = AngularEditor.findDocumentOrShadowRoot(this.editor);
  if (this.latestElement === root.activeElement) {
    return;
  }
  const { relatedTarget } = event;
  const el = AngularEditor.toDOMNode(this.editor, this.editor);
  // COMPAT: The event should be ignored if the focus is returning
  // to the editor from an embedded editable element (eg. an <input>
  // element inside a void node).
  if (relatedTarget === el) {
    return;
  }
  // COMPAT: The event should be ignored if the focus is moving from
  // the editor to inside a void node's spacer element.
  if (isDOMElement(relatedTarget) && relatedTarget.hasAttribute('data-slate-spacer')) {
    return;
  }
  // COMPAT: The event should be ignored if the focus is moving to a
  // non- editable section of an element that isn't a void node (eg.
  // a list item of the check list example).
  if (relatedTarget != null && isDOMNode(relatedTarget) && AngularEditor.hasDOMNode(this.editor, relatedTarget)) {
    const node = AngularEditor.toSlateNode(this.editor, relatedTarget);
    if (Element.isElement(node) && !this.editor.isVoid(node)) {
      return;
    }
  }
  IS_FOCUSED.delete(this.editor);
}
/**
 * On click: if the clicked slate node is entirely void (its start and end
 * resolve to the same void), select it so it behaves as a unit.
 */
private onDOMClick(event: MouseEvent) {
  if (
    !this.readonly &&
    hasTarget(this.editor, event.target) &&
    !this.isDOMEventHandled(event, this.click) &&
    isDOMNode(event.target)
  ) {
    const node = AngularEditor.toSlateNode(this.editor, event.target);
    const path = AngularEditor.findPath(this.editor, node);
    const start = Editor.start(this.editor, path);
    const end = Editor.end(this.editor, path);
    const startVoid = Editor.void(this.editor, { at: start });
    const endVoid = Editor.void(this.editor, { at: end });
    if (startVoid && endVoid && Path.equals(startVoid[1], endVoid[1])) {
      const range = Editor.range(this.editor, start);
      Transforms.select(this.editor, range);
    }
  }
}
/**
 * End of an IME composition: commit the composed text into the editor and
 * clear the composing flag, then re-render.
 */
private onDOMCompositionEnd(event: CompositionEvent) {
  // a cancelled composition over an expanded selection removes the selection
  if (!event.data && !Range.isCollapsed(this.editor.selection)) {
    Transforms.delete(this.editor);
  }
  if (hasEditableTarget(this.editor, event.target) && !this.isDOMEventHandled(event, this.compositionEnd)) {
    // COMPAT: In Chrome/Firefox, `beforeinput` events for compositions
    // aren't correct and never fire the "insertFromComposition"
    // type that we need. So instead, insert whenever a composition
    // ends since it will already have been committed to the DOM.
    if (this.isComposing === true && !IS_SAFARI && event.data) {
      preventInsertFromComposition(event, this.editor);
      Editor.insertText(this.editor, event.data);
    }
    // COMPAT: In Firefox 87.0 CompositionEnd fires twice,
    // so guard with isComposing === true to avoid repeated insertText.
    this.isComposing = false;
  }
  this.detectContext();
  this.cdr.detectChanges();
}
/**
 * Start of an IME composition: collapse any expanded selection first (fixes
 * cross-node CJK input), then raise the composing flag and re-render.
 */
private onDOMCompositionStart(event: CompositionEvent) {
  const { selection } = this.editor;
  if (selection) {
    // solve the problem of cross node Chinese input
    if (Range.isExpanded(selection)) {
      Editor.deleteFragment(this.editor);
      this.forceFlush();
    }
  }
  if (hasEditableTarget(this.editor, event.target) && !this.isDOMEventHandled(event, this.compositionStart)) {
    this.isComposing = true;
  }
  this.detectContext();
  this.cdr.detectChanges();
}
/**
 * Copy: serialize the current slate fragment onto the clipboard instead of
 * letting the browser copy raw DOM. Copies that originate entirely inside a
 * void (outside slate's text layer) are left to the browser.
 */
private onDOMCopy(event: ClipboardEvent) {
  const window = AngularEditor.getWindow(this.editor);
  const isOutsideSlate = !hasStringTarget(window.getSelection()) && isTargetInsideVoid(this.editor, event.target);
  if (!isOutsideSlate && hasTarget(this.editor, event.target) && !this.readonly && !this.isDOMEventHandled(event, this.copy)) {
    event.preventDefault();
    AngularEditor.setFragmentData(this.editor, event.clipboardData);
  }
}
/**
 * Cut: serialize the fragment onto the clipboard, then delete the selected
 * content from the document.
 */
private onDOMCut(event: ClipboardEvent) {
  if (!this.readonly && hasEditableTarget(this.editor, event.target) && !this.isDOMEventHandled(event, this.cut)) {
    event.preventDefault();
    AngularEditor.setFragmentData(this.editor, event.clipboardData);
    const { selection } = this.editor;
    if (selection) {
      AngularEditor.deleteCutData(this.editor);
    }
  }
}
private onDOMDragOver(event: DragEvent) {
  if (hasTarget(this.editor, event.target) && !this.isDOMEventHandled(event, this.dragOver)) {
    // Only when the target is void, call `preventDefault` to signal
    // that drops are allowed. Editable content is droppable by
    // default, and calling `preventDefault` hides the cursor.
    const node = AngularEditor.toSlateNode(this.editor, event.target);
    if (Editor.isVoid(this.editor, node)) {
      event.preventDefault();
    }
  }
}
/**
 * Drag start: mark the drag as internal and put the dragged slate fragment
 * into the drag's dataTransfer.
 */
private onDOMDragStart(event: DragEvent) {
  if (hasTarget(this.editor, event.target) && !this.isDOMEventHandled(event, this.dragStart)) {
    const node = AngularEditor.toSlateNode(this.editor, event.target);
    const path = AngularEditor.findPath(this.editor, node);
    const voidMatch =
      Editor.isVoid(this.editor, node) ||
      Editor.void(this.editor, { at: path, voids: true });
    // If starting a drag on a void node, make sure it is selected
    // so that it shows up in the selection's fragment.
    if (voidMatch) {
      const range = Editor.range(this.editor, path);
      Transforms.select(this.editor, range);
    }
    this.isDraggingInternally = true;
    AngularEditor.setFragmentData(this.editor, event.dataTransfer);
  }
}
/**
 * Drop: select the drop location, remove the dragged range if the drag was
 * internal, insert the transferred data, and ensure the editor is focused.
 */
private onDOMDrop(event: DragEvent) {
  const editor = this.editor;
  if (!this.readonly && hasTarget(this.editor, event.target) && !this.isDOMEventHandled(event, this.drop)) {
    event.preventDefault();
    // Keep a reference to the dragged range before updating selection
    const draggedRange = editor.selection;
    // Find the range where the drop happened
    const range = AngularEditor.findEventRange(editor, event);
    const data = event.dataTransfer;
    Transforms.select(editor, range);
    if (this.isDraggingInternally) {
      if (draggedRange) {
        Transforms.delete(editor, {
          at: draggedRange,
        });
      }
      this.isDraggingInternally = false;
    }
    AngularEditor.insertData(editor, data);
    // When dragging from another source into the editor, it's possible
    // that the current editor does not have focus.
    if (!AngularEditor.isFocused(editor)) {
      AngularEditor.focus(editor);
    }
  }
}
// Reset the internal-drag flag when a drag ends without a drop in this editor.
private onDOMDragEnd(event: DragEvent) {
  if (!this.readonly && this.isDraggingInternally && hasTarget(this.editor, event.target) && !this.isDOMEventHandled(event, this.dragEnd)) {
    this.isDraggingInternally = false;
  }
}
/**
 * Focus: record the newly focused element (used by onDOMBlur's tab-switch
 * check) and mark the editor focused. In Firefox, focus landing on a nested
 * editable is bounced back to the editor root.
 */
private onDOMFocus(event: Event) {
  if (
    !this.readonly &&
    !this.isUpdatingSelection &&
    hasEditableTarget(this.editor, event.target) &&
    !this.isDOMEventHandled(event, this.focus)
  ) {
    const el = AngularEditor.toDOMNode(this.editor, this.editor);
    const root = AngularEditor.findDocumentOrShadowRoot(this.editor);
    this.latestElement = root.activeElement; // semicolon added (ASI hazard)
    // COMPAT: If the editor has nested editable elements, the focus
    // can go to them. In Firefox, this must be prevented because it
    // results in issues with keyboard navigation. (2017/03/30)
    if (IS_FIREFOX && event.target !== el) {
      el.focus();
      return;
    }
    IS_FOCUSED.set(this.editor, true);
  }
}
/**
 * Keydown: implement hotkeys the browser cannot be trusted with — undo/redo
 * (our preventDefault on beforeinput empties the native history stack),
 * line/word movement, and — without native beforeinput support — all delete
 * and split-block behavior. Direction-sensitive moves honor RTL text.
 */
private onDOMKeydown(event: KeyboardEvent) {
  const editor = this.editor;
  if (
    !this.readonly &&
    hasEditableTarget(editor, event.target) &&
    !this.isComposing &&
    !this.isDOMEventHandled(event, this.keydown)
  ) {
    const nativeEvent = event;
    const { selection } = editor;
    // top-level block containing the focus; used only for RTL detection
    const element =
      editor.children[
        selection !== null ? selection.focus.path[0] : 0
      ]
    const isRTL = getDirection(Node.string(element)) === 'rtl';
    try {
      // COMPAT: Since we prevent the default behavior on
      // `beforeinput` events, the browser doesn't think there's ever
      // any history stack to undo or redo, so we have to manage these
      // hotkeys ourselves. (2019/11/06)
      if (Hotkeys.isRedo(nativeEvent)) {
        event.preventDefault();
        if (HistoryEditor.isHistoryEditor(editor)) {
          editor.redo();
        }
        return;
      }
      if (Hotkeys.isUndo(nativeEvent)) {
        event.preventDefault();
        if (HistoryEditor.isHistoryEditor(editor)) {
          editor.undo();
        }
        return;
      }
      // COMPAT: Certain browsers don't handle the selection updates
      // properly. In Chrome, the selection isn't properly extended.
      // And in Firefox, the selection isn't properly collapsed.
      // (2017/10/17)
      if (Hotkeys.isMoveLineBackward(nativeEvent)) {
        event.preventDefault();
        Transforms.move(editor, { unit: 'line', reverse: true });
        return;
      }
      if (Hotkeys.isMoveLineForward(nativeEvent)) {
        event.preventDefault();
        Transforms.move(editor, { unit: 'line' });
        return;
      }
      if (Hotkeys.isExtendLineBackward(nativeEvent)) {
        event.preventDefault();
        Transforms.move(editor, {
          unit: 'line',
          edge: 'focus',
          reverse: true
        });
        return;
      }
      if (Hotkeys.isExtendLineForward(nativeEvent)) {
        event.preventDefault();
        Transforms.move(editor, { unit: 'line', edge: 'focus' });
        return;
      }
      // COMPAT: If a void node is selected, or a zero-width text node
      // adjacent to an inline is selected, we need to handle these
      // hotkeys manually because browsers won't be able to skip over
      // the void node with the zero-width space not being an empty
      // string.
      if (Hotkeys.isMoveBackward(nativeEvent)) {
        event.preventDefault();
        if (selection && Range.isCollapsed(selection)) {
          Transforms.move(editor, { reverse: !isRTL });
        } else {
          Transforms.collapse(editor, { edge: 'start' });
        }
        return;
      }
      if (Hotkeys.isMoveForward(nativeEvent)) {
        event.preventDefault();
        if (selection && Range.isCollapsed(selection)) {
          Transforms.move(editor, { reverse: isRTL });
        } else {
          Transforms.collapse(editor, { edge: 'end' });
        }
        return;
      }
      if (Hotkeys.isMoveWordBackward(nativeEvent)) {
        event.preventDefault();
        if (selection && Range.isExpanded(selection)) {
          Transforms.collapse(editor, { edge: 'focus' })
        }
        Transforms.move(editor, { unit: 'word', reverse: !isRTL });
        return;
      }
      if (Hotkeys.isMoveWordForward(nativeEvent)) {
        event.preventDefault();
        if (selection && Range.isExpanded(selection)) {
          Transforms.collapse(editor, { edge: 'focus' })
        }
        Transforms.move(editor, { unit: 'word', reverse: isRTL });
        return;
      }
      // COMPAT: Certain browsers don't support the `beforeinput` event, so we
      // fall back to guessing at the input intention for hotkeys.
      // COMPAT: In iOS, some of these hotkeys are handled in the
      if (!HAS_BEFORE_INPUT_SUPPORT) {
        // We don't have a core behavior for these, but they change the
        // DOM if we don't prevent them, so we have to.
        if (Hotkeys.isBold(nativeEvent) || Hotkeys.isItalic(nativeEvent) || Hotkeys.isTransposeCharacter(nativeEvent)) {
          event.preventDefault();
          return;
        }
        if (Hotkeys.isSplitBlock(nativeEvent)) {
          event.preventDefault();
          Editor.insertBreak(editor);
          return;
        }
        if (Hotkeys.isDeleteBackward(nativeEvent)) {
          event.preventDefault();
          if (selection && Range.isExpanded(selection)) {
            Editor.deleteFragment(editor, { direction: 'backward' });
          } else {
            Editor.deleteBackward(editor);
          }
          return;
        }
        if (Hotkeys.isDeleteForward(nativeEvent)) {
          event.preventDefault();
          if (selection && Range.isExpanded(selection)) {
            Editor.deleteFragment(editor, { direction: 'forward' });
          } else {
            Editor.deleteForward(editor);
          }
          return;
        }
        if (Hotkeys.isDeleteLineBackward(nativeEvent)) {
          event.preventDefault();
          if (selection && Range.isExpanded(selection)) {
            Editor.deleteFragment(editor, { direction: 'backward' });
          } else {
            Editor.deleteBackward(editor, { unit: 'line' });
          }
          return;
        }
        if (Hotkeys.isDeleteLineForward(nativeEvent)) {
          event.preventDefault();
          if (selection && Range.isExpanded(selection)) {
            Editor.deleteFragment(editor, { direction: 'forward' });
          } else {
            Editor.deleteForward(editor, { unit: 'line' });
          }
          return;
        }
        if (Hotkeys.isDeleteWordBackward(nativeEvent)) {
          event.preventDefault();
          if (selection && Range.isExpanded(selection)) {
            Editor.deleteFragment(editor, { direction: 'backward' });
          } else {
            Editor.deleteBackward(editor, { unit: 'word' });
          }
          return;
        }
        if (Hotkeys.isDeleteWordForward(nativeEvent)) {
          event.preventDefault();
          if (selection && Range.isExpanded(selection)) {
            Editor.deleteFragment(editor, { direction: 'forward' });
          } else {
            Editor.deleteForward(editor, { unit: 'word' });
          }
          return;
        }
      } else {
        if (IS_CHROME || IS_SAFARI) {
          // COMPAT: Chrome and Safari support `beforeinput` event but do not fire
          // an event when deleting backwards in a selected void inline node
          if (
            selection &&
            (Hotkeys.isDeleteBackward(nativeEvent) ||
              Hotkeys.isDeleteForward(nativeEvent)) &&
            Range.isCollapsed(selection)
          ) {
            const currentNode = Node.parent(
              editor,
              selection.anchor.path
            )
            if (
              Element.isElement(currentNode) &&
              Editor.isVoid(editor, currentNode) &&
              Editor.isInline(editor, currentNode)
            ) {
              event.preventDefault()
              Editor.deleteBackward(editor, { unit: 'block' })
              return
            }
          }
        }
      }
    } catch (error) {
      this.editor.onError({ code: SlateErrorCode.OnDOMKeydownError, nativeError: error });
    }
  }
}
private onDOMPaste(event: ClipboardEvent) {
  // COMPAT: Certain browsers don't support the `beforeinput` event, so we
  // fall back to React's `onPaste` here instead.
  // COMPAT: Firefox, Chrome and Safari are not emitting `beforeinput` events
  // when "paste without formatting" option is used.
  // This unfortunately needs to be handled with paste events instead.
  if (
    !this.isDOMEventHandled(event, this.paste) &&
    (!HAS_BEFORE_INPUT_SUPPORT || isPlainTextOnlyPaste(event) || forceOnDOMPaste) &&
    !this.readonly &&
    hasEditableTarget(this.editor, event.target)
  ) {
    event.preventDefault();
    AngularEditor.insertData(this.editor, event.clipboardData);
  }
}
/**
 * Polyfilled `beforeinput` for browsers without native support; only the
 * `insertText` intent is reconstructed (see extractBeforeInputEvent).
 */
private onFallbackBeforeInput(event: BeforeInputEvent) {
  // COMPAT: Certain browsers don't support the `beforeinput` event, so we
  // fall back to React's leaky polyfill instead just for it. It
  // only works for the `insertText` input type.
  if (
    !HAS_BEFORE_INPUT_SUPPORT &&
    !this.readonly &&
    !this.isDOMEventHandled(event.nativeEvent, this.beforeInput) &&
    hasEditableTarget(this.editor, event.nativeEvent.target)
  ) {
    event.nativeEvent.preventDefault();
    try {
      const text = event.data;
      if (!Range.isCollapsed(this.editor.selection)) {
        Editor.deleteFragment(this.editor);
      }
      // just handle Non-IME input
      if (!this.isComposing) {
        Editor.insertText(this.editor, text);
      }
    } catch (error) {
      // NOTE(review): ToNativeSelectionError looks copy-pasted here — an
      // OnDOMBeforeInputError code seems more accurate; confirm intent.
      this.editor.onError({ code: SlateErrorCode.ToNativeSelectionError, nativeError: error });
    }
  }
}
/**
 * Run the host-supplied handler (if any) and report whether it claimed the
 * event. A handler signals "handled" by calling event.preventDefault().
 */
private isDOMEventHandled(event: Event, handler?: (event: Event) => void) {
  if (handler) {
    handler(event);
    return event.defaultPrevented;
  }
  return false;
}
//#endregion
/**
 * Tear down: detach every manually registered DOM listener and drop this
 * editor's entries from the shared maps.
 * NOTE(review): destroy$ is completed without a final next(), and the
 * EDITOR_TO_ELEMENT / ELEMENT_TO_NODE / EDITOR_TO_WINDOW entries set in
 * ngOnInit are not removed here — confirm whether that is intentional.
 */
ngOnDestroy() {
  NODE_TO_ELEMENT.delete(this.editor);
  this.manualListeners.forEach(manualListener => {
    manualListener();
  });
  this.destroy$.complete();
  EDITOR_TO_ON_CHANGE.delete(this.editor);
}
}
/**
 * Check if the target is editable and in the editor.
 */
const hasEditableTarget = (editor: AngularEditor, target: EventTarget | null): target is DOMNode => {
  if (!isDOMNode(target)) {
    return false;
  }
  return AngularEditor.hasDOMNode(editor, target, { editable: true });
};
/**
 * Check if two DOM range objects cover the same span, in either direction.
 */
const isRangeEqual = (a: DOMRange, b: DOMRange) => {
  const sameDirection =
    a.startContainer === b.startContainer &&
    a.startOffset === b.startOffset &&
    a.endContainer === b.endContainer &&
    a.endOffset === b.endOffset;
  const oppositeDirection =
    a.startContainer === b.endContainer &&
    a.startOffset === b.endOffset &&
    a.endContainer === b.startContainer &&
    a.endOffset === b.startOffset;
  return sameDirection || oppositeDirection;
};
/**
 * Check if the target is in the editor.
 */
const hasTarget = (editor: AngularEditor, target: EventTarget | null): target is DOMNode => {
  if (!isDOMNode(target)) {
    return false;
  }
  return AngularEditor.hasDOMNode(editor, target);
};
/**
 * Check if the target is inside a void node and in the editor.
 */
const isTargetInsideVoid = (editor: AngularEditor, target: EventTarget | null): boolean => {
  if (!hasTarget(editor, target)) {
    return false;
  }
  const slateNode = AngularEditor.toSlateNode(editor, target);
  return Editor.isVoid(editor, slateNode);
};
/**
 * True when both selection endpoints sit inside slate text leaves
 * (data-slate-string or data-slate-zero-width parents).
 */
const hasStringTarget = (domSelection: DOMSelection) => {
  const inStringLeaf = (domNode: DOMSelection['anchorNode']) =>
    domNode.parentElement.hasAttribute('data-slate-string') ||
    domNode.parentElement.hasAttribute('data-slate-zero-width');
  return inStringLeaf(domSelection.anchorNode) && inStringLeaf(domSelection.focusNode);
}
/**
 * Remove the text the browser itself inserted at the end of a composition,
 * so the subsequent Editor.insertText does not duplicate it. Only acts on
 * `compositionend` / `insertFromComposition` events.
 * @param event source event carrying the composed text in `data`
 * @param editor the editor whose DOM selection is inspected
 */
const preventInsertFromComposition = (event: Event, editor: AngularEditor) => {
  const types = ['compositionend', 'insertFromComposition'];
  if (!types.includes(event.type)) {
    return;
  }
  const insertText = (event as CompositionEvent).data;
  const window = AngularEditor.getWindow(editor);
  const domSelection = window.getSelection();
  // ensure text node insert composition input text
  if (insertText && domSelection.anchorNode instanceof Text && domSelection.anchorNode.textContent.endsWith(insertText)) {
    const textNode = domSelection.anchorNode;
    // split off the just-composed tail and drop it from the DOM
    textNode.splitText(textNode.length - insertText.length).remove();
  }
}
} | the_stack |
import {
AfterViewChecked,
ChangeDetectionStrategy,
ChangeDetectorRef,
Component,
ElementRef,
Inject,
NgZone,
OnInit,
ViewChild,
} from '@angular/core';
import { Clipboard } from '@angular/cdk/clipboard';
import {
FormBuilder,
FormControl,
FormGroup,
ValidatorFn,
Validators,
} from '@angular/forms';
import {
BehaviorSubject,
combineLatest,
merge,
Observable,
ReplaySubject,
Subscription,
} from 'rxjs';
import {
debounceTime,
distinctUntilChanged,
map,
shareReplay,
startWith,
tap,
flatMap,
mapTo,
mergeMap,
filter,
withLatestFrom,
} from 'rxjs/operators';
import { getConfigurationFlag, formatTask } from './format-task/format-task';
import { TASK_EXECUTION_SCHEMA } from './task-execution-form.schema';
import {
TaskExecutionSchema,
TaskExecutionMessage,
ItemsWithEnum
} from '@nx-console/schema';
import { OptionType, Value } from '@angular/cli/models/interface';
declare global {
  interface Window {
    // Assigned in TaskExecutionFormComponent.ngOnInit so the hosting VSCode
    // webview can push a new schema into the running form.
    SET_TASK_EXECUTION_SCHEMA: (schema: TaskExecutionSchema) => void;
    // VSCode webview messaging API used to send the composed task back.
    vscode: {
      postMessage: (message: TaskExecutionMessage) => void;
    };
  }
}
@Component({
selector: 'vscode-ui-task-execution-form',
templateUrl: './task-execution-form.component.html',
styleUrls: ['./task-execution-form.component.scss'],
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class TaskExecutionFormComponent implements OnInit, AfterViewChecked {
@ViewChild('scrollContainer') scrollContainer: ElementRef<HTMLElement>;
@ViewChild('formHeaderContainer')
formHeaderContainer: ElementRef<HTMLElement>;
// id of the currently active field; ids carry a '-nx-console-field' suffix
// which activeFieldName$ strips off.
private readonly activeFieldIdSubject = new BehaviorSubject<string>('');
readonly activeFieldName$ = this.activeFieldIdSubject.pipe(
  distinctUntilChanged(),
  map((field) => field.replace('-nx-console-field', ''))
);
// current task/generator schema; ReplaySubject so late subscribers
// immediately receive the latest schema.
private readonly architectSubject = new ReplaySubject<TaskExecutionSchema>();
readonly architect$ = this.architectSubject.asObservable();
// Builds a fresh reactive form for every schema emission. For `generate`
// commands, valid form changes (debounced) trigger a dry-run preview.
readonly taskExecForm$: Observable<{
  form: FormGroup;
  architect: TaskExecutionSchema;
}> = this.architect$.pipe(
  map((architect) => ({ form: this.buildForm(architect), architect })),
  tap((taskExecForm) => {
    // tear down the previous schema's dry-run watcher before installing
    // a new one
    if (this.dryRunSubscription) {
      this.dryRunSubscription.unsubscribe();
      this.dryRunSubscription = undefined;
    }
    if (taskExecForm.architect.command === 'generate') {
      this.dryRunSubscription = taskExecForm.form.valueChanges
        .pipe(
          debounceTime(500),
          filter(() => taskExecForm.form.valid)
        )
        .subscribe(() => {
          this.runCommand({ ...taskExecForm, dryRun: true });
        });
    }
  }),
  shareReplay()
);
// Default option values for the selected configuration; re-emits when the
// `configuration` control changes or when the form is rebuilt.
// (`mergeMap` replaces the deprecated rxjs `flatMap` alias — same operator,
// consistent with runCommandArguments$ below.)
readonly defaultValues$ = this.taskExecForm$.pipe(
  mergeMap((taskExecForm) => {
    const configurationControl = taskExecForm.form.get('configuration');
    if (configurationControl) {
      // re-emit the same form bundle on every configuration change
      return configurationControl.valueChanges.pipe(
        startWith(taskExecForm),
        mapTo(taskExecForm)
      );
    }
    return [taskExecForm];
  }),
  map(({ architect, form }) => {
    const configurationControl = form.get('configuration');
    const configurationName = configurationControl
      ? configurationControl.value
      : undefined;
    return this.getDefaultValuesForConfiguration(
      architect,
      configurationName
    );
  }),
  shareReplay()
);
// Search box controlling which option fields are visible.
readonly filterFieldsControl = new FormControl('');
// lower-cased, deduplicated filter text
private readonly filterValue$ = (this.filterFieldsControl
  .valueChanges as Observable<string>).pipe(
  startWith(''),
  map((filterValue) => filterValue.toLowerCase()),
  distinctUntilChanged()
);
// Names of the schema options whose (case-insensitive) name contains the
// current filter text.
readonly filteredFields$: Observable<Set<string>> = combineLatest([
  this.architect$.pipe(
    map((architect) => {
      return architect.options.map((field) => {
        return {
          fieldName: field.name,
          fieldNameLowerCase: field.name.toLowerCase(),
        };
      });
    })
  ),
  this.filterValue$,
]).pipe(
  map(([fields, filterValue]) => {
    const filteredFields = new Set<string>();
    fields.forEach((field) => {
      if (field.fieldNameLowerCase.includes(filterValue)) {
        filteredFields.add(field.fieldName);
      }
    });
    return filteredFields;
  }),
  shareReplay()
);
runCommandArguments$ = this.taskExecForm$.pipe(
mergeMap((taskExecForm) =>
taskExecForm.form.valueChanges.pipe(
startWith(taskExecForm.form.value),
map(() => taskExecForm)
)
),
map(({ architect, form }) =>
this.serializeArgs(
form.value,
architect,
form.get('configuration')?.value
)
)
);
validFields$ = this.getValidFields$(true);
invalidFields$ = this.getValidFields$(false);
dryRunSubscription?: Subscription;
constructor(
private readonly fb: FormBuilder,
@Inject(TASK_EXECUTION_SCHEMA) public initialSchema: TaskExecutionSchema,
private readonly ngZone: NgZone,
private readonly changeDetectorRef: ChangeDetectorRef,
private readonly clipboard: Clipboard
) {}
ngOnInit() {
this.architectSubject.next(this.initialSchema);
window.SET_TASK_EXECUTION_SCHEMA = (schema) => {
this.ngZone.run(() => {
this.architectSubject.next(schema);
setTimeout(() => {
this.scrollToTop();
this.changeDetectorRef.detectChanges();
}, 0);
});
};
}
ngAfterViewChecked() {
if (!this.scrollContainer || this.scrollContainer.nativeElement.onscroll) {
return;
}
this.ngZone.runOutsideAngular(() => {
const scrollElement = this.scrollContainer.nativeElement;
const formHeaderElement = this.formHeaderContainer.nativeElement;
let scrolled = false;
scrollElement.onscroll = () => {
if (scrollElement.scrollTop === 0) {
formHeaderElement.classList.remove('scrolled');
scrolled = false;
} else {
if (!scrolled) {
formHeaderElement.classList.add('scrolled');
scrolled = true;
}
}
const fields = Array.from(
scrollElement.querySelectorAll<HTMLElement>('nx-console-field')
);
const top =
Number(scrollElement.scrollTop) +
Number(scrollElement.offsetTop) -
24;
const activeField =
fields.find((e: HTMLElement) => e.offsetTop > top) || fields[0];
if (this.activeFieldIdSubject.value !== activeField.id) {
this.ngZone.run(() => {
this.activeFieldIdSubject.next(activeField.id);
});
}
};
});
}
buildForm(architect: TaskExecutionSchema): FormGroup {
const taskExecForm = this.fb.group({});
if (architect.configurations && architect.configurations.length) {
const configurationFormControl = new FormControl();
taskExecForm.addControl('configuration', configurationFormControl);
configurationFormControl.registerOnChange(() => {
this.setConfiguration(
taskExecForm,
architect,
configurationFormControl.value
);
});
}
const defaultValues = this.getDefaultValuesForConfiguration(architect);
architect.options.forEach((schema) => {
const validators: Array<ValidatorFn> = [];
if (schema.required) {
validators.push(Validators.required);
}
if (schema.enum || schema.items) {
const validValueSet = new Set(
schema.enum ||
(schema.items as ItemsWithEnum).enum ||
(schema.items as string[])
);
validators.push((control) => {
if (
(control.value &&
!Array.isArray(control.value) &&
!validValueSet.has(control.value)) ||
// multiselect values are Array, check if all values are in Set
(Array.isArray(control.value) &&
!control.value.every((value: Value) => validValueSet.has(value)))
) {
return {
enum: 'Please select a value from the auto-completable list',
};
}
return null;
});
}
taskExecForm.addControl(
schema.name,
new FormControl(
(architect.contextValues && architect.contextValues[schema.name]) ||
defaultValues[schema.name],
validators
)
);
});
return taskExecForm;
}
setConfiguration(
taskExecForm: FormGroup,
architect: TaskExecutionSchema,
configurationName?: string
) {
const defaultValues = this.getDefaultValuesForConfiguration(
architect,
configurationName
);
taskExecForm.patchValue(defaultValues);
this.scrollToTop();
}
private scrollToTop() {
this.scrollContainer.nativeElement.scrollTo({
top: 0,
});
}
private getDefaultValuesForConfiguration(
architect: TaskExecutionSchema,
configurationName?: string
) {
const defaultValues: { [key: string]: string | string[] } = {};
architect.options.forEach((field) => {
if (field.default === undefined) {
defaultValues[field.name] = '';
return;
}
if (Array.isArray(field.default)) {
defaultValues[field.name] = field.default.map((item) => String(item));
} else {
defaultValues[field.name] =
String(field.default) || (field.type === OptionType.Boolean ? 'false' : '');
}
});
if (configurationName && architect.configurations) {
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
const configuration = architect.configurations.find(
(c) => c.name === configurationName
)!;
configuration.defaultValues.forEach((value) => {
defaultValues[value.name] = value.defaultValue || '';
});
}
return defaultValues;
}
runCommand({
form,
architect,
dryRun,
}: {
form: FormGroup;
architect: TaskExecutionSchema;
dryRun?: boolean;
}) {
const configuration = form.get('configuration')?.value;
const args = this.serializeArgs(form.value, architect, configuration);
const flags = configuration
? [
getConfigurationFlag(configuration),
...args,
]
: args;
if (architect.command === 'generate') {
flags.push('--no-interactive');
}
if (dryRun) {
flags.push('--dry-run');
}
window.vscode.postMessage({
command: architect.command,
positional: architect.positional,
flags,
});
}
getValidFields$(
valid: boolean
): Observable<{ [name: string]: string[] | string | number | boolean }> {
return this.taskExecForm$.pipe(
mergeMap((taskExecForm) =>
merge(
taskExecForm.form.valueChanges,
taskExecForm.form.statusChanges
).pipe(
startWith(taskExecForm),
map(() => taskExecForm)
)
),
withLatestFrom(this.defaultValues$),
map(([{ form, architect }, defaultValues]) => {
return architect.options
.filter((option) => {
const control = form.controls[option.name];
return (
// ** VALID fields **
(valid &&
control.valid &&
// touched is not working with checkbox, so ignore touched and just check !== defaultValue
// control.touched &&
((option.type !== OptionType.Array && control.value !== defaultValues[option.name]) ||
(option.type === OptionType.Array &&
control.value &&
control.value.join(',') !== ((defaultValues[option.name] || []) as string[]).join(',')
)
)) ||
// ** INVALID fields **
// invalid and touched (checkbox is always valid as true/false)
(!valid && control.touched && control.invalid)
);
})
.reduce((options, option) => ({
...options,
[option.name]: form.controls[option.name].value,
}), {});
})
);
}
private serializeArgs(
// eslint-disable-next-line @typescript-eslint/no-explicit-any
value: { [p: string]: any },
architect: TaskExecutionSchema,
configurationName?: string
): string[] {
const fields = architect.options.filter((s) => value[s.name]);
const defaultValues = this.getDefaultValuesForConfiguration(
architect,
configurationName
);
const args: string[] = [];
fields.forEach((f) => {
if (defaultValues[f.name] === value[f.name]) return;
if (!defaultValues[f.name] && !value[f.name]) return;
if (
Array.isArray(defaultValues[f.name]) &&
(defaultValues[f.name] as string[]).join(',') === value[f.name].join(',')
)
return;
if (f.positional) {
args.push(sanitizeWhitespace(value[f.name]));
} else if (f.type === OptionType.Boolean) {
args.push(value[f.name] === 'false' ? `--no-${f.name}` : `--${f.name}`);
} else {
const fieldValue = value[f.name];
if (Array.isArray(fieldValue)) {
const values = fieldValue.map((v) => sanitizeWhitespace(v));
args.push(`--${f.name}=${values.join(',')}`);
} else {
args.push(`--${f.name}=${sanitizeWhitespace(fieldValue)}`);
}
}
});
return args;
}
copyCommandToClipboard(form: FormGroup, architect: TaskExecutionSchema) {
const configuration = form.get('configuration')?.value;
this.clipboard.copy(
`${formatTask(architect, configuration)} ${this.serializeArgs(
form.value,
architect,
configuration
).join(' ')}`
);
}
}
function sanitizeWhitespace(value: string) {
const trimmed = value.trim();
return /\s/.test(trimmed) ? `'${trimmed}'` : trimmed; // NOTE: We use ' rather than " for powershell compatibility
} | the_stack |
import { AppInstanceJson, SolidityValueType } from "@counterfactual/types";
import { BigNumber, bigNumberify } from "ethers/utils";
import { xkeyKthAddress } from "../engine/xkeys";
import {
flip,
flipTokenIndexedBalances
} from "../ethereum/utils/free-balance-app";
import { Store } from "../store";
import { prettyPrintObject } from "../utils";
import { AppInstanceProposal } from ".";
import { AppInstance } from "./app-instance";
import {
CoinTransferMap,
createFreeBalance,
FreeBalanceClass,
TokenIndexedCoinTransferMap
} from "./free-balance";
// TODO: Hmmm this code should probably be somewhere else?
// Protocol-wide constants that are hard-coded rather than configured.
export const HARD_CODED_ASSUMPTIONS = {
  // Default timeout for the free balance app when none is supplied to
  // setupChannel. NOTE(review): 172800 = 2 days in seconds — confirm the
  // unit expected on-chain (seconds vs blocks).
  freeBalanceDefaultTimeout: 172800
};
// Error-message constants/factories used by StateChannel operations below.
const ERRORS = {
  APPS_NOT_EMPTY: (size: number) =>
    `Expected the appInstances list to be empty but size ${size}`,
  APP_DOES_NOT_EXIST: (identityHash: string) =>
    `Attempted to edit an appInstance that does not exist: identity hash = ${identityHash}`,
  FREE_BALANCE_MISSING: "Cannot find ETH Free Balance App in StateChannel",
  FREE_BALANCE_IDX_CORRUPT: (idx: string) =>
    `Index ${idx} used to find ETH Free Balance is broken`,
  INSUFFICIENT_FUNDS:
    "Attempted to install an appInstance without sufficient funds",
  MULTISIG_OWNERS_NOT_SORTED:
    "multisigOwners parameter of StateChannel must be sorted"
};
/**
 * Returns the given hex addresses sorted in ascending numeric order.
 *
 * Uses BigInt comparison because Ethereum addresses are 160-bit values:
 * `parseInt(addr, 16)` exceeds Number's 53-bit integer precision, so two
 * distinct addresses sharing a long prefix could compare as equal and sort
 * nondeterministically. Sorts a copy so the caller's array is not mutated.
 *
 * @param addrs hex address strings, with or without a "0x" prefix.
 */
function sortAddresses(addrs: string[]) {
  const toBigInt = (addr: string) =>
    BigInt(addr.startsWith("0x") || addr.startsWith("0X") ? addr : `0x${addr}`);
  return [...addrs].sort((a, b) => (toBigInt(a) < toBigInt(b) ? -1 : 1));
}
/**
 * Off-chain agreement backing a virtual app that is routed through an
 * intermediary (capital provider).
 */
export type SingleAssetTwoPartyIntermediaryAgreement = {
  timeLockedPassThroughIdentityHash: string;
  // NOTE(review): amounts are carried as strings — presumably serialized
  // BigNumber values; confirm against the (de)serialization sites.
  capitalProvided: string;
  capitalProvider: string;
  virtualAppUser: string;
  tokenAddress: string;
};
/** JSON-serializable snapshot of a StateChannel (see toJson / fromJson). */
export type StateChannelJSON = {
  readonly multisigAddress: string;
  readonly userNeuteredExtendedKeys: string[];
  // Maps are serialized as entry arrays ([key, value][]).
  readonly proposedAppInstances: [string, AppInstanceProposal][];
  readonly appInstances: [string, AppInstanceJson][];
  readonly singleAssetTwoPartyIntermediaryAgreements: [
    string,
    SingleAssetTwoPartyIntermediaryAgreement
  ][];
  // Undefined until the `setup` protocol has run (see createEmptyChannel).
  readonly freeBalanceAppInstance: AppInstanceJson | undefined;
  readonly monotonicNumProposedApps: number;
};
/**
 * Immutable model of a Counterfactual state channel backed by a multisig
 * wallet. Every "mutating" operation (install/uninstall, proposals,
 * free-balance updates) returns a NEW StateChannel constructed via the
 * private build() helper; nothing is modified in place.
 */
export class StateChannel {
  constructor(
    public readonly multisigAddress: string,
    public readonly userNeuteredExtendedKeys: string[],
    readonly proposedAppInstances: ReadonlyMap<
      string,
      AppInstanceProposal
    > = new Map<string, AppInstanceProposal>([]),
    readonly appInstances: ReadonlyMap<string, AppInstance> = new Map<
      string,
      AppInstance
    >([]),
    readonly singleAssetTwoPartyIntermediaryAgreements: ReadonlyMap<
      string,
      SingleAssetTwoPartyIntermediaryAgreement
    > = new Map<string, SingleAssetTwoPartyIntermediaryAgreement>([]),
    private readonly freeBalanceAppInstance?: AppInstance,
    private readonly monotonicNumProposedApps: number = 0
  ) {
    // Fail fast on malformed extended public keys.
    userNeuteredExtendedKeys.forEach(xpub => {
      if (!xpub.startsWith("xpub")) {
        throw Error(
          `StateChannel constructor given invalid extended keys: ${prettyPrintObject(
            userNeuteredExtendedKeys
          )}`
        );
      }
    });
  }
  // Addresses derived from the participants' xpubs at index 0.
  public get multisigOwners() {
    return this.getSigningKeysFor(0);
  }
  public get numProposedApps() {
    return this.monotonicNumProposedApps;
  }
  public get numActiveApps() {
    return this.appInstances.size;
  }
  /** Returns the installed AppInstance or throws APP_DOES_NOT_EXIST. */
  public getAppInstance(appInstanceIdentityHash: string): AppInstance {
    if (!this.appInstances.has(appInstanceIdentityHash)) {
      throw Error(ERRORS.APP_DOES_NOT_EXIST(appInstanceIdentityHash));
    }
    return this.appInstances.get(appInstanceIdentityHash)!;
  }
  public hasAppInstance(appInstanceId: string): boolean {
    return this.appInstances.has(appInstanceId);
  }
  // "Kind" here means the on-chain app definition address.
  public hasAppInstanceOfKind(address: string): boolean {
    return (
      Array.from(this.appInstances.values()).filter(
        (appInstance: AppInstance) => {
          return appInstance.appInterface.addr === address;
        }
      ).length > 0
    );
  }
  /** Returns the installed app with the highest appSeqNo; throws if none. */
  public mostRecentlyInstalledAppInstance(): AppInstance {
    if (this.appInstances.size === 0) {
      throw Error("There are no installed AppInstances in this StateChannel");
    }
    return [...this.appInstances.values()].reduce((prev, current) =>
      current.appSeqNo > prev.appSeqNo ? current : prev
    );
  }
  /** Returns the proposal with the highest appSeqNo; throws if none. */
  public mostRecentlyProposedAppInstance(): AppInstanceProposal {
    if (this.proposedAppInstances.size === 0) {
      throw Error("There are no proposed AppInstances in this StateChannel");
    }
    return [...this.proposedAppInstances.values()].reduce((prev, current) =>
      current.appSeqNo > prev.appSeqNo ? current : prev
    );
  }
  /**
   * Returns the single installed instance of the given app definition;
   * throws unless exactly one such instance exists.
   */
  public getAppInstanceOfKind(address: string) {
    const appInstances = Array.from(this.appInstances.values()).filter(
      (appInstance: AppInstance) => {
        return appInstance.appInterface.addr === address;
      }
    );
    if (appInstances.length !== 1) {
      throw Error(
        `No AppInstance of addr ${address} exists on channel: ${this.multisigAddress}`
      );
    }
    return appInstances[0];
  }
  public isAppInstanceInstalled(appInstanceIdentityHash: string) {
    return this.appInstances.has(appInstanceIdentityHash);
  }
  /**
   * Derives one address per participant at the given HD derivation index
   * and returns them numerically sorted (see sortAddresses).
   */
  public getSigningKeysFor(addressIndex: number): string[] {
    return sortAddresses(
      this.userNeuteredExtendedKeys.map(xpub =>
        xkeyKthAddress(xpub, addressIndex)
      )
    );
  }
  // Signing keys for the NEXT app: the proposal counter doubles as the
  // derivation index.
  public getNextSigningKeys(): string[] {
    return this.getSigningKeysFor(this.monotonicNumProposedApps);
  }
  public get hasFreeBalance(): boolean {
    return !!this.freeBalanceAppInstance;
  }
  /** The free balance app; throws before the setup protocol has run. */
  public get freeBalance(): AppInstance {
    if (this.freeBalanceAppInstance) {
      return this.freeBalanceAppInstance;
    }
    throw Error(
      "There is no free balance app instance installed in this state channel"
    );
  }
  /** Maps an xpub to its index-0 address, validating channel membership. */
  public getMultisigOwnerAddrOf(xpub: string): string {
    const [alice, bob] = this.multisigOwners;
    const topLevelKey = xkeyKthAddress(xpub, 0);
    if (topLevelKey !== alice && topLevelKey !== bob) {
      throw Error(
        `getMultisigOwnerAddrOf received invalid xpub not in multisigOwners: ${xpub}`
      );
    }
    return topLevelKey;
  }
  /** Like getMultisigOwnerAddrOf but validates against free-balance participants. */
  public getFreeBalanceAddrOf(xpub: string): string {
    const [alice, bob] = this.freeBalanceAppInstance!.participants;
    const topLevelKey = xkeyKthAddress(xpub, 0);
    if (topLevelKey !== alice && topLevelKey !== bob) {
      throw Error(
        `getFreeBalanceAddrOf received invalid xpub without free balance account: ${xpub}`
      );
    }
    return topLevelKey;
  }
  public getFreeBalanceClass() {
    return FreeBalanceClass.fromAppInstance(this.freeBalance);
  }
  /**
   * Copy-on-write constructor: returns a new StateChannel that takes each
   * field from `args` when provided, falling back to this instance's value.
   * NOTE(review): `||` means a falsy override (e.g. monotonicNumProposedApps
   * of 0) falls back to the current value — apparently intentional here.
   */
  private build(args: {
    multisigAddress?: string;
    userNeuteredExtendedKeys?: string[];
    appInstances?: ReadonlyMap<string, AppInstance>;
    proposedAppInstances?: ReadonlyMap<string, AppInstanceProposal>;
    singleAssetTwoPartyIntermediaryAgreements?: ReadonlyMap<
      string,
      SingleAssetTwoPartyIntermediaryAgreement
    >;
    freeBalanceAppInstance?: AppInstance;
    monotonicNumProposedApps?: number;
  }) {
    return new StateChannel(
      args.multisigAddress || this.multisigAddress,
      args.userNeuteredExtendedKeys || this.userNeuteredExtendedKeys,
      args.proposedAppInstances || this.proposedAppInstances,
      args.appInstances || this.appInstances,
      args.singleAssetTwoPartyIntermediaryAgreements ||
        this.singleAssetTwoPartyIntermediaryAgreements,
      args.freeBalanceAppInstance || this.freeBalanceAppInstance,
      args.monotonicNumProposedApps || this.monotonicNumProposedApps
    );
  }
  /** Returns a channel whose free balance has been adjusted by `increments`. */
  public incrementFreeBalance(increments: TokenIndexedCoinTransferMap) {
    return this.build({
      freeBalanceAppInstance: this.getFreeBalanceClass()
        .increment(increments)
        .toAppInstance(this.freeBalance)
    });
  }
  public addActiveApp(activeApp: string) {
    return this.build({
      freeBalanceAppInstance: this.getFreeBalanceClass()
        .addActiveApp(activeApp)
        .toAppInstance(this.freeBalance)
    });
  }
  public removeActiveApp(activeApp: string) {
    return this.build({
      freeBalanceAppInstance: this.getFreeBalanceClass()
        .removeActiveApp(activeApp)
        .toAppInstance(this.freeBalance)
    });
  }
  public addActiveAppAndIncrementFreeBalance(
    activeApp: string,
    tokenIndexedIncrements: TokenIndexedCoinTransferMap
  ) {
    return this.incrementFreeBalance(tokenIndexedIncrements).addActiveApp(
      activeApp
    );
  }
  public removeActiveAppAndIncrementFreeBalance(
    activeApp: string,
    tokenIndexedIncrements: TokenIndexedCoinTransferMap
  ) {
    return this.removeActiveApp(activeApp).incrementFreeBalance(
      tokenIndexedIncrements
    );
  }
  public setFreeBalance(newFreeBalanceClass: FreeBalanceClass) {
    return this.build({
      freeBalanceAppInstance: newFreeBalanceClass.toAppInstance(
        this.freeBalance
      )
    });
  }
  /**
   * Creates a channel with a freshly created free balance app installed
   * (the result of the `setup` protocol).
   */
  public static setupChannel(
    freeBalanceAppAddress: string,
    multisigAddress: string,
    userNeuteredExtendedKeys: string[],
    freeBalanceTimeout?: number
  ) {
    return new StateChannel(
      multisigAddress,
      userNeuteredExtendedKeys,
      new Map<string, AppInstanceProposal>([]),
      new Map<string, AppInstance>([]),
      new Map<string, SingleAssetTwoPartyIntermediaryAgreement>(),
      createFreeBalance(
        userNeuteredExtendedKeys,
        freeBalanceAppAddress,
        freeBalanceTimeout || HARD_CODED_ASSUMPTIONS.freeBalanceDefaultTimeout
      ),
      1
    );
  }
  /** Creates a channel with no free balance app (pre-`setup` state). */
  public static createEmptyChannel(
    multisigAddress: string,
    userNeuteredExtendedKeys: string[]
  ) {
    return new StateChannel(
      multisigAddress,
      userNeuteredExtendedKeys,
      new Map<string, AppInstanceProposal>([]),
      new Map<string, AppInstance>(),
      new Map<string, SingleAssetTwoPartyIntermediaryAgreement>(),
      // Note that this FreeBalance is undefined because a channel technically
      // does not have a FreeBalance before the `setup` protocol gets run
      undefined,
      1
    );
  }
  // Adds (or replaces) a proposal and bumps the monotonic proposal counter.
  public addProposal(proposal: AppInstanceProposal) {
    const proposedAppInstances = new Map<string, AppInstanceProposal>(
      this.proposedAppInstances.entries()
    );
    proposedAppInstances.set(proposal.identityHash, proposal);
    return this.build({
      proposedAppInstances,
      monotonicNumProposedApps: this.monotonicNumProposedApps + 1
    });
  }
  public removeProposal(appInstanceId: string) {
    const proposedAppInstances = new Map<string, AppInstanceProposal>(
      this.proposedAppInstances.entries()
    );
    proposedAppInstances.delete(appInstanceId);
    return this.build({
      proposedAppInstances
    });
  }
  // Adds (or replaces) an installed instance; also bumps the proposal
  // counter so derivation indices stay unique.
  public addAppInstance(appInstance: AppInstance) {
    const appInstances = new Map<string, AppInstance>(
      this.appInstances.entries()
    );
    appInstances.set(appInstance.identityHash, appInstance);
    return this.build({
      appInstances,
      monotonicNumProposedApps: this.monotonicNumProposedApps + 1
    });
  }
  public removeAppInstance(appInstanceId: string) {
    const appInstances = new Map<string, AppInstance>(
      this.appInstances.entries()
    );
    appInstances.delete(appInstanceId);
    return this.build({
      appInstances
    });
  }
  /** Returns a channel where the given app's state has been replaced. */
  public setState(appInstanceIdentityHash: string, state: SolidityValueType) {
    const appInstance = this.getAppInstance(appInstanceIdentityHash);
    const appInstances = new Map<string, AppInstance>(
      this.appInstances.entries()
    );
    appInstances.set(appInstanceIdentityHash, appInstance.setState(state));
    return this.build({
      appInstances
    });
  }
  /**
   * Records an intermediary agreement for a virtual app and moves the
   * decremented funds out of the free balance (decrements are flipped into
   * free-balance increments of opposite sign).
   */
  public addSingleAssetTwoPartyIntermediaryAgreement(
    targetIdentityHash: string,
    agreement: SingleAssetTwoPartyIntermediaryAgreement,
    decrements: CoinTransferMap,
    tokenAddress: string
  ) {
    // Add to singleAssetTwoPartyIntermediaryAgreements
    const evaaInstances = new Map<
      string,
      SingleAssetTwoPartyIntermediaryAgreement
    >(this.singleAssetTwoPartyIntermediaryAgreements.entries());
    evaaInstances.set(targetIdentityHash, agreement);
    return this.build({
      singleAssetTwoPartyIntermediaryAgreements: evaaInstances
    }).addActiveAppAndIncrementFreeBalance(targetIdentityHash, {
      [tokenAddress]: flip(decrements)
    });
  }
  /** Inverse of addSingleAssetTwoPartyIntermediaryAgreement; throws if absent. */
  public removeSingleAssetTwoPartyIntermediaryAgreement(
    targetIdentityHash: string,
    increments: { [addr: string]: BigNumber },
    tokenAddress: string
  ) {
    const singleAssetTwoPartyIntermediaryAgreements = new Map<
      string,
      SingleAssetTwoPartyIntermediaryAgreement
    >(this.singleAssetTwoPartyIntermediaryAgreements.entries());
    if (!singleAssetTwoPartyIntermediaryAgreements.delete(targetIdentityHash)) {
      throw Error(
        `cannot find agreement with target hash ${targetIdentityHash}`
      );
    }
    return this.build({
      singleAssetTwoPartyIntermediaryAgreements
    }).removeActiveAppAndIncrementFreeBalance(targetIdentityHash, {
      [tokenAddress]: increments
    });
  }
  /**
   * Installs an app: verifies its participants match the keys derived for
   * its sequence number, adds it, and debits the free balance.
   */
  public installApp(
    appInstance: AppInstance,
    tokenIndexedDecrements: TokenIndexedCoinTransferMap
  ) {
    // Verify appInstance has expected signingkeys
    const participants = this.getSigningKeysFor(appInstance.appSeqNo);
    if (!participants.every((v, idx) => v === appInstance.participants[idx])) {
      throw Error(
        "AppInstance passed to installApp has incorrect participants"
      );
    }
    /// Add modified FB and new AppInstance to appInstances
    const appInstances = new Map<string, AppInstance>(
      this.appInstances.entries()
    );
    appInstances.set(appInstance.identityHash, appInstance);
    return this.build({
      appInstances
    }).addActiveAppAndIncrementFreeBalance(
      appInstance.identityHash,
      flipTokenIndexedBalances(tokenIndexedDecrements)
    );
  }
  /**
   * Uninstalls an app: removes it (with consistency checks) and credits the
   * free balance with the final token-indexed amounts.
   */
  public uninstallApp(
    appInstanceIdentityHash: string,
    tokenIndexedIncrements: TokenIndexedCoinTransferMap
  ) {
    const appToBeUninstalled = this.getAppInstance(appInstanceIdentityHash);
    if (appToBeUninstalled.identityHash !== appInstanceIdentityHash) {
      // NOTE(review): "identityHah" in the message below is a typo for
      // "identityHash" — left untouched because message text is runtime
      // behavior.
      throw Error(
        `Consistency error: app stored under key ${appInstanceIdentityHash} has identityHah ${appToBeUninstalled.identityHash}`
      );
    }
    const appInstances = new Map<string, AppInstance>(
      this.appInstances.entries()
    );
    if (!appInstances.delete(appToBeUninstalled.identityHash)) {
      throw Error(
        `Consistency error: managed to call get on ${appInstanceIdentityHash} but failed to call delete`
      );
    }
    return this.build({
      appInstances
    }).removeActiveAppAndIncrementFreeBalance(
      appInstanceIdentityHash,
      tokenIndexedIncrements
    );
  }
  public getSingleAssetTwoPartyIntermediaryAgreementFromVirtualApp(
    key: string
  ): SingleAssetTwoPartyIntermediaryAgreement {
    const ret = this.singleAssetTwoPartyIntermediaryAgreements.get(key);
    if (!ret) {
      throw Error(
        `Could not find any eth virtual app agreements with virtual app ${key}`
      );
    }
    return ret;
  }
  /** Serializes the channel (Maps become entry arrays) for persistence. */
  toJson(): StateChannelJSON {
    return {
      multisigAddress: this.multisigAddress,
      userNeuteredExtendedKeys: this.userNeuteredExtendedKeys,
      proposedAppInstances: [...this.proposedAppInstances.entries()],
      appInstances: [...this.appInstances.entries()].map((appInstanceEntry): [
        string,
        AppInstanceJson
      ] => {
        return [appInstanceEntry[0], appInstanceEntry[1].toJson()];
      }),
      freeBalanceAppInstance: !!this.freeBalanceAppInstance
        ? this.freeBalanceAppInstance.toJson()
        : // Note that this FreeBalance is undefined because a channel technically
          // does not have a FreeBalance before the `setup` protocol gets run
          undefined,
      monotonicNumProposedApps: this.monotonicNumProposedApps,
      singleAssetTwoPartyIntermediaryAgreements: [
        ...this.singleAssetTwoPartyIntermediaryAgreements.entries()
      ]
    };
  }
  /** Inverse of toJson; tolerates missing collections in older snapshots. */
  static fromJson(json: StateChannelJSON): StateChannel {
    return new StateChannel(
      json.multisigAddress,
      json.userNeuteredExtendedKeys,
      new Map(
        [...Object.values(json.proposedAppInstances || [])].map((proposal): [
          string,
          AppInstanceProposal
        ] => {
          return [proposal[0], proposal[1]];
        })
      ),
      new Map(
        [...Object.values(json.appInstances || [])].map((appInstanceEntry): [
          string,
          AppInstance
        ] => {
          return [
            appInstanceEntry[0],
            AppInstance.fromJson(appInstanceEntry[1])
          ];
        })
      ),
      new Map(json.singleAssetTwoPartyIntermediaryAgreements || []),
      json.freeBalanceAppInstance
        ? AppInstance.fromJson(json.freeBalanceAppInstance)
        : undefined,
      json.monotonicNumProposedApps
    );
  }
  /** Loads the channel from the store and returns all owners except `myIdentifier`. */
  static async getPeersAddressFromChannel(
    myIdentifier: string,
    store: Store,
    multisigAddress: string
  ): Promise<string[]> {
    const stateChannel = await store.getStateChannel(multisigAddress);
    const owners = stateChannel.userNeuteredExtendedKeys;
    return owners.filter(owner => owner !== myIdentifier);
  }
  /** Resolves the app's multisig address, then delegates to getPeersAddressFromChannel. */
  static async getPeersAddressFromAppInstanceID(
    myIdentifier: string,
    store: Store,
    appInstanceId: string
  ): Promise<string[]> {
    const multisigAddress = await store.getMultisigAddressFromAppInstance(
      appInstanceId
    );
    return StateChannel.getPeersAddressFromChannel(
      myIdentifier,
      store,
      multisigAddress
    );
  }
}
* deeplearn.js backend.
*/
import * as tfc from '@tensorflow/tfjs-core';
import {onesLike as coreOnesLike, scalar, Tensor, Tensor1D, tensor1d, Tensor2D, Tensor3D, Tensor4D, tidy, where, zerosLike as coreZerosLike} from '@tensorflow/tfjs-core';
import {checkDataFormat} from '../common';
import {NotImplementedError, ValueError} from '../errors';
import {DataFormat, Shape} from '../keras_format/common';
import {HasShape} from '../types';
import * as math_utils from '../utils/math_utils';
import {imageDataFormat} from './common';
// tslint:enable
/* Setting and getting backend from deeplearn.js. */
// Default deeplearn.js backend is WebGL (GPU).
let backend: 'cpu'|'webgl' = 'webgl';
/**
 * Sets the tfjs-core backend and mirrors the choice into the module-level
 * `backend` variable so getBackend() can answer without querying tfjs-core.
 * @param requestedBackend 'cpu' or 'webgl'.
 */
export function setBackend(requestedBackend: 'cpu'|'webgl') {
  tfc.setBackend(requestedBackend);
  backend = requestedBackend;
}
/** Returns the backend most recently set via setBackend() ('webgl' by default). */
export function getBackend(): 'cpu'|'webgl' {
  return backend;
}
/**
 * Indicates whether the backend is operating symbolically.
 *
 * This function will be used to determine how to interpret user code. If
 * it returns true, calls to the backend construct a symbolic graph; if
 * it returns false, calls to the backend execute immediately.
 */
export function isBackendSymbolic(): boolean {
  // tfjs executes eagerly; there is no symbolic mode in this backend.
  return false;
}
/**
 * Get the number of elements in a Tensor.
 *
 * The count is the product of all dimensions in `x.shape`; a scalar
 * (empty shape) has exactly one element.
 *
 * @param x The Tensor (anything exposing a numeric `shape` array).
 * @return Number of elements in `x`.
 */
export function countParams(x: HasShape): number {
  let numElements = 1;
  for (const dim of x.shape) {
    numElements *= dim;
  }
  return numElements;
}
/**
 * Casts a tensor to a different dtype and returns it.
 *
 * Thin wrapper over Tensor.asType; the input tensor is not modified.
 *
 * @param x Input tensor.
 * @param dtype String: 'float32'|'int32'|'bool'.
 * @returns Tensor of the specified `dtype`.
 */
export function cast(x: Tensor, dtype: tfc.DataType): Tensor {
  return x.asType(dtype);
}
/**
 * Adds a 1-sized dimension at index "axis".
 *
 * Negative axes count from the end, so the default of -1 appends the new
 * dimension after the last existing one.
 *
 * @param x Input tensor.
 * @param axis Position where to add the new axis.
 * @returns Result of the dimension expansion.
 */
export function expandDims(x: Tensor, axis = -1): Tensor {
  const rank = x.shape.length;
  const insertAt = axis < 0 ? rank + axis + 1 : axis;
  const outShape = [
    ...x.shape.slice(0, insertAt),
    1,
    ...x.shape.slice(insertAt),
  ];
  return x.reshape(outShape);
}
/**
 * Repeats a 2D tensor.
 *
 * If `x` has shape `[samples, dim]` and `n` is 2, for example, the output
 * will have shape `[samples, 2, dim]`.
 *
 * @param x Input tensor.
 * @param n Integer, number of times to repeat.
 * @returns The result of the repeat operation.
 * @throws ValueError: If input tensor is not 2D.
 */
export function repeat(x: Tensor, n: number): Tensor {
  return tidy(() => {
    const rank = x.shape.length;
    if (rank !== 2) {
      throw new ValueError(
          `repeat() expects a rank-2 tensor, but received a ` +
          `rank-${rank} tensor.`);
    }
    // Insert a new axis at position 1, then tile n times along it.
    return tile(expandDims(x, 1), [1, n, 1]);
  });
}
/**
 * Flatten an Tensor into 1D.
 *
 * The output length is the product of all of `x`'s dimensions.
 *
 * @param x Input tensor.
 * @return The result of the flattening `x`.
 */
export function flatten(x: Tensor): Tensor {
  return x.reshape([math_utils.arrayProd(x.shape)]);
}
/**
 * Turn a nD tensor into a 2D tensor with same 0th dimension.
 * In other words, it flattens each data samples of a batch.
 *
 * @param x The tensor to flatten. The rank of this tensor is required to be 2
 *   or higher.
 * @return The result of the flattening.
 * @throws ValueError if `x` has rank 0 or 1.
 */
export function batchFlatten(x: Tensor): Tensor {
  if (x.rank <= 1) {
    throw new ValueError(
        `batchFlatten requires a minimum rank of 2. Got rank: ${x.rank}.`);
  }
  // Keep the batch dimension; collapse everything after it.
  return x.reshape([x.shape[0], math_utils.arrayProd(x.shape, 1)]);
}
/**
 * Do slicing along the first axis.
 *
 * Dispatches to the rank-specific tfc.sliceNd op, keeping every axis other
 * than the first at full extent.
 *
 * @param array input `tf.Tensor`.
 * @param start starting index, inclusive.
 * @param size size of the slice along the first axis.
 * @returns result of the slicing.
 * @throws ValueError: If `array` is of an unsupported subtype of `tf.Tensor`.
 */
export function sliceAlongFirstAxis(
    array: Tensor, start: number, size: number): Tensor {
  return tidy(() => {
    const shape = array.shape;
    if (array.rank === 1) {
      return tfc.slice1d(array as Tensor1D, start, size);
    }
    if (array.rank === 2) {
      return tfc.slice2d(array as Tensor2D, [start, 0], [size, shape[1]]);
    }
    if (array.rank === 3) {
      return tfc.slice3d(
          array as Tensor3D, [start, 0, 0], [size, shape[1], shape[2]]);
    }
    if (array.rank === 4) {
      return tfc.slice4d(
          array as Tensor4D, [start, 0, 0, 0],
          [size, shape[1], shape[2], shape[3]]);
    }
    throw new ValueError(
        `sliceAlongFirstAxis() received an unsupported tensor rank: ` +
        `${array.rank}`);
  });
}
/**
 * Do slicing along the last axis.
 *
 * Dispatches to the rank-specific tfc.sliceNd op, keeping every axis other
 * than the last at full extent.
 *
 * @param array input `tf.Tensor`.
 * @param start starting index, inclusive.
 * @param size size of the slice along the last axis.
 * @returns result of the slicing.
 * @throws ValueError: If `array` is of an unsupported subtype of `tf.Tensor`.
 */
export function sliceAlongLastAxis(
    array: Tensor, start: number, size: number): Tensor {
  return tidy(() => {
    const shape = array.shape;
    if (array.rank === 1) {
      return tfc.slice1d(array as Tensor1D, start, size);
    }
    if (array.rank === 2) {
      return tfc.slice2d(array as Tensor2D, [0, start], [shape[0], size]);
    }
    if (array.rank === 3) {
      return tfc.slice3d(
          array as Tensor3D, [0, 0, start], [shape[0], shape[1], size]);
    }
    if (array.rank === 4) {
      return tfc.slice4d(
          array as Tensor4D, [0, 0, 0, start],
          [shape[0], shape[1], shape[2], size]);
    }
    throw new ValueError(
        `sliceAlongLastAxis() received an unsupported tensor rank: ` +
        `${array.rank}`);
  });
}
/**
 * Do slicing along the specified axis.
 *
 * @param array input `tf.Tensor`.
 * @param start starting index, inclusive.
 * @param size size of the slice along the chosen axis.
 * @param axis the axis to slice along, 1-based (1 is the first axis).
 * @returns result of the slicing.
 * @throws ValueError: If `array` is of an unsupported subtype of `tf.Tensor`,
 *   or if `axis` is out of range for the tensor's rank.
 */
export function sliceAlongAxis(
    array: Tensor, start: number, size: number, axis: number): Tensor {
  return tidy(() => {
    switch (array.rank) {
      case 1:
        return tfc.slice1d(array as Tensor1D, start, size);
      case 2:
        switch (axis) {
          case 1:
            return sliceAlongFirstAxis(array, start, size);
          case 2:
            return sliceAlongLastAxis(array, start, size);
          default:
            throw new ValueError(
                `The axis is not within the rank of the tensor ` +
                `${axis}`);
        }
      case 3:
        switch (axis) {
          case 1:
            return sliceAlongFirstAxis(array, start, size);
          case 2:
            return tfc.slice3d(
                array as Tensor3D, [0, start, 0],
                [array.shape[0], size, array.shape[2]]);
          case 3:
            return sliceAlongLastAxis(array, start, size);
          default:
            throw new ValueError(
                `The axis is not within the rank of the tensor ` +
                `${axis}`);
        }
      case 4:
        switch (axis) {
          case 1:
            return sliceAlongFirstAxis(array, start, size);
          case 2:
            return tfc.slice4d(
                array as Tensor4D, [0, start, 0, 0],
                [array.shape[0], size, array.shape[2], array.shape[3]]);
          case 3:
            return tfc.slice4d(
                array as Tensor4D, [0, 0, start, 0],
                [array.shape[0], array.shape[1], size, array.shape[3]]);
          case 4:
            return sliceAlongLastAxis(array, start, size);
          default:
            throw new ValueError(
                `The axis is not within the rank of the tensor ` +
                `${axis}`);
        }
      default:
        // Fixed: previously reported the wrong function name
        // ("sliceAlongLastAxis()") in this error message.
        throw new ValueError(
            `sliceAlongAxis() received an unsupported tensor rank: ` +
            `${array.rank}`);
    }
  });
}
/**
 * Concatenates a list of tensors alongside the specified axis.
 *
 * A negative `axis` is resolved to "one past the last axis" of the first
 * tensor, which is then mapped to -1 because tfc.concat() requires axis to
 * lie in the interval [-rank, rank).
 *
 * @param tensors `Array` of tensors to concatenate.
 * @param axis Concatenation axis.
 * @returns The result of the concatenation.
 */
export function concatenate(tensors: Tensor[], axis = -1): Tensor {
  if (axis < 0) {
    const rank = tensors[0].rank;
    axis = rank !== 0 ? rank : 0;
  }
  if (axis === tensors[0].rank) {
    // tfc.concat() requires axis within [-rank, rank); translate "one past
    // the end" to the last axis.
    axis = -1;
  }
  // Porting Note: Sparse concat is not supported yet.
  return tfc.concat(tensors, axis);
}
/**
 * Concatenate two arrays along the first dimension.
 *
 * @param a The 1st `tf.Tensor` to concatenate.
 * @param b The 2nd `tf.Tensor` to concatenate.
 * @returns Result of the concatenation.
 * @throws ValueError: If `a` is of an unsupported subtype of `tf.Tensor`.
 */
export function concatAlongFirstAxis(a: Tensor, b: Tensor): Tensor {
  if (a.rank === 1) {
    return tfc.concat1d([a as Tensor1D, b as Tensor1D]);
  }
  if (a.rank === 2) {
    return tfc.concat2d([a as Tensor2D, b as Tensor2D], 0);
  }
  if (a.rank === 3) {
    return tfc.concat3d([a as Tensor3D, b as Tensor3D], 0);
  }
  if (a.rank === 4) {
    return tfc.concat4d([a as Tensor4D, b as Tensor4D], 0);
  }
  throw new ValueError(
      'concatAlongFirstAxis() received an unsupported tensor rank: ' +
      a.rank);
}
/**
 * Creates a tensor by tiling `x` by `n`.
 *
 * @param x A tensor.
 * @param n An Array of integers or a single integer. If an Array, its length
 *   must equal the number of dimensions in `x`. A single integer is treated
 *   as an Array of length 1.
 * @throws ValueError if the repetition spec does not match the rank of `x`.
 */
export function tile(x: Tensor, n: number|number[]): Tensor {
  const reps = Array.isArray(n) ? n : [n];
  if (x.rank !== reps.length) {
    throw new ValueError(
        `The length of input n (${reps.length}) does not match ` +
        `the number of dimensions in input x (${x.rank})`);
  }
  return tfc.tile(x, reps);
}
/* Creation of random tensors. */
/**
 * Draw a tensor of the given shape from a normal distribution.
 *
 * @param shape Shape of the tensor.
 * @param mean Mean of the distribution. Defaults to 0.
 * @param stddev Standard deviation of the distribution. Defaults to 1.
 * @param dtype Data type of the result: 'float32' or 'int32'. Optional.
 * @param seed Random seed, for determinism. Optional.
 * @return The normal tensor.
 */
export function randomNormal(
    shape: Shape, mean = 0.0, stddev = 1.0, dtype?: 'float32'|'int32',
    seed?: number): Tensor {
  // Thin delegation to the core op, kept so layers code depends on a
  // backend-neutral name.
  const result = tfc.randomNormal(shape, mean, stddev, dtype, seed);
  return result;
}
/* Linear Algebra */
/**
 * Multiply two tensors and returns the result as a tensor.
 *
 * For 2D tensors, this is equivalent to matrix multiplication (matMul).
 * For tensors of higher ranks, it follows the Theano behavior,
 * (e.g. `(2, 3) * (4, 3, 5) -> (2, 4, 5)`). From the Theano documentation:
 *
 * For N dimensions it is a sum product over the last axis of x and the
 * second-to-last of y.
 *
 * @param a A tensor of at least rank 2.
 * @param b A tensor of at least rank 2.
 * @param activation (optional) A string identifying the activation
 *   function; fused into the matMul where supported.
 * @param bias (optional) Bias tensor added to the product before activation.
 * @return Result of the dot operation.
 * @throws NotImplementedError if either input has rank < 2, or if the
 *   contraction dimensions of `a` and `b` do not match.
 */
export function dot(
    a: Tensor, b: Tensor, activation?: tfc.fused.Activation,
    bias?: Tensor): Tensor {
  if ((a.rank < 2) || (b.rank < 2)) {
    throw new NotImplementedError(
        `dot requires both inputs to be rank >= 2` +
        ` but got x shape = ${a.shape} and y shape = ${b.shape}`);
  }
  if (b.rank >= 3) {
    // The last dim of x must match the second-to-last dim of y for the
    // sum-product contraction to be defined.
    const xLastDim = a.shape.slice(-1)[0];
    const ySecondLastDim = b.shape.slice(-2)[0];
    if (xLastDim !== ySecondLastDim) {
      throw new NotImplementedError(
          `If rank y >= 3, then the second last dim` +
          ` of y must equal the last dim of x but got x shape = ${
              a.shape} and ` +
          ` y shape = ${b.shape}`);
    }
  }
  // Handle basic 2D x 2D case.
  if ((a.rank === 2) && (b.rank === 2)) {
    const transposeA = false;
    const transposeB = false;
    // tfc.fused.matMul only fuses certain activation functions. Unsupported
    // activation functions are treated as 'linear' activations, which is
    // equivalent to a no-op.
    return tfc.fused.matMul({
      a,
      b: b as Tensor2D,
      transposeA,
      transposeB,
      bias: bias ? reshapeBias(a.rank, bias, imageDataFormat()) : null,
      activation
    });
  } else {
    // Reshape x into the analogous 2D Tensor.
    const aFirstDims = a.shape.slice();  // Holds all but the last dim of x.
    const aLastDim = aFirstDims.pop();
    a = a.reshape([-1, aLastDim]);
    // Reshape y into the analogous 2D Tensor, and keep track of the
    // required dimensions to reproduce the output shape.
    const bShape = b.shape.slice();
    const bLastDim = bShape.pop();
    const ySecondLastDim = bShape.pop();
    const yOtherDims = [...bShape, bLastDim];
    // permutation should be like [r-2, 0, 1, 2, ... r-4, r-3, r-1]
    // where r is the rank of y.
    const perm = Array.from({length: b.rank}, (_, i) => {
      if (i === 0) {
        return b.rank - 2;
      } else if (i <= b.rank - 2) {
        return i - 1;
      }
      return i;
    });
    b = b.transpose(perm).reshape([ySecondLastDim, -1]);
    // Multiply x and y as 2D Tensors, and then reshape back to original.
    const outputShape = [...aFirstDims, ...yOtherDims];
    const transposeA = false;
    const transposeB = false;
    // NOTE(review): `a` was reshaped above, so a.rank is 2 here and the bias
    // is reshaped against rank 2 rather than the original rank of x —
    // confirm this matches the intended bias-broadcast semantics.
    return tfc.fused
        .matMul({
          a,
          b,
          transposeA,
          transposeB,
          bias: bias ? reshapeBias(a.rank, bias, imageDataFormat()) : null,
          activation
        })
        .reshape(outputShape);
  }
}
/**
 * Compute the sign Tensor of an input Tensor.
 *
 * Elements of the input `tf.Tensor` that are === 0 are mapped to 0.
 * Elements of the input `tf.Tensor` that are > 0 are mapped to 1.
 * Elements of the input `tf.Tensor` that are < 0 are mapped to -1.
 *
 * @param x Input `tf.Tensor`.
 * @return The sign `tf.Tensor`.
 */
export function sign(x: Tensor): Tensor {
  // TODO(cais): Move to the core.
  return tidy(() => {
    const zerosLikeX = coreZerosLike(x);
    const onesLikeX = coreOnesLike(x);
    return where(
        tfc.equal(x, zerosLikeX), zerosLikeX,
        // Reuse zerosLikeX rather than allocating a second zeros tensor
        // (the original called coreZerosLike(x) again here).
        where(
            tfc.greater(x, zerosLikeX), onesLikeX,
            tfc.mul(-1, onesLikeX)));
  });
}
/**
 * Computes the one-hot representation of an integer tensor.
 *
 * @param indices nD integer tensor of shape
 *   `(batch_size, dim1, dim2, ... dim(n-1))`. Only rank 1 is supported.
 * @param numClasses Integer, number of classes to consider.
 * @returns (n + 1)D one hot representation of the input
 *   with shape `(batch_size, dim1, dim2, ... dim(n-1), num_classes)`
 * @throws Error if `indices` is not rank 1.
 */
export function oneHot(indices: Tensor, numClasses: number): Tensor {
  return tidy(() => {
    if (indices.rank !== 1) {
      throw new Error(
          'Only 1D one-hot tensors are supported in the ' +
          'deeplearn backend, at present.');
    }
    const intIndices = indices.toInt() as Tensor1D;
    return tfc.oneHot(intIndices, numClasses).toFloat();
  });
}
/* Elementary math functions. */
/**
 * Retrieves the elements of indices `indices` in the tensor `reference`.
 *
 * @param reference A tensor.
 * @param indices An integer tensor of indices or an `Array` of integers.
 * @param axis Axis along which to perform the gather operation.
 * @returns The result of the gathering as a tensor.
 */
export function gather(
    reference: Tensor, indices: number[]|Tensor1D, axis?: number): Tensor {
  return tidy(() => {
    // Coerce the index spec to an int32 1D tensor either way.
    const indexTensor = Array.isArray(indices) ?
        tensor1d(indices, 'int32') :
        indices.toInt() as Tensor1D;
    return tfc.gather(reference, indexTensor, axis);
  });
}
/**
 * Element-wise square.
 * @param x Input tensor.
 * @return element-wise x^2
 */
export function square(x: Tensor): Tensor {
  // mulStrict enforces identical shapes, which trivially holds for (x, x).
  const squared = tfc.mulStrict(x, x);
  return squared;
}
/**
 * Element-wise exponentiation.
 *
 * Porting Note: In PyKeras, `a` (the exponent) is a Python integer, which
 * takes advantage of the backend's (e.g., TensorFlow's) automatic
 * conversion to tensor. Here we allow `a` to be either a number or a tensor.
 *
 * @param x The base tensor.
 * @param a The exponent, tensor or number. If a number, it is rounded to the
 *   nearest integer and converted to a tensor.
 * @returns A tensor of the same shape as `x`.
 * @throws NotImplementedError if the exponent tensor is not int32.
 */
export function pow(x: Tensor, a: Tensor|number): Tensor {
  return tidy(() => {
    const exponent: Tensor =
        typeof a === 'number' ? scalar(Math.round(a), 'int32') : a;
    if (exponent.dtype !== 'int32') {
      throw new NotImplementedError(
          `Non-int32 dtype (${exponent.dtype}) is not supported by pow() yet`);
    }
    return tfc.pow(x, exponent);
  });
}
/**
 * Reshapes bias tensor according to rank of x, so that it broadcasts
 * correctly when added to an input of rank `xRank`.
 *
 * @param xRank Rank of the tensor the bias will be added to.
 * @param bias The bias tensor; must be rank 1 or the same rank as `xRank`.
 * @param dataFormat 'channelsFirst' or 'channelsLast'.
 * @returns The reshaped bias tensor (returned unchanged when `xRank < 3`).
 * @throws ValueError if the bias rank is invalid or `xRank` is unsupported.
 */
function reshapeBias(xRank: number, bias: Tensor, dataFormat: string) {
  const biasShape = bias.shape;
  if (bias.rank !== 1 && bias.rank !== xRank) {
    throw new ValueError(
        'Unexpected bias dimensions: ' + bias.rank +
        '; expected it to be 1 or ' + xRank);
  }
  if (xRank === 5) {
    if (dataFormat === 'channelsFirst') {
      if (biasShape.length === 1) {
        return bias.reshape([1, biasShape[0], 1, 1, 1]);
      } else {
        // Move the channel dim (last in the bias) to the front.
        return bias.reshape(
            [1, biasShape[3], biasShape[0], biasShape[1], biasShape[2]]);
      }
    } else if (dataFormat === 'channelsLast') {
      if (biasShape.length === 1) {
        return bias.reshape([1, 1, 1, 1, biasShape[0]]);
      } else {
        return bias.reshape([1].concat(biasShape));
      }
    }
  } else if (xRank === 4) {
    if (dataFormat === 'channelsFirst') {
      if (biasShape.length === 1) {
        return bias.reshape([1, biasShape[0], 1, 1]);
      } else {
        return bias.reshape([1, biasShape[2], biasShape[0], biasShape[1]]);
      }
    } else if (dataFormat === 'channelsLast') {
      if (biasShape.length === 1) {
        return bias.reshape([1, 1, 1, biasShape[0]]);
      } else {
        return bias.reshape([1].concat(biasShape));
      }
    }
  } else if (xRank === 3) {
    if (dataFormat === 'channelsFirst') {
      if (biasShape.length === 1) {
        return bias.reshape([1, biasShape[0], 1]);
      } else {
        return bias.reshape([1, biasShape[1], biasShape[0]]);
      }
    } else if (dataFormat === 'channelsLast') {
      if (biasShape.length === 1) {
        return bias.reshape([1, 1, biasShape[0]]);
      } else {
        return bias.reshape([1].concat(biasShape));
      }
    }
  } else if (xRank < 3) {
    return bias;
  }
  // Bug fix: report the unsupported input rank (xRank) — the original
  // interpolated bias.rank, which is not what this message describes.
  throw new ValueError(`Unsupported input rank by biasAdd: ${xRank}`);
}
/* Neural-network operations. */
/**
 * Add a bias to a tensor.
 *
 * @param x The tensor to add the bias to.
 * @param bias The bias to add to `x`. Must be 1D or the same rank as `x`.
 * @param dataFormat 'channelsFirst' or 'channelsLast'; defaults to the
 *   global image data format when omitted.
 * @return Result of the bias adding.
 * @throws ValueError: If the rank of `bias` is incorrect.
 */
export function biasAdd(
    x: Tensor, bias: Tensor, dataFormat?: DataFormat): Tensor {
  return tidy(() => {
    const fmt = dataFormat == null ? imageDataFormat() : dataFormat;
    checkDataFormat(fmt);
    return x.add(reshapeBias(x.rank, bias, fmt));
  });
}
/**
 * Exponential linear unit (ELU).
 * @param x A tensor or variable to compute the activation function for.
 * @param alpha A scalar, a scaling factor for the negative section. Only the
 *   value 1 is currently supported.
 * @return Output of the ELU operation.
 * @throws NotImplementedError for any `alpha` other than 1.
 */
export function elu(x: Tensor, alpha = 1): Tensor {
  // TODO(cais): Add support for alpha values other than 1.
  if (alpha === 1) {
    return tfc.elu(x);
  }
  throw new NotImplementedError(
      `Support for alpha values other than 1 (${alpha}) is not implemented ` +
      `yet.`);
}
/**
 * Softsign of a tensor.
 *
 * Defined as x / (abs(x) + 1), element-wise.
 *
 * @param x Input.
 * @returns Output.
 */
export function softsign(x: Tensor): Tensor {
  return tidy(() => {
    const denominator = tfc.abs(x).add(1);
    return tfc.div(x, denominator);
  });
}
/**
 * Sets entries in `x` to zero at random, while scaling the entire tensor.
 *
 * @param x input tensor.
 * @param level fraction of the entries in the tensor that will be set to 0.
 * @param noiseShape shape of randomly generated keep/drop flags, must be
 *   broadcastable to the shape of `x`. Optional.
 * @param seed random seed to ensure determinism. Optional.
 * @returns Result of the dropout operation.
 */
export function dropout(
    x: Tensor, level: number, noiseShape?: number[], seed?: number): Tensor {
  const applyDropout = () => tfc.dropout(x, level, noiseShape, seed);
  return tidy(applyDropout);
}
/**
 * Element-wise, segment-wise linear approximation of sigmoid.
 *
 * Returns `0.` if `x < -2.5`, `1.` if `x > 2.5`.
 * In `-2.5 <= x <= 2.5`, returns `0.2 * x + 0.5`.
 *
 * @param x Input tensor.
 * @returns Output tensor.
 */
export function hardSigmoid(x: Tensor): Tensor {
  // Compute the linear segment, then clamp it into [0, 1].
  return tidy(() => tfc.clipByValue(tfc.add(.5, tfc.mul(.2, x)), 0, 1));
}
/**
* Invoke `x` in the training phase, and `alt` otherwise.
*
* Porting Note: We do not create placeholder tensors for the `training`
* boolean flag here, because there is no such thing in the TF.js imperative
* backend.
*
* @param x The function to invoke iff `training` is `true`.
* @param alt The function to invoke iff `training` is `false`.
* @param training Boolean flag for whether training phase is active.
* @returns The return value of `x()` if `training` is `true`, or the return
* value of `alt()` if `training` is `false`.
*/
export function inTrainPhase<T>(x: () => T, alt: () => T, training = false): T {
return training ? x() : alt();
} | the_stack |
namespace fgui {
export class ScrollPane extends PIXI.utils.EventEmitter {
// Easing used by all scroll tweens (cubic out).
private static $easeTypeFunc:Function = (t:number, d:number):number => { return (t = t / d - 1) * t * t + 1; }; //cubic out
// --- owner / display tree ---
private $owner: GComponent;
private $maskContainer: UIContainer;
private $container: PIXI.Container;
private $alignContainer: PIXI.Container;
// --- configuration (mostly derived from constructor flags / UIConfig) ---
private $scrollType: number;
private $scrollSpeed: number;
private $mouseWheelSpeed: number;
private $decelerationRate: number;
private $scrollBarMargin: utils.Margin;
private $bouncebackEffect: boolean;
private $touchEffect: boolean;
private $scrollBarDisplayAuto: boolean;
private $vScrollNone: boolean;
private $hScrollNone: boolean;
private $needRefresh: boolean;
// Axis ("x" or "y") used by the pull-to-refresh header/footer, if any.
private $refreshBarAxis: string;
private $displayOnLeft: boolean;
private $snapToItem: boolean;
private $displayOnDemand: boolean;
private $mouseWheelEnabled: boolean;
private $pageMode: boolean;
private $inertiaDisabled: boolean;
// --- scroll state ---
// Logical scroll position (non-negative; container position is its negation).
private $xPos: number;
private $yPos: number;
private $viewSize: PIXI.Point;
private $contentSize: PIXI.Point;
// contentSize - viewSize per axis, clamped elsewhere; max scrollable range.
private $overlapSize: PIXI.Point;
private $pageSize: PIXI.Point;
// --- touch / drag tracking ---
private $containerPos: PIXI.Point;
private $beginTouchPos: PIXI.Point;
private $lastTouchPos: PIXI.Point;
private $lastTouchGlobalPos: PIXI.Point;
private $velocity: PIXI.Point;
private $velocityScale: number;
private $lastMoveTime: number;
private $isHoldAreaDone: boolean;
private $aniFlag: number;
private $scrollBarVisible: boolean;
private $headerLockedSize: number;
private $footerLockedSize: number;
private $refreshEventDispatching: boolean;
// --- tween state (0 = idle; values 1 and 2 distinguish tween modes) ---
private $tweening: number;
private $tweenTime: PIXI.Point;
private $tweenDuration: PIXI.Point;
private $tweenStart: PIXI.Point;
private $tweenChange: PIXI.Point;
// --- sub-components ---
private $pageController: controller.Controller;
private $hzScrollBar: GScrollBar;
private $vtScrollBar: GScrollBar;
private $header: GComponent;
private $footer: GComponent;
private $isDragging: boolean = false;
// Pane currently being dragged, if any (global, at most one at a time).
public static draggingPane: ScrollPane;
private static $gestureFlag: number = 0;
// Scratch objects reused to avoid per-frame allocations.
private static sHelperPoint: PIXI.Point = new PIXI.Point();
private static sHelperRect: PIXI.Rectangle = new PIXI.Rectangle();
private static sEndPos: PIXI.Point = new PIXI.Point();
private static sOldChange: PIXI.Point = new PIXI.Point();
public static TWEEN_DEFAULT_DURATION: number = .4;
public static TWEEN_MANUALLY_SET_DURATION: number = 0.5; //tween duration used when call setPos(useAni=true)
public static PULL_DIST_RATIO: number = 0.5; //pulldown / pullup distance ratio of the whole viewport
/**@internal */
$loop: number;
/**
 * @param owner The component this pane scrolls.
 * @param scrollType ScrollType.Horizontal / Vertical / Both.
 * @param scrollBarMargin Margins applied around the scroll bars.
 * @param scrollBarDisplay A ScrollBarDisplayType value; Default falls back
 *   to UIConfig.defaultScrollBarDisplay.
 * @param flags ScrollPaneFlags bit mask controlling behavior.
 * @param vtScrollBarRes Package URL for the vertical scroll bar (optional).
 * @param hzScrollBarRes Package URL for the horizontal scroll bar (optional).
 * @param headerRes Package URL for the pull-to-refresh header (optional).
 * @param footerRes Package URL for the pull-to-refresh footer (optional).
 */
public constructor(owner: GComponent,
    scrollType: number,
    scrollBarMargin: utils.Margin,
    scrollBarDisplay: number,
    flags: number,
    vtScrollBarRes: string,
    hzScrollBarRes: string,
    headerRes: string,
    footerRes: string) {
    super();
    this.$owner = owner;
    this.$maskContainer = new UIContainer(null);
    this.$owner.$rootContainer.addChild(this.$maskContainer);
    this.$container = this.$owner.$container;
    this.$container.x = 0;
    this.$container.y = 0;
    this.$maskContainer.addChild(this.$container);
    this.$scrollBarMargin = scrollBarMargin;
    this.$scrollType = scrollType;
    this.$scrollSpeed = UIConfig.defaultScrollSpeed;
    this.$mouseWheelSpeed = this.$scrollSpeed * 2;
    this.$decelerationRate = UIConfig.defaultScrollDecelerationRate;
    this.$displayOnLeft = (flags & ScrollPaneFlags.DisplayOnLeft) != 0;
    this.$snapToItem = (flags & ScrollPaneFlags.SnapToItem) != 0;
    this.$displayOnDemand = (flags & ScrollPaneFlags.DisplayOnDemand) != 0;
    this.$pageMode = (flags & ScrollPaneFlags.PageMode) != 0;
    //explicit enable/disable flags win; otherwise fall back to UIConfig defaults
    if (flags & ScrollPaneFlags.TouchEffect)
        this.$touchEffect = true;
    else if (flags & ScrollPaneFlags.DisableTouchEffect)
        this.$touchEffect = false;
    else
        this.$touchEffect = UIConfig.defaultScrollTouchEffect;
    if (flags & ScrollPaneFlags.BounceEffect)
        this.$bouncebackEffect = true;
    else if (flags & ScrollPaneFlags.DisableBounceEffect)
        this.$bouncebackEffect = false;
    else
        this.$bouncebackEffect = UIConfig.defaultScrollBounceEffect;
    this.$inertiaDisabled = (flags & ScrollPaneFlags.DisableInertia) != 0;
    if ((flags & ScrollPaneFlags.DisableScissorRect) == 0)
        this.$maskContainer.scrollRect = new PIXI.Rectangle();
    this.$scrollBarVisible = true;
    this.$mouseWheelEnabled = true;
    this.$xPos = 0;
    this.$yPos = 0;
    this.$aniFlag = 0;
    this.$footerLockedSize = 0;
    this.$headerLockedSize = 0;
    if (scrollBarDisplay == ScrollBarDisplayType.Default)
        scrollBarDisplay = UIConfig.defaultScrollBarDisplay;
    this.$viewSize = new PIXI.Point();
    this.$contentSize = new PIXI.Point();
    this.$pageSize = new PIXI.Point(1, 1);
    this.$overlapSize = new PIXI.Point();
    this.$tweening = 0;
    this.$tweenTime = new PIXI.Point();
    this.$tweenStart = new PIXI.Point();
    this.$tweenDuration = new PIXI.Point();
    this.$tweenChange = new PIXI.Point();
    this.$velocity = new PIXI.Point();
    this.$containerPos = new PIXI.Point();
    this.$beginTouchPos = new PIXI.Point();
    this.$lastTouchPos = new PIXI.Point();
    this.$lastTouchGlobalPos = new PIXI.Point();
    let res: string;
    if (scrollBarDisplay != ScrollBarDisplayType.Hidden) {
        if (this.$scrollType == ScrollType.Both || this.$scrollType == ScrollType.Vertical) {
            //bug fix: assign to the outer `res` instead of shadowing it with a new const
            res = vtScrollBarRes ? vtScrollBarRes : UIConfig.verticalScrollBar;
            if (res) {
                this.$vtScrollBar = UIPackage.createObjectFromURL(res) as GScrollBar;
                if (!this.$vtScrollBar)
                    throw new Error(`Cannot create scrollbar from ${res}`);
                this.$vtScrollBar.setScrollPane(this, true);
                this.$owner.$rootContainer.addChild(this.$vtScrollBar.displayObject);
            }
        }
        if (this.$scrollType == ScrollType.Both || this.$scrollType == ScrollType.Horizontal) {
            res = hzScrollBarRes ? hzScrollBarRes : UIConfig.horizontalScrollBar;
            if (res) {
                this.$hzScrollBar = UIPackage.createObjectFromURL(res) as GScrollBar;
                if (!this.$hzScrollBar)
                    throw new Error(`Cannot create scrollbar from ${res}`);
                this.$hzScrollBar.setScrollPane(this, false);
                this.$owner.$rootContainer.addChild(this.$hzScrollBar.displayObject);
            }
        }
        this.$scrollBarDisplayAuto = scrollBarDisplay == ScrollBarDisplayType.Auto;
        if (this.$scrollBarDisplayAuto) {
            this.$scrollBarVisible = false;
            if (this.$vtScrollBar)
                this.$vtScrollBar.displayObject.visible = false;
            if (this.$hzScrollBar)
                this.$hzScrollBar.displayObject.visible = false;
        }
    }
    else
        this.$mouseWheelEnabled = false;
    if (headerRes) {
        this.$header = UIPackage.createObjectFromURL(headerRes) as GComponent;
        if (this.$header == null)
            //bug fix: report the header resource; `res` held the last scrollbar URL
            throw new Error(`Cannot create scrollPane.header from ${headerRes}`);
    }
    if (footerRes) {
        this.$footer = UIPackage.createObjectFromURL(footerRes) as GComponent;
        if (this.$footer == null)
            //bug fix: report the footer resource; `res` held the last scrollbar URL
            throw new Error(`Cannot create scrollPane.footer from ${footerRes}`);
    }
    if (this.$header != null || this.$footer != null)
        this.$refreshBarAxis = (this.$scrollType == ScrollType.Both || this.$scrollType == ScrollType.Vertical) ? "y" : "x";
    this.setSize(owner.width, owner.height);
    this.$owner.on(InteractiveEvents.Over, this.$rollOver, this);
    this.$owner.on(InteractiveEvents.Out, this.$rollOut, this);
    this.$owner.on(InteractiveEvents.Down, this.$mouseDown, this);
    this.$owner.on(DisplayObjectEvent.MOUSE_WHEEL, this.$mouseWheel, this);
}
/**
 * Tear down the pane: stop any running tween, dispose owned scroll bars and
 * header/footer components, and detach all stage/owner event listeners.
 */
public dispose(): void {
    if (this.$tweening != 0)
        GTimer.inst.remove(this.tweenUpdate, this);
    this.$pageController = null;
    // Dispose owned display components, where present.
    [this.$hzScrollBar, this.$vtScrollBar, this.$header, this.$footer]
        .forEach(component => {
            if (component != null)
                component.dispose();
        });
    const stage = GRoot.inst.nativeStage;
    stage.off(InteractiveEvents.Move, this.$mouseMove, this);
    stage.off(InteractiveEvents.Up, this.$mouseUp, this);
    stage.off(InteractiveEvents.Click, this.$click, this);
    this.$owner.off(InteractiveEvents.Over, this.$rollOver, this);
    this.$owner.off(InteractiveEvents.Out, this.$rollOut, this);
    this.$owner.off(InteractiveEvents.Down, this.$mouseDown, this);
    this.$owner.off(DisplayObjectEvent.MOUSE_WHEEL, this.$mouseWheel, this);
}
/** The component this pane scrolls. */
public get owner(): GComponent {
    return this.$owner;
}
public get horzScrollBar(): GScrollBar {
    return this.$hzScrollBar;
}
public get vertScrollBar(): GScrollBar {
    return this.$vtScrollBar;
}
/** Pull-to-refresh header component, if configured. */
public get header(): GComponent {
    return this.$header;
}
/** Pull-to-refresh footer component, if configured. */
public get footer(): GComponent {
    return this.$footer;
}
public get bouncebackEffect(): boolean {
    return this.$bouncebackEffect;
}
public set bouncebackEffect(sc: boolean) {
    this.$bouncebackEffect = sc;
}
public get touchEffect(): boolean {
    return this.$touchEffect;
}
public set touchEffect(sc: boolean) {
    this.$touchEffect = sc;
}
/** Pixels per scroll step; 0 resets to UIConfig.defaultScrollSpeed. */
public set scrollSpeed(val: number) {
    this.$scrollSpeed = val;
    if (this.$scrollSpeed == 0)
        this.$scrollSpeed = UIConfig.defaultScrollSpeed;
    // Mouse wheel scrolls twice as fast as a regular step.
    this.$mouseWheelSpeed = this.$scrollSpeed * 2;
}
public get scrollSpeed(): number {
    return this.$scrollSpeed;
}
public get snapToItem(): boolean {
    return this.$snapToItem;
}
public set snapToItem(value: boolean) {
    this.$snapToItem = value;
}
public get mouseWheelEnabled(): boolean {
    return this.$mouseWheelEnabled;
}
public set mouseWheelEnabled(value: boolean) {
    this.$mouseWheelEnabled = value;
}
public get decelerationRate(): number {
    return this.$decelerationRate;
}
public set decelerationRate(value: number) {
    this.$decelerationRate = value;
}
/** Horizontal scroll position as a fraction in [0, 1]. */
public get percX(): number {
    return this.$overlapSize.x == 0 ? 0 : this.$xPos / this.$overlapSize.x;
}
public set percX(value: number) {
    this.setPercX(value, false);
}
/**
 * Set the horizontal scroll fraction.
 * @param value Fraction in [0, 1] (clamped).
 * @param ani Whether to animate the move.
 */
public setPercX(value: number, ani: boolean = false): void {
    this.$owner.ensureBoundsCorrect();
    this.setPosX(this.$overlapSize.x * utils.NumberUtil.clamp01(value), ani);
}
/** Vertical scroll position as a fraction in [0, 1]. */
public get percY(): number {
    return this.$overlapSize.y == 0 ? 0 : this.$yPos / this.$overlapSize.y;
}
public set percY(value: number) {
    this.setPercY(value, false);
}
/**
 * Set the vertical scroll fraction.
 * @param value Fraction in [0, 1] (clamped).
 * @param ani Whether to animate the move.
 */
public setPercY(value: number, ani: boolean = false): void {
    this.$owner.ensureBoundsCorrect();
    this.setPosY(this.$overlapSize.y * utils.NumberUtil.clamp01(value), ani);
}
/** Horizontal scroll position in pixels. */
public get posX(): number {
    return this.$xPos;
}
public set posX(value: number) {
    this.setPosX(value, false);
}
/**
 * Set the horizontal scroll position in pixels (clamped to the scrollable
 * range). When `$loop == 1` the value is first remapped for looping lists.
 */
public setPosX(value: number, ani: boolean = false): void {
    this.$owner.ensureBoundsCorrect();
    if (this.$loop == 1)
        value = this.loopCheckingNewPos(value, "x");
    value = utils.NumberUtil.clamp(value, 0, this.$overlapSize.x);
    if (value != this.$xPos) {
        this.$xPos = value;
        this.posChanged(ani);
    }
}
/** Vertical scroll position in pixels. */
public get posY(): number {
    return this.$yPos;
}
public set posY(value: number) {
    this.setPosY(value, false);
}
/**
 * Set the vertical scroll position in pixels (clamped to the scrollable
 * range). When `$loop == 1` the value is first remapped for looping lists.
 */
public setPosY(value: number, ani: boolean = false): void {
    this.$owner.ensureBoundsCorrect();
    if (this.$loop == 1)
        value = this.loopCheckingNewPos(value, "y");
    value = utils.NumberUtil.clamp(value, 0, this.$overlapSize.y);
    if (value != this.$yPos) {
        this.$yPos = value;
        this.posChanged(ani);
    }
}
public get contentWidth(): number {
    return this.$contentSize.x;
}
public get contentHeight(): number {
    return this.$contentSize.y;
}
/** Width of the visible viewport (excludes margins and the vertical bar). */
public get viewWidth(): number {
    return this.$viewSize.x;
}
/** Setting viewWidth resizes the owner so the viewport reaches this width. */
public set viewWidth(value: number) {
    value = value + this.$owner.margin.left + this.$owner.margin.right;
    if (this.$vtScrollBar != null)
        value += this.$vtScrollBar.width;
    this.$owner.width = value;
}
/** Height of the visible viewport (excludes margins and the horizontal bar). */
public get viewHeight(): number {
    return this.$viewSize.y;
}
/** Setting viewHeight resizes the owner so the viewport reaches this height. */
public set viewHeight(value: number) {
    value = value + this.$owner.margin.top + this.$owner.margin.bottom;
    if (this.$hzScrollBar != null)
        value += this.$hzScrollBar.height;
    this.$owner.height = value;
}
/** Index of the page nearest the current x position (0 outside page mode). */
public get currentPageX(): number {
    if (!this.$pageMode)
        return 0;
    var page: number = Math.floor(this.$xPos / this.$pageSize.x);
    // Round up when more than half of the next page is visible.
    if (this.$xPos - page * this.$pageSize.x > this.$pageSize.x * 0.5)
        page++;
    return page;
}
public set currentPageX(value: number) {
    if (this.$pageMode && this.$overlapSize.x > 0)
        this.setPosX(value * this.$pageSize.x, false);
}
/** Index of the page nearest the current y position (0 outside page mode). */
public get currentPageY(): number {
    if (!this.$pageMode)
        return 0;
    let page: number = Math.floor(this.$yPos / this.$pageSize.y);
    // Round up when more than half of the next page is visible.
    if (this.$yPos - page * this.$pageSize.y > this.$pageSize.y * 0.5)
        page++;
    return page;
}
public set currentPageY(value: number) {
    if (this.$pageMode && this.$overlapSize.y > 0)
        this.setPosY(value * this.$pageSize.y, false);
}
/** True when scrolled all the way down, or when nothing scrolls vertically. */
public get isBottomMost(): boolean {
    return this.$yPos == this.$overlapSize.y || this.$overlapSize.y == 0;
}
/** True when scrolled all the way right, or when nothing scrolls horizontally. */
public get isRightMost(): boolean {
    return this.$xPos == this.$overlapSize.x || this.$overlapSize.x == 0;
}
/** Controller kept in sync with the current page (page mode only). */
public get pageController(): controller.Controller {
    return this.$pageController;
}
public set pageController(value: controller.Controller) {
    this.$pageController = value;
}
/** Live x position derived from the container, clamped to the scroll range. */
public get scrollingPosX(): number {
    return utils.NumberUtil.clamp(-this.$container.x, 0, this.$overlapSize.x);
}
/** Live y position derived from the container, clamped to the scroll range. */
public get scrollingPosY(): number {
    return utils.NumberUtil.clamp(-this.$container.y, 0, this.$overlapSize.y);
}
/** Scroll to the very top. */
public scrollTop(ani: boolean = false): void {
    this.setPercY(0, ani);
}
/** Scroll to the very bottom. */
public scrollBottom(ani: boolean = false): void {
    this.setPercY(1, ani);
}
/**
 * Scroll up by `ratio` pages (page mode) or `ratio * scrollSpeed` pixels.
 * (Also removes a stray empty statement that followed the original call.)
 */
public scrollUp(ratio: number = 1, ani: boolean = false): void {
    const step = this.$pageMode ? this.$pageSize.y : this.$scrollSpeed;
    this.setPosY(this.$yPos - step * ratio, ani);
}
/** Scroll down by `ratio` pages (page mode) or `ratio * scrollSpeed` pixels. */
public scrollDown(ratio: number = 1, ani: boolean = false): void {
    const step = this.$pageMode ? this.$pageSize.y : this.$scrollSpeed;
    this.setPosY(this.$yPos + step * ratio, ani);
}
/** Scroll left by `ratio` pages (page mode) or `ratio * scrollSpeed` pixels. */
public scrollLeft(ratio: number = 1, ani: boolean = false): void {
    const step = this.$pageMode ? this.$pageSize.x : this.$scrollSpeed;
    this.setPosX(this.$xPos - step * ratio, ani);
}
/** Scroll right by `ratio` pages (page mode) or `ratio * scrollSpeed` pixels. */
public scrollRight(ratio: number = 1, ani: boolean = false): void {
    const step = this.$pageMode ? this.$pageSize.x : this.$scrollSpeed;
    this.setPosX(this.$xPos + step * ratio, ani);
}
/**
 * Scroll so that `target` becomes visible inside the viewport.
 *
 * @param target A GObject child (in any descendant container) or an explicit
 *   PIXI.Rectangle in the owner's coordinate space.
 * @param ani Whether to animate the scroll.
 * @param snapToFirst When true, always align the target to the top/left edge
 *   (or its page boundary in page mode) even if it is already visible.
 */
public scrollToView(target: Object, ani: boolean = false, snapToFirst: boolean = false): void {
    this.$owner.ensureBoundsCorrect();
    if (this.$needRefresh)
        this.refresh();
    let rect: PIXI.Rectangle;
    if (target instanceof GObject) {
        if (target.parent != this.$owner) {
            // Target is nested deeper: convert its bounds into the owner's space.
            target.parent.localToGlobalRect(target.x, target.y,
                target.width, target.height, ScrollPane.sHelperRect);
            rect = this.$owner.globalToLocalRect(ScrollPane.sHelperRect.x, ScrollPane.sHelperRect.y,
                ScrollPane.sHelperRect.width, ScrollPane.sHelperRect.height, ScrollPane.sHelperRect);
        }
        else {
            rect = ScrollPane.sHelperRect;
            rect.x = target.x;
            rect.y = target.y;
            rect.width = target.width;
            rect.height = target.height;
        }
    }
    else
        rect = target as PIXI.Rectangle;
    if (this.$overlapSize.y > 0) {
        const bottom: number = this.$yPos + this.$viewSize.y;
        if (snapToFirst || rect.y <= this.$yPos || rect.height >= this.$viewSize.y) {
            // Target is above the viewport (or taller than it): align to top.
            if (this.$pageMode)
                this.setPosY(Math.floor(rect.y / this.$pageSize.y) * this.$pageSize.y, ani);
            else
                this.setPosY(rect.y, ani);
        }
        else if (rect.y + rect.height > bottom) {
            // Target extends below the viewport: scroll down just enough.
            if (this.$pageMode)
                this.setPosY(Math.floor(rect.y / this.$pageSize.y) * this.$pageSize.y, ani);
            else if (rect.height <= this.$viewSize.y / 2)
                this.setPosY(rect.y + rect.height * 2 - this.$viewSize.y, ani);
            else
                this.setPosY(rect.y + rect.height - this.$viewSize.y, ani);
        }
    }
    if (this.$overlapSize.x > 0) {
        let right: number = this.$xPos + this.$viewSize.x;
        if (snapToFirst || rect.x <= this.$xPos || rect.width >= this.$viewSize.x) {
            // Target is left of the viewport (or wider than it): align to left.
            if (this.$pageMode)
                this.setPosX(Math.floor(rect.x / this.$pageSize.x) * this.$pageSize.x, ani);
            else
                this.setPosX(rect.x, ani);
        }
        else if (rect.x + rect.width > right) {
            // Target extends past the right edge: scroll right just enough.
            if (this.$pageMode)
                this.setPosX(Math.floor(rect.x / this.$pageSize.x) * this.$pageSize.x, ani);
            else if (rect.width <= this.$viewSize.x / 2)
                this.setPosX(rect.x + rect.width * 2 - this.$viewSize.x, ani);
            else
                this.setPosX(rect.x + rect.width - this.$viewSize.x, ani);
        }
    }
    // A non-animated move must take effect immediately.
    if (!ani && this.$needRefresh)
        this.refresh();
}
/**
 * Whether `obj` (a child of the owner) is at least partially inside the
 * current viewport on every scrollable axis.
 *
 * @param obj The child object to test.
 * @returns true when the object intersects the visible area.
 */
public isChildInView(obj: GObject): boolean {
    if (this.$overlapSize.y > 0) {
        // Replace the hoisted `var` (reused across branches) with
        // block-scoped constants.
        const distY: number = obj.y + this.$container.y;
        if (distY < -obj.height || distY > this.$viewSize.y)
            return false;
    }
    if (this.$overlapSize.x > 0) {
        const distX: number = obj.x + this.$container.x;
        if (distX < -obj.width || distX > this.$viewSize.x)
            return false;
    }
    return true;
}
/**
 * Abort an in-progress drag: detach the stage listeners, clear the global
 * dragging state, and re-enable interaction on the mask container.
 */
public cancelDragging(): void {
    const stage = GRoot.inst.nativeStage;
    stage.off(InteractiveEvents.Move, this.$mouseMove, this);
    stage.off(InteractiveEvents.Up, this.$mouseUp, this);
    stage.off(InteractiveEvents.Click, this.$click, this);
    if (ScrollPane.draggingPane === this)
        ScrollPane.draggingPane = null;
    ScrollPane.$gestureFlag = 0;
    this.$isDragging = false;
    this.$maskContainer.interactive = true;
}
/** Whether a touch/mouse drag is currently in progress. */
public get isDragging(): boolean {
    return this.$isDragging;
}
/**
 * Reserve `size` pixels for the pull-to-refresh header. When the container
 * is currently pulled past its start, tween it to rest at the locked size.
 */
public lockHeader(size: number): void {
    if (this.$headerLockedSize == size)
        return;
    this.$headerLockedSize = size;
    // Only reposition when not already dispatching a refresh event and the
    // container is pulled past the start on the refresh axis.
    if (!this.$refreshEventDispatching && (<IndexedObject>this.$container)[this.$refreshBarAxis] >= 0) {
        this.$tweenStart.set(this.$container.x, this.$container.y);
        this.$tweenChange.set(0, 0);
        (<IndexedObject>this.$tweenChange)[this.$refreshBarAxis] = this.$headerLockedSize - (<IndexedObject>this.$tweenStart)[this.$refreshBarAxis];
        this.$tweenDuration.set(ScrollPane.TWEEN_DEFAULT_DURATION, ScrollPane.TWEEN_DEFAULT_DURATION);
        this.$tweenTime.set(0, 0);
        this.$tweening = 2;
        GTimer.inst.addLoop(1, this.tweenUpdate, this);
    }
}
/**
 * Reserve `size` pixels for the pull-to-refresh footer. When the container
 * is currently pulled past its end, tween it to rest at the locked size.
 */
public lockFooter(size: number): void {
    if (this.$footerLockedSize == size)
        return;
    this.$footerLockedSize = size;
    // Only reposition when the container is pulled past the end on the
    // refresh axis.
    if (!this.$refreshEventDispatching && (<IndexedObject>this.$container)[this.$refreshBarAxis] <= -(<IndexedObject>this.$overlapSize)[this.$refreshBarAxis]) {
        this.$tweenStart.set(this.$container.x, this.$container.y);
        this.$tweenChange.set(0, 0);
        let max: number = (<IndexedObject>this.$overlapSize)[this.$refreshBarAxis];
        // With no overlap, derive the scrollable extent from content+footer.
        if (max == 0)
            max = Math.max((<IndexedObject>this.$contentSize)[this.$refreshBarAxis] + this.$footerLockedSize - (<IndexedObject>this.$viewSize)[this.$refreshBarAxis], 0);
        else
            max += this.$footerLockedSize;
        (<IndexedObject>this.$tweenChange)[this.$refreshBarAxis] = -max - (<IndexedObject>this.$tweenStart)[this.$refreshBarAxis];
        this.$tweenDuration.set(ScrollPane.TWEEN_DEFAULT_DURATION, ScrollPane.TWEEN_DEFAULT_DURATION);
        this.$tweenTime.set(0, 0);
        this.$tweening = 2;
        GTimer.inst.addLoop(1, this.tweenUpdate, this);
    }
}
/**
 * @internal
 * Called by the owner when its size changes; recomputes the layout and
 * reapplies the current scroll position.
 */
onOwnerSizeChanged(): void {
    this.setSize(this.$owner.width, this.$owner.height);
    this.posChanged(false);
}
/**
 * @internal
 * Sync the scroll position when the bound page controller changes.
 */
handleControllerChanged(c: controller.Controller): void {
    if (this.$pageController == c) {
        if (this.$scrollType == ScrollType.Horizontal)
            this.currentPageX = c.selectedIndex;
        else
            this.currentPageY = c.selectedIndex;
    }
}
/** Push the current page index back into the bound page controller. */
private updatePageController(): void {
    if (this.$pageController != null && !this.$pageController.$updating) {
        let index: number;
        if (this.$scrollType == ScrollType.Horizontal)
            index = this.currentPageX;
        else
            index = this.currentPageY;
        if (index < this.$pageController.pageCount) {
            const c: controller.Controller = this.$pageController;
            this.$pageController = null; //prevent from handleControllerChanged calling
            c.selectedIndex = index;
            this.$pageController = c;
        }
    }
}
/**
 * @internal
 * Position the mask container according to the owner's margins, the
 * left-hand scrollbar (if any) and the owner's alignment offset.
 */
adjustMaskContainer(): void {
    let mx: number, my: number;
    if (this.$displayOnLeft && this.$vtScrollBar != null)
        mx = Math.floor(this.$owner.margin.left + this.$vtScrollBar.width);
    else
        mx = Math.floor(this.$owner.margin.left);
    my = Math.floor(this.$owner.margin.top);
    this.$maskContainer.position.set(mx, my);
    if (this.$owner.$alignOffset.x != 0 || this.$owner.$alignOffset.y != 0) {
        // Lazily insert an intermediate container to carry the align offset.
        if (this.$alignContainer == null) {
            this.$alignContainer = new PIXI.Container();
            this.$maskContainer.addChild(this.$alignContainer);
            this.$alignContainer.addChild(this.$container);
        }
        this.$alignContainer.position.set(this.$owner.$alignOffset.x, this.$owner.$alignOffset.y);
    }
    else if (this.$alignContainer)
        this.$alignContainer.position.set(0, 0);
}
/**
 * Lay out the scroll bars for the given owner size and recompute the
 * viewport (view size = owner size minus bars and margins, min 1px).
 */
public setSize(width: number, height: number): void {
    this.adjustMaskContainer();
    if (this.$hzScrollBar) {
        this.$hzScrollBar.y = height - this.$hzScrollBar.height;
        if (this.$vtScrollBar && !this.$vScrollNone) {
            // Leave room for the vertical bar on whichever side it occupies.
            this.$hzScrollBar.width = width - this.$vtScrollBar.width - this.$scrollBarMargin.left - this.$scrollBarMargin.right;
            if (this.$displayOnLeft)
                this.$hzScrollBar.x = this.$scrollBarMargin.left + this.$vtScrollBar.width;
            else
                this.$hzScrollBar.x = this.$scrollBarMargin.left;
        }
        else {
            this.$hzScrollBar.width = width - this.$scrollBarMargin.left - this.$scrollBarMargin.right;
            this.$hzScrollBar.x = this.$scrollBarMargin.left;
        }
    }
    if (this.$vtScrollBar) {
        if (!this.$displayOnLeft)
            this.$vtScrollBar.x = width - this.$vtScrollBar.width;
        if (this.$hzScrollBar)
            this.$vtScrollBar.height = height - this.$hzScrollBar.height - this.$scrollBarMargin.top - this.$scrollBarMargin.bottom;
        else
            this.$vtScrollBar.height = height - this.$scrollBarMargin.top - this.$scrollBarMargin.bottom;
        this.$vtScrollBar.y = this.$scrollBarMargin.top;
    }
    // View size: owner size minus visible bars and the owner's margins.
    this.$viewSize.x = width;
    this.$viewSize.y = height;
    if (this.$hzScrollBar && !this.$hScrollNone)
        this.$viewSize.y -= this.$hzScrollBar.height;
    if (this.$vtScrollBar && !this.$vScrollNone)
        this.$viewSize.x -= this.$vtScrollBar.width;
    this.$viewSize.x -= (this.$owner.margin.left + this.$owner.margin.right);
    this.$viewSize.y -= (this.$owner.margin.top + this.$owner.margin.bottom);
    this.$viewSize.x = Math.max(1, this.$viewSize.x);
    this.$viewSize.y = Math.max(1, this.$viewSize.y);
    // In page mode a page is exactly one viewport.
    this.$pageSize.x = this.$viewSize.x;
    this.$pageSize.y = this.$viewSize.y;
    this.handleSizeChanged();
}
public setContentSize(w: number, h: number): void {
if (this.$contentSize.x == w && this.$contentSize.y == h)
return;
this.$contentSize.x = w;
this.$contentSize.y = h;
this.handleSizeChanged();
}
/**
 * @internal
 * Called when the content size changes while a scroll is in flight
 * (tween, inertial fling, or active drag). Grows the recorded content
 * size, then re-anchors the relevant scroll state so the motion stays
 * visually continuous.
 * @param deltaWidth  change in content width
 * @param deltaHeight change in content height
 * @param deltaPosX   horizontal shift applied to existing content
 * @param deltaPosY   vertical shift applied to existing content
 */
changeContentSizeOnScrolling(deltaWidth: number, deltaHeight: number, deltaPosX: number, deltaPosY: number): void {
    // Remember whether we were clinging to the right/bottom edge before
    // the resize so we can keep clinging afterwards.
    const isRightmost: boolean = this.$xPos == this.$overlapSize.x;
    const isBottom: boolean = this.$yPos == this.$overlapSize.y;
    this.$contentSize.x += deltaWidth;
    this.$contentSize.y += deltaHeight;
    this.handleSizeChanged();
    if (this.$tweening == 1) {
        //if the last scroll is CLINGING-SIDE, then just continue to cling
        if (deltaWidth != 0 && isRightmost && this.$tweenChange.x < 0) {
            this.$xPos = this.$overlapSize.x;
            this.$tweenChange.x = -this.$xPos - this.$tweenStart.x;
        }
        if (deltaHeight != 0 && isBottom && this.$tweenChange.y < 0) {
            this.$yPos = this.$overlapSize.y;
            this.$tweenChange.y = -this.$yPos - this.$tweenStart.y;
        }
    }
    else if (this.$tweening == 2) {
        //re-pos to ensure the scrolling will go on smooth
        if (deltaPosX != 0) {
            this.$container.x -= deltaPosX;
            this.$tweenStart.x -= deltaPosX;
            this.$xPos = -this.$container.x;
        }
        if (deltaPosY != 0) {
            this.$container.y -= deltaPosY;
            this.$tweenStart.y -= deltaPosY;
            this.$yPos = -this.$container.y;
        }
    }
    else if (this.$isDragging) {
        // Shift the drag anchors too, so the finger keeps its grip point.
        if (deltaPosX != 0) {
            this.$container.x -= deltaPosX;
            this.$containerPos.x -= deltaPosX;
            this.$xPos = -this.$container.x;
        }
        if (deltaPosY != 0) {
            this.$container.y -= deltaPosY;
            this.$containerPos.y -= deltaPosY;
            this.$yPos = -this.$container.y;
        }
    }
    else {
        //if the last scroll is CLINGING-SIDE, then just continue to cling
        if (deltaWidth != 0 && isRightmost) {
            this.$xPos = this.$overlapSize.x;
            this.$container.x = -this.$xPos;
        }
        if (deltaHeight != 0 && isBottom) {
            this.$yPos = this.$overlapSize.y;
            this.$container.y = -this.$yPos;
        }
    }
    if (this.$pageMode)
        this.updatePageController();
}
/**
 * Recomputes everything derived from view size and content size:
 * on-demand scroll bar visibility (reclaiming/yielding the strip of view
 * the bar occupies), bar thumb percentages, the mask's scroll rect, the
 * per-axis overlap (scrollable) range, clamped logical positions, and
 * the container position — allowing over-scroll along $refreshBarAxis
 * for any locked pull-to-refresh header/footer.
 * @param onScrolling unused by the body — TODO confirm intent or remove
 */
private handleSizeChanged(onScrolling: boolean = false): void {
    if (this.$displayOnDemand) {
        // Each bar toggles off when its axis fits; the view grows by the
        // bar's thickness when hidden and shrinks again when shown.
        if (this.$vtScrollBar) {
            if (this.$contentSize.y <= this.$viewSize.y) {
                if (!this.$vScrollNone) {
                    this.$vScrollNone = true;
                    this.$viewSize.x += this.$vtScrollBar.width;
                }
            }
            else {
                if (this.$vScrollNone) {
                    this.$vScrollNone = false;
                    this.$viewSize.x -= this.$vtScrollBar.width;
                }
            }
        }
        if (this.$hzScrollBar) {
            if (this.$contentSize.x <= this.$viewSize.x) {
                if (!this.$hScrollNone) {
                    this.$hScrollNone = true;
                    this.$viewSize.y += this.$hzScrollBar.height;
                }
            }
            else {
                if (this.$hScrollNone) {
                    this.$hScrollNone = false;
                    this.$viewSize.y -= this.$hzScrollBar.height;
                }
            }
        }
    }
    if (this.$vtScrollBar) {
        if (this.$viewSize.y < this.$vtScrollBar.minSize)
            //use this.$vtScrollBar.displayObject.visible instead of this.$vtScrollBar.visible... ScrollBar actually is not in its owner's display tree, so vtScrollBar.visible will not work
            this.$vtScrollBar.displayObject.visible = false;
        else {
            this.$vtScrollBar.displayObject.visible = this.$scrollBarVisible && !this.$vScrollNone;
            // Thumb size = visible fraction of the content (0 when empty).
            if (this.$contentSize.y == 0)
                this.$vtScrollBar.displayPerc = 0;
            else
                this.$vtScrollBar.displayPerc = Math.min(1, this.$viewSize.y / this.$contentSize.y);
        }
    }
    if (this.$hzScrollBar) {
        if (this.$viewSize.x < this.$hzScrollBar.minSize)
            this.$hzScrollBar.displayObject.visible = false;
        else {
            this.$hzScrollBar.displayObject.visible = this.$scrollBarVisible && !this.$hScrollNone;
            if (this.$contentSize.x == 0)
                this.$hzScrollBar.displayPerc = 0;
            else
                this.$hzScrollBar.displayPerc = Math.min(1, this.$viewSize.x / this.$contentSize.x);
        }
    }
    // Keep the clipping rect in sync with the (possibly changed) view.
    const rect: PIXI.Rectangle = this.$maskContainer.scrollRect;
    if (rect) {
        rect.width = this.$viewSize.x;
        rect.height = this.$viewSize.y;
        this.$maskContainer.scrollRect = rect;
    }
    // Overlap = how far the content can scroll on each enabled axis.
    if (this.$scrollType == ScrollType.Horizontal || this.$scrollType == ScrollType.Both)
        this.$overlapSize.x = Math.ceil(Math.max(0, this.$contentSize.x - this.$viewSize.x));
    else
        this.$overlapSize.x = 0;
    if (this.$scrollType == ScrollType.Vertical || this.$scrollType == ScrollType.Both)
        this.$overlapSize.y = Math.ceil(Math.max(0, this.$contentSize.y - this.$viewSize.y));
    else
        this.$overlapSize.y = 0;
    //bounds checking
    this.$xPos = utils.NumberUtil.clamp(this.$xPos, 0, this.$overlapSize.x);
    this.$yPos = utils.NumberUtil.clamp(this.$yPos, 0, this.$overlapSize.y);
    if (this.$refreshBarAxis != null) {
        // Along the refresh axis the container may over-scroll up to the
        // locked header/footer sizes so pull-to-refresh widgets stay open.
        var max: number = (<IndexedObject>this.$overlapSize)[this.$refreshBarAxis];
        if (max == 0)
            max = Math.max((<IndexedObject>this.$contentSize)[this.$refreshBarAxis] + this.$footerLockedSize - (<IndexedObject>this.$viewSize)[this.$refreshBarAxis], 0);
        else
            max += this.$footerLockedSize;
        if (this.$refreshBarAxis == "x") {
            this.$container.position.set(utils.NumberUtil.clamp(this.$container.x, -max, this.$headerLockedSize),
                utils.NumberUtil.clamp(this.$container.y, -this.$overlapSize.y, 0));
        }
        else {
            this.$container.position.set(utils.NumberUtil.clamp(this.$container.x, -this.$overlapSize.x, 0),
                utils.NumberUtil.clamp(this.$container.y, -max, this.$headerLockedSize));
        }
        // Stretch header/footer across the non-refresh axis.
        if (this.$header != null) {
            if (this.$refreshBarAxis == "x")
                this.$header.height = this.$viewSize.y;
            else
                this.$header.width = this.$viewSize.x;
        }
        if (this.$footer != null) {
            if (this.$refreshBarAxis == "y")
                this.$footer.height = this.$viewSize.y;
            else
                this.$footer.width = this.$viewSize.x;
        }
    }
    else {
        this.$container.position.set(utils.NumberUtil.clamp(this.$container.x, -this.$overlapSize.x, 0),
            utils.NumberUtil.clamp(this.$container.y, -this.$overlapSize.y, 0));
    }
    this.syncScrollBar();
    this.checkRefreshBar();
    if (this.$pageMode)
        this.updatePageController();
}
/**
 * Marks the scroll position dirty and schedules a deferred refresh.
 * Animation intents are merged: once any caller requests a non-animated
 * update (-1), a pending animated one (1) is downgraded to a jump.
 * @param ani true to animate toward the new position
 */
private posChanged(ani: boolean): void {
    const requested = ani ? 1 : -1;
    if (this.$aniFlag == 0)
        this.$aniFlag = requested;
    else if (this.$aniFlag == 1 && requested == -1)
        this.$aniFlag = -1;
    this.$needRefresh = true;
    GTimer.inst.callLater(this.refresh, this);
}
/**
 * Deferred refresh entry point (scheduled by posChanged): snaps the
 * logical position to a page/item boundary when required, repositions
 * via refresh2, emits SCROLL, and — because a listener may have moved
 * the position again — runs a second pass to avoid flicker.
 */
private refresh(): void {
    this.$needRefresh = false;
    GTimer.inst.remove(this.refresh, this);
    if (this.$pageMode || this.$snapToItem) {
        // Align the pending target before applying it.
        ScrollPane.sEndPos.set(-this.$xPos, -this.$yPos);
        this.alignPosition(ScrollPane.sEndPos, false);
        this.$xPos = -ScrollPane.sEndPos.x;
        this.$yPos = -ScrollPane.sEndPos.y;
    }
    this.refresh2();
    this.emit(ScrollEvent.SCROLL, this);
    if (this.$needRefresh) { //developer might modify position in the callback, so here refresh again to avoid flickering
        this.$needRefresh = false;
        GTimer.inst.remove(this.refresh, this);
        this.refresh2();
    }
    this.syncScrollBar();
    this.$aniFlag = 0;
}
/**
 * Second phase of refresh(): when an animated update was requested
 * ($aniFlag == 1) and no drag is active, starts a type-1 tween toward
 * the logical ($xPos, $yPos); otherwise jumps the container there
 * immediately. Updates page state afterwards.
 */
private refresh2(): void {
    if (this.$aniFlag == 1 && !this.$isDragging) {
        let posX: number;
        let posY: number;
        // An axis with no overlap is pinned at 0 rather than tweened.
        if (this.$overlapSize.x > 0)
            posX = -Math.floor(this.$xPos);
        else {
            if (this.$container.x != 0)
                this.$container.x = 0;
            posX = 0;
        }
        if (this.$overlapSize.y > 0)
            posY = -Math.floor(this.$yPos);
        else {
            if (this.$container.y != 0)
                this.$container.y = 0;
            posY = 0;
        }
        if (posX != this.$container.x || posY != this.$container.y) {
            // Launch a "manually set" tween toward the target.
            this.$tweening = 1;
            this.$tweenTime.set(0, 0);
            this.$tweenDuration.set(ScrollPane.TWEEN_MANUALLY_SET_DURATION, ScrollPane.TWEEN_MANUALLY_SET_DURATION);
            this.$tweenStart.set(this.$container.x, this.$container.y);
            this.$tweenChange.set(posX - this.$tweenStart.x, posY - this.$tweenStart.y);
            GTimer.inst.addLoop(1, this.tweenUpdate, this);
        }
        else if (this.$tweening != 0)
            this.killTween();
    }
    else {
        // Non-animated path: kill any tween and snap into place.
        if (this.$tweening != 0)
            this.killTween();
        this.$container.position.set(Math.floor(-this.$xPos), Math.floor(-this.$yPos));
        this.loopCheckingCurrent();
    }
    if (this.$pageMode)
        this.updatePageController();
}
/**
 * Pushes the container's current offset into both scroll bars as a 0..1
 * percentage and, when auto-display is enabled, shows or hides them.
 * @param end true when a scroll gesture/tween has just finished; also
 *            re-enables interaction on the mask container.
 */
private syncScrollBar(end: boolean = false): void {
    if (this.$vtScrollBar != null) {
        const rangeY = this.$overlapSize.y;
        this.$vtScrollBar.scrollPerc =
            rangeY == 0 ? 0 : utils.NumberUtil.clamp(-this.$container.y, 0, rangeY) / rangeY;
        if (this.$scrollBarDisplayAuto)
            this.showScrollBar(!end);
    }
    if (this.$hzScrollBar != null) {
        const rangeX = this.$overlapSize.x;
        this.$hzScrollBar.scrollPerc =
            rangeX == 0 ? 0 : utils.NumberUtil.clamp(-this.$container.x, 0, rangeX) / rangeX;
        if (this.$scrollBarDisplayAuto)
            this.showScrollBar(!end);
    }
    if (end)
        this.$maskContainer.interactive = true;
}
/**
 * Pointer-down handler on the mask container: kills any running tween
 * (treating a touch that interrupts a tween as an in-progress drag),
 * snapshots the touch/container positions for the upcoming drag, resets
 * velocity tracking, and hooks stage-level move/up/click listeners.
 * @param e the PIXI interaction event for the pointer-down
 */
private $mouseDown(e:PIXI.interaction.InteractionEvent): void {
    if (!this.$touchEffect)
        return;
    if (this.$tweening != 0) {
        this.killTween();
        this.$isDragging = true;
    }
    else
        this.$isDragging = false;
    // NOTE(review): mobile uses the event's own global coordinates while
    // desktop reads the cached GRoot mouse status — presumably because
    // stage mouse tracking is only maintained there; confirm.
    const globalMouse: PIXI.Point = PIXI.utils.isMobile.any ?
        this.$owner.globalToLocal(e.data.global.x, e.data.global.y)
        : this.$owner.globalToLocal(GRoot.globalMouseStatus.mouseX, GRoot.globalMouseStatus.mouseY, ScrollPane.sHelperPoint);
    this.$containerPos.set(this.$container.x, this.$container.y);
    this.$beginTouchPos.copy(globalMouse);
    this.$lastTouchPos.copy(globalMouse);
    this.$lastTouchGlobalPos.copy(globalMouse);
    this.$isHoldAreaDone = false;
    this.$velocity.set(0, 0);
    this.$velocityScale = 1;
    this.$lastMoveTime = GTimer.inst.curTime / 1000;
    GRoot.inst.nativeStage.on(InteractiveEvents.Move, this.$mouseMove, this);
    GRoot.inst.nativeStage.on(InteractiveEvents.Up, this.$mouseUp, this);
    GRoot.inst.nativeStage.on(InteractiveEvents.Click, this.$click, this);
}
/**
 * Stage pointer-move handler while a touch is active. Arbitrates the
 * gesture direction against the pane's scroll type (cross-pane gesture
 * flags in ScrollPane.$gestureFlag), moves the content container —
 * applying rubber-band damping past the edges when bounceback is on —
 * tracks velocity for the inertial fling, and republishes scroll state.
 */
private $mouseMove(): void {
    if (!this.$touchEffect)
        return;
    // Yield if another pane owns the gesture or a GObject drag is active.
    if (ScrollPane.draggingPane != null && ScrollPane.draggingPane != this || GObject.draggingObject != null)
        return;
    let sensitivity: number = UIConfig.touchScrollSensitivity;
    const globalMouse: PIXI.Point = this.$owner.globalToLocal(GRoot.globalMouseStatus.mouseX, GRoot.globalMouseStatus.mouseY, ScrollPane.sHelperPoint);
    let diff: number, diff2: number;
    // FIX: initialize both flags so they are definitely assigned on every
    // path (strict-mode TS2454). Runtime behavior is unchanged — the
    // previously-unassigned flag was `undefined`, which is falsy like false.
    let sv: boolean = false, sh: boolean = false;
    if (this.$scrollType == ScrollType.Vertical) {
        if (!this.$isHoldAreaDone) {
            //gesture on vertical dir is being observed
            ScrollPane.$gestureFlag |= 1;
            diff = Math.abs(this.$beginTouchPos.y - globalMouse.y);
            if (diff < sensitivity)
                return;
            // If a horizontal gesture is also being observed, the larger
            // displacement wins the gesture.
            if ((ScrollPane.$gestureFlag & 2) != 0) {
                diff2 = Math.abs(this.$beginTouchPos.x - globalMouse.x);
                if (diff < diff2)
                    return;
            }
        }
        sv = true;
    }
    else if (this.$scrollType == ScrollType.Horizontal) {
        if (!this.$isHoldAreaDone) {
            ScrollPane.$gestureFlag |= 2; //gesture on horz dir is being observed
            diff = Math.abs(this.$beginTouchPos.x - globalMouse.x);
            if (diff < sensitivity)
                return;
            if ((ScrollPane.$gestureFlag & 1) != 0) {
                diff2 = Math.abs(this.$beginTouchPos.y - globalMouse.y);
                if (diff < diff2)
                    return;
            }
        }
        sh = true;
    }
    else {
        ScrollPane.$gestureFlag = 3; //both
        if (!this.$isHoldAreaDone) {
            diff = Math.abs(this.$beginTouchPos.y - globalMouse.y);
            if (diff < sensitivity) {
                diff = Math.abs(this.$beginTouchPos.x - globalMouse.x);
                if (diff < sensitivity)
                    return;
            }
        }
        sv = sh = true;
    }
    let newPosX: number = Math.floor(this.$containerPos.x + globalMouse.x - this.$beginTouchPos.x);
    let newPosY: number = Math.floor(this.$containerPos.y + globalMouse.y - this.$beginTouchPos.y);
    if (sv) {
        if (newPosY > 0) {
            // Over-scrolled past the top edge.
            if (!this.$bouncebackEffect)
                this.$container.y = 0;
            else if (this.$header != null && this.$header.height != 0) //TODO: height -> maxHeight
                this.$container.y = Math.floor(Math.min(newPosY * 0.5, this.$header.height));
            else
                this.$container.y = Math.floor(Math.min(newPosY * 0.5, this.$viewSize.y * ScrollPane.PULL_DIST_RATIO));
        }
        else if (newPosY < -this.$overlapSize.y) {
            // Over-scrolled past the bottom edge.
            if (!this.$bouncebackEffect)
                this.$container.y = -this.$overlapSize.y;
            else if (this.$footer != null && this.$footer.height > 0) //TODO: height -> maxHeight
                this.$container.y = Math.floor(Math.max((newPosY + this.$overlapSize.y) * 0.5, -this.$footer.height) - this.$overlapSize.y);
            else
                this.$container.y = Math.floor(Math.max((newPosY + this.$overlapSize.y) * 0.5, -this.$viewSize.y * ScrollPane.PULL_DIST_RATIO) - this.$overlapSize.y);
        }
        else
            this.$container.y = newPosY;
    }
    if (sh) {
        if (newPosX > 0) {
            if (!this.$bouncebackEffect)
                this.$container.x = 0;
            else if (this.$header != null && this.$header.width != 0) //TODO: width -> maxWidth
                this.$container.x = Math.floor(Math.min(newPosX * 0.5, this.$header.width));
            else
                this.$container.x = Math.floor(Math.min(newPosX * 0.5, this.$viewSize.x * ScrollPane.PULL_DIST_RATIO));
        }
        else if (newPosX < 0 - this.$overlapSize.x) {
            if (!this.$bouncebackEffect)
                this.$container.x = -this.$overlapSize.x;
            else if (this.$footer != null && this.$footer.width > 0) //TODO: width -> maxWidth
                this.$container.x = Math.floor(Math.max((newPosX + this.$overlapSize.x) * 0.5, -this.$footer.width) - this.$overlapSize.x);
            else
                this.$container.x = Math.floor(Math.max((newPosX + this.$overlapSize.x) * 0.5, -this.$viewSize.x * ScrollPane.PULL_DIST_RATIO) - this.$overlapSize.x);
        }
        else
            this.$container.x = newPosX;
    }
    //update acceleration
    const frameRate: number = GRoot.inst.applicationContext.ticker.FPS;
    const now: number = GTimer.inst.curTime / 1000;
    const deltaTime: number = Math.max(now - this.$lastMoveTime, 1 / frameRate);
    let deltaPositionX: number = globalMouse.x - this.$lastTouchPos.x;
    let deltaPositionY: number = globalMouse.y - this.$lastTouchPos.y;
    if (!sh)
        deltaPositionX = 0;
    if (!sv)
        deltaPositionY = 0;
    if (deltaTime != 0) {
        // Decay stale velocity, then blend in the new sample.
        const elapsed: number = deltaTime * frameRate - 1;
        if (elapsed > 1) {
            const factor: number = Math.pow(0.833, elapsed);
            this.$velocity.x = this.$velocity.x * factor;
            this.$velocity.y = this.$velocity.y * factor;
        }
        this.$velocity.x = utils.NumberUtil.lerp(this.$velocity.x, deltaPositionX * 60 / frameRate / deltaTime, deltaTime * 10);
        this.$velocity.y = utils.NumberUtil.lerp(this.$velocity.y, deltaPositionY * 60 / frameRate / deltaTime, deltaTime * 10);
    }
    //in the inertia scrolling we need the offset value to screen space, so here we need to reocrd the offset ratio
    const deltaGlobalPositionX: number = this.$lastTouchGlobalPos.x - globalMouse.x;
    const deltaGlobalPositionY: number = this.$lastTouchGlobalPos.y - globalMouse.y;
    if (deltaPositionX != 0)
        this.$velocityScale = Math.abs(deltaGlobalPositionX / deltaPositionX);
    else if (deltaPositionY != 0)
        this.$velocityScale = Math.abs(deltaGlobalPositionY / deltaPositionY);
    this.$lastTouchPos.copy(globalMouse);
    this.$lastTouchGlobalPos.copy(globalMouse);
    this.$lastMoveTime = now;
    //update position
    if (this.$overlapSize.x > 0)
        this.$xPos = utils.NumberUtil.clamp(-this.$container.x, 0, this.$overlapSize.x);
    if (this.$overlapSize.y > 0)
        this.$yPos = utils.NumberUtil.clamp(-this.$container.y, 0, this.$overlapSize.y);
    if (this.$loop != 0) {
        // A loop wrap moves the container; carry the jump into the anchor.
        newPosX = this.$container.x;
        newPosY = this.$container.y;
        if (this.loopCheckingCurrent()) {
            this.$containerPos.x += this.$container.x - newPosX;
            this.$containerPos.y += this.$container.y - newPosY;
        }
    }
    ScrollPane.draggingPane = this;
    this.$isHoldAreaDone = true;
    this.$isDragging = true;
    this.$maskContainer.interactive = false;
    this.syncScrollBar();
    this.checkRefreshBar();
    if (this.$pageMode)
        this.updatePageController();
    this.emit(ScrollEvent.SCROLL, this);
}
/**
 * Stage pointer-up handler: detaches the stage listeners, then either
 * starts a bounce-back tween (container left past an edge — also firing
 * pull-down/pull-up release events and honoring locked header/footer
 * sizes) or computes an inertial fling target from the tracked velocity
 * and starts the type-2 tween.
 */
private $mouseUp(): void {
    GRoot.inst.nativeStage.off(InteractiveEvents.Move, this.$mouseMove, this);
    GRoot.inst.nativeStage.off(InteractiveEvents.Up, this.$mouseUp, this);
    GRoot.inst.nativeStage.off(InteractiveEvents.Click, this.$click, this);
    if (ScrollPane.draggingPane == this)
        ScrollPane.draggingPane = null;
    ScrollPane.$gestureFlag = 0;
    if (!this.$isDragging || !this.$touchEffect) {
        this.$isDragging = false;
        this.$maskContainer.interactive = true;
        return;
    }
    this.$isDragging = false;
    this.$maskContainer.interactive = true;
    this.$tweenStart.set(this.$container.x, this.$container.y);
    ScrollPane.sEndPos.set(this.$tweenStart.x, this.$tweenStart.y);
    // flag = container is outside its legal range and must bounce back.
    let flag: boolean = false;
    if (this.$container.x > 0) {
        ScrollPane.sEndPos.x = 0;
        flag = true;
    }
    else if (this.$container.x < -this.$overlapSize.x) {
        ScrollPane.sEndPos.x = -this.$overlapSize.x;
        flag = true;
    }
    if (this.$container.y > 0) {
        ScrollPane.sEndPos.y = 0;
        flag = true;
    }
    else if (this.$container.y < -this.$overlapSize.y) {
        ScrollPane.sEndPos.y = -this.$overlapSize.y;
        flag = true;
    }
    if (flag) {
        this.$tweenChange.set(ScrollPane.sEndPos.x - this.$tweenStart.x, ScrollPane.sEndPos.y - this.$tweenStart.y);
        // Dragged far enough past an edge → notify pull-to-refresh listeners.
        if (this.$tweenChange.x < -UIConfig.touchDragSensitivity || this.$tweenChange.y < -UIConfig.touchDragSensitivity) {
            this.$refreshEventDispatching = true;
            this.emit(ScrollEvent.PULL_DOWN_RELEASE);
            this.$refreshEventDispatching = false;
        }
        else if (this.$tweenChange.x > UIConfig.touchDragSensitivity || this.$tweenChange.y > UIConfig.touchDragSensitivity) {
            this.$refreshEventDispatching = true;
            this.emit(ScrollEvent.PULL_UP_RELEASE);
            this.$refreshEventDispatching = false;
        }
        // With a locked header/footer, bounce to its edge instead of 0/-overlap.
        if (this.$headerLockedSize > 0 && (<IndexedObject>ScrollPane.sEndPos)[this.$refreshBarAxis] == 0) {
            (<IndexedObject>ScrollPane.sEndPos)[this.$refreshBarAxis] = this.$headerLockedSize;
            this.$tweenChange.x = ScrollPane.sEndPos.x - this.$tweenStart.x;
            this.$tweenChange.y = ScrollPane.sEndPos.y - this.$tweenStart.y;
        }
        else if (this.$footerLockedSize > 0 && (<IndexedObject>ScrollPane.sEndPos)[this.$refreshBarAxis] == -(<IndexedObject>this.$overlapSize)[this.$refreshBarAxis]) {
            var max: number = (<IndexedObject>this.$overlapSize)[this.$refreshBarAxis];
            if (max == 0)
                max = Math.max((<IndexedObject>this.$contentSize)[this.$refreshBarAxis] + this.$footerLockedSize - (<IndexedObject>this.$viewSize)[this.$refreshBarAxis], 0);
            else
                max += this.$footerLockedSize;
            (<IndexedObject>ScrollPane.sEndPos)[this.$refreshBarAxis] = -max;
            this.$tweenChange.x = ScrollPane.sEndPos.x - this.$tweenStart.x;
            this.$tweenChange.y = ScrollPane.sEndPos.y - this.$tweenStart.y;
        }
        this.$tweenDuration.set(ScrollPane.TWEEN_DEFAULT_DURATION, ScrollPane.TWEEN_DEFAULT_DURATION);
    }
    else {
        if (!this.$inertiaDisabled) {
            // Decay the tracked velocity by the time since the last move.
            const frameRate: number = GRoot.inst.applicationContext.ticker.FPS;
            const elapsed: number = (GTimer.inst.curTime / 1000 - this.$lastMoveTime) * frameRate - 1;
            if (elapsed > 1) {
                const factor: number = Math.pow(0.833, elapsed);
                this.$velocity.x = this.$velocity.x * factor;
                this.$velocity.y = this.$velocity.y * factor;
            }
            //calc dist & duration by speed
            this.updateTargetAndDuration(this.$tweenStart, ScrollPane.sEndPos);
        }
        else
            this.$tweenDuration.set(ScrollPane.TWEEN_DEFAULT_DURATION, ScrollPane.TWEEN_DEFAULT_DURATION);
        ScrollPane.sOldChange.set(ScrollPane.sEndPos.x - this.$tweenStart.x, ScrollPane.sEndPos.y - this.$tweenStart.y);
        //adjust
        this.loopCheckingTarget(ScrollPane.sEndPos);
        if (this.$pageMode || this.$snapToItem)
            this.alignPosition(ScrollPane.sEndPos, true);
        this.$tweenChange.x = ScrollPane.sEndPos.x - this.$tweenStart.x;
        this.$tweenChange.y = ScrollPane.sEndPos.y - this.$tweenStart.y;
        if (this.$tweenChange.x == 0 && this.$tweenChange.y == 0) {
            if (this.$scrollBarDisplayAuto)
                this.showScrollBar(false);
            return;
        }
        // Snapping shortened the travel → shorten the duration to match.
        if (this.$pageMode || this.$snapToItem) {
            this.fixDuration("x", ScrollPane.sOldChange.x);
            this.fixDuration("y", ScrollPane.sOldChange.y);
        }
    }
    this.$tweening = 2;
    this.$tweenTime.set(0, 0);
    GTimer.inst.addLoop(1, this.tweenUpdate, this);
}
/**
 * Stage click handler registered for the duration of a touch; clears
 * the dragging flag so a plain click is not treated as a drag.
 */
private $click(): void {
    this.$isDragging = false;
}
/**
 * Mouse-wheel handler: scrolls one page (page mode) or one wheel step
 * per notch. A horizontal-only pane scrolls on X; every other case
 * scrolls on Y.
 * @param evt wheel event carrying a signed `delta`
 */
private $mouseWheel(evt: any): void {
    if (!this.$mouseWheelEnabled)
        return;
    let step: number;
    if (evt.delta > 0)
        step = -1;
    else if (evt.delta < 0)
        step = 1;
    else
        step = 0;
    const horizontalOnly = this.$overlapSize.x > 0 && this.$overlapSize.y == 0;
    if (horizontalOnly) {
        const amount = this.$pageMode ? this.$pageSize.x : this.$mouseWheelSpeed;
        this.setPosX(this.$xPos + amount * step, false);
    }
    else {
        const amount = this.$pageMode ? this.$pageSize.y : this.$mouseWheelSpeed;
        this.setPosY(this.$yPos + amount * step, false);
    }
}
/** Pointer entered the pane: reveal the auto-displayed scroll bars. */
private $rollOver(): void {
    this.showScrollBar(true);
}
/** Pointer left the pane: schedule the scroll bars to hide. */
private $rollOut(): void {
    this.showScrollBar(false);
}
private showScrollBar(visible: boolean): void {
if (visible) {
GTimer.inst.remove(this.setScrollBarVisible, this);
this.setScrollBarVisible(true);
}
else
GTimer.inst.add(500, 1, this.setScrollBarVisible, this, visible);
}
/**
 * Applies scroll bar visibility, forcing bars off while the view has no
 * area and keeping per-axis "none" flags in effect.
 * @param visible requested visibility
 */
private setScrollBarVisible(visible: boolean): void {
    const hasArea = this.$viewSize.x > 0 && this.$viewSize.y > 0;
    this.$scrollBarVisible = visible && hasArea;
    const show = this.$scrollBarVisible;
    if (this.$vtScrollBar)
        this.$vtScrollBar.displayObject.visible = show && !this.$vScrollNone;
    if (this.$hzScrollBar)
        this.$hzScrollBar.displayObject.visible = show && !this.$hScrollNone;
}
/**
 * Size of one loop segment: the content extent on `axis` (plus the
 * list's column/line gap when the owner is a GList) divided by
 * `division`.
 */
private getLoopPartSize(division: number, axis: string): number {
    let gap: number = 0;
    if (this.$owner instanceof GList)
        gap = (axis == "x") ? this.$owner.columnGap : this.$owner.lineGap;
    const total = (<IndexedObject>this.$contentSize)[axis] + gap;
    return total / division;
}
/**
 * In loop mode, re-centers the current scroll position by half the
 * looped content whenever it reaches either end so scrolling can
 * continue seamlessly.
 * @returns true when a wrap was applied (the container is repositioned
 *          to match).
 */
private loopCheckingCurrent(): boolean {
    let changed: boolean = false;
    if (this.$loop == 1 && this.$overlapSize.x > 0) {
        if (this.$xPos < 0.001) {
            this.$xPos += this.getLoopPartSize(2, "x");
            changed = true;
        }
        else if (this.$xPos >= this.$overlapSize.x) {
            this.$xPos -= this.getLoopPartSize(2, "x");
            changed = true;
        }
    }
    else if (this.$loop == 2 && this.$overlapSize.y > 0) {
        if (this.$yPos < 0.001) {
            this.$yPos += this.getLoopPartSize(2, "y");
            changed = true;
        }
        else if (this.$yPos >= this.$overlapSize.y) {
            this.$yPos -= this.getLoopPartSize(2, "y");
            changed = true;
        }
    }
    if (changed)
        this.$container.position.set(Math.floor(-this.$xPos), Math.floor(-this.$yPos));
    return changed;
}
private loopCheckingTarget(endPos: PIXI.Point): void {
if (this.$loop == 1)
this.loopCheckingTarget2(endPos, "x");
if (this.$loop == 2)
this.loopCheckingTarget2(endPos, "y");
}
/**
 * Loop-mode helper for a tween target on one axis: when the target
 * overshoots either end, shifts both the target and the tween start by
 * half the looped content so the tween stays inside the legal range.
 */
private loopCheckingTarget2(endPos: PIXI.Point, axis: string): void {
    let halfSize: number;
    let tmp: number;
    if ((<IndexedObject>endPos)[axis] > 0) {
        halfSize = this.getLoopPartSize(2, axis);
        tmp = (<IndexedObject>this.$tweenStart)[axis] - halfSize;
        // Only shift when the moved start still lies inside the range.
        if (tmp <= 0 && tmp >= -(<IndexedObject>this.$overlapSize)[axis]) {
            (<IndexedObject>endPos)[axis] -= halfSize;
            (<IndexedObject>this.$tweenStart)[axis] = tmp;
        }
    }
    else if ((<IndexedObject>endPos)[axis] < -(<IndexedObject>this.$overlapSize)[axis]) {
        halfSize = this.getLoopPartSize(2, axis);
        tmp = (<IndexedObject>this.$tweenStart)[axis] + halfSize;
        if (tmp <= 0 && tmp >= -(<IndexedObject>this.$overlapSize)[axis]) {
            (<IndexedObject>endPos)[axis] += halfSize;
            (<IndexedObject>this.$tweenStart)[axis] = tmp;
        }
    }
}
/**
 * Loop-mode helper for an explicitly requested position: wraps a value
 * that fell off either end by half the looped content and nudges the
 * current position toward it in whole sixth-of-content steps (clamped),
 * repositioning the container so the visible content appears unchanged.
 * @returns the wrapped target value
 */
private loopCheckingNewPos(value: number, axis: string): number {
    if ((<IndexedObject>this.$overlapSize)[axis] == 0)
        return value;
    let pos: number = axis == "x" ? this.$xPos : this.$yPos;
    let changed: boolean = false;
    let v: number;
    if (value < 0.001) {
        value += this.getLoopPartSize(2, axis);
        if (value > pos) {
            v = this.getLoopPartSize(6, axis);
            v = Math.ceil((value - pos) / v) * v;
            pos = utils.NumberUtil.clamp(pos + v, 0, (<IndexedObject>this.$overlapSize)[axis]);
            changed = true;
        }
    }
    else if (value >= (<IndexedObject>this.$overlapSize)[axis]) {
        value -= this.getLoopPartSize(2, axis);
        if (value < pos) {
            v = this.getLoopPartSize(6, axis);
            v = Math.ceil((pos - value) / v) * v;
            pos = utils.NumberUtil.clamp(pos - v, 0, (<IndexedObject>this.$overlapSize)[axis]);
            changed = true;
        }
    }
    if (changed) {
        if (axis == "x")
            this.$container.x = -Math.floor(pos);
        else
            this.$container.y = -Math.floor(pos);
    }
    return value;
}
/**
 * Snaps a tentative end position (mutated in place) either to a page
 * boundary (page mode) or to the owner's nearest item (snap mode).
 * @param pos               candidate end position in container coords
 * @param inertialScrolling true when called from an inertial fling
 */
private alignPosition(pos: PIXI.Point, inertialScrolling: boolean): void {
    if (this.$pageMode) {
        pos.x = this.alignByPage(pos.x, "x", inertialScrolling);
        pos.y = this.alignByPage(pos.y, "y", inertialScrolling);
        return;
    }
    if (!this.$snapToItem)
        return;
    const snapped: PIXI.Point = this.$owner.getSnappingPosition(-pos.x, -pos.y, ScrollPane.sHelperPoint);
    // Only snap positions strictly inside the scrollable range; edge
    // positions are left alone so bounce-back still works.
    if (pos.x < 0 && pos.x > -this.$overlapSize.x)
        pos.x = -snapped.x;
    if (pos.y < 0 && pos.y > -this.$overlapSize.y)
        pos.y = -snapped.y;
}
/**
 * Snaps a tentative scroll offset (negative container coordinate) to a
 * page boundary on one axis, applying a "magnetic" threshold so small
 * drags stay on the current page, and — during inertial scrolling —
 * limiting the travel to one page away from the page the fling started
 * on.
 * @param pos               candidate offset (<= 0 inside the range)
 * @param axis              "x" or "y"
 * @param inertialScrolling true when called from an inertial fling
 * @returns the page-aligned offset
 */
private alignByPage(pos: number, axis: string, inertialScrolling: boolean): number {
    let page: number;
    if (pos > 0)
        page = 0;
    else if (pos < -(<IndexedObject>this.$overlapSize)[axis])
        page = Math.ceil((<IndexedObject>this.$contentSize)[axis] / (<IndexedObject>this.$pageSize)[axis]) - 1;
    else {
        page = Math.floor(-pos / (<IndexedObject>this.$pageSize)[axis]);
        var change: number = inertialScrolling ? (pos - (<IndexedObject>this.$containerPos)[axis]) : (pos - (<IndexedObject>this.$container)[axis]);
        var testPageSize: number = Math.min((<IndexedObject>this.$pageSize)[axis], (<IndexedObject>this.$contentSize)[axis] - (page + 1) * (<IndexedObject>this.$pageSize)[axis]);
        var delta: number = -pos - page * (<IndexedObject>this.$pageSize)[axis];
        //page mode magnetic
        if (Math.abs(change) > (<IndexedObject>this.$pageSize)[axis]) {
            if (delta > testPageSize * 0.5)
                page++;
        }
        else {
            if (delta > testPageSize * (change < 0 ? 0.3 : 0.7))
                page++;
        }
        //re-calc dist
        // BUG FIX: previously this clamped to -pageSize, which pinned
        // scrolling near page 1 whenever there were more than two pages.
        // Clamp to the full scrollable range (-overlapSize), matching
        // upstream FairyGUI behavior.
        pos = -page * (<IndexedObject>this.$pageSize)[axis];
        if (pos < -(<IndexedObject>this.$overlapSize)[axis])
            pos = -(<IndexedObject>this.$overlapSize)[axis];
    }
    if (inertialScrolling) {
        // Limit a fling to at most one page beyond the page it started on.
        var oldPos: number = (<IndexedObject>this.$tweenStart)[axis];
        var oldPage: number;
        if (oldPos > 0)
            oldPage = 0;
        else if (oldPos < -(<IndexedObject>this.$overlapSize)[axis])
            oldPage = Math.ceil((<IndexedObject>this.$contentSize)[axis] / (<IndexedObject>this.$pageSize)[axis]) - 1;
        else
            oldPage = Math.floor(-oldPos / (<IndexedObject>this.$pageSize)[axis]);
        var startPage: number = Math.floor(-(<IndexedObject>this.$containerPos)[axis] / (<IndexedObject>this.$pageSize)[axis]);
        if (Math.abs(page - startPage) > 1 && Math.abs(oldPage - startPage) <= 1) {
            if (page > startPage)
                page = startPage + 1;
            else
                page = startPage - 1;
            pos = -page * (<IndexedObject>this.$pageSize)[axis];
        }
    }
    return pos;
}
/**
 * Resolves the inertial fling target for both axes. Each per-axis call
 * also writes the matching component of $tweenDuration as a side
 * effect.
 * @param orignPos  fling start position
 * @param resultPos receives the computed end position
 */
private updateTargetAndDuration(orignPos: PIXI.Point, resultPos: PIXI.Point): void {
    resultPos.set(
        this.updateTargetAndDuration2(orignPos.x, "x"),
        this.updateTargetAndDuration2(orignPos.y, "y")
    );
}
/**
 * Computes the inertial fling destination and duration for one axis
 * from the tracked velocity. Out-of-range starts are simply clamped;
 * otherwise slow flings are suppressed by a speed threshold and fast
 * ones travel the integral of the decaying velocity. Writes the
 * duration (floored at TWEEN_DEFAULT_DURATION) into
 * $tweenDuration[axis] and returns the target position.
 */
private updateTargetAndDuration2(pos: number, axis: string): number {
    let v: number = (<IndexedObject>this.$velocity)[axis];
    var duration: number = 0;
    if (pos > 0)
        pos = 0;
    else if (pos < -(<IndexedObject>this.$overlapSize)[axis])
        pos = -(<IndexedObject>this.$overlapSize)[axis];
    else {
        // Convert to screen-space speed so the threshold is resolution-independent.
        let v2: number = Math.abs(v) * this.$velocityScale;
        if (PIXI.utils.isMobile.any)
            v2 *= Math.max(GRoot.inst.stageWrapper.designWidth, GRoot.inst.stageWrapper.designHeight) / Math.max(GRoot.inst.stageWidth, GRoot.inst.stageHeight);
        //threshold, if too slow, stop it
        let ratio: number = 0;
        if (this.$pageMode || !PIXI.utils.isMobile.any) {
            if (v2 > 500)
                ratio = Math.pow((v2 - 500) / 500, 2);
        }
        else {
            if (v2 > 1000)
                ratio = Math.pow((v2 - 1000) / 1000, 2);
        }
        if (ratio != 0) {
            if (ratio > 1)
                ratio = 1;
            v2 *= ratio;
            v *= ratio;
            (<IndexedObject>this.$velocity)[axis] = v;
            // Time until the decaying per-frame speed falls below 1px.
            duration = Math.log(60 / v2) / Math.log(this.$decelerationRate) / 60;
            // Geometric-series distance covered while decelerating.
            const change: number = (v / 60 - 1) / (1 - this.$decelerationRate);
            pos += change;
        }
    }
    if (duration < ScrollPane.TWEEN_DEFAULT_DURATION)
        duration = ScrollPane.TWEEN_DEFAULT_DURATION;
    (<IndexedObject>this.$tweenDuration)[axis] = duration;
    return pos;
}
private fixDuration(axis: string, oldChange: number): void {
if ((<IndexedObject>this.$tweenChange)[axis] == 0 || Math.abs((<IndexedObject>this.$tweenChange)[axis]) >= Math.abs(oldChange))
return;
let newDuration: number = Math.abs((<IndexedObject>this.$tweenChange)[axis] / oldChange) * (<IndexedObject>this.$tweenDuration)[axis];
if (newDuration < ScrollPane.TWEEN_DEFAULT_DURATION)
newDuration = ScrollPane.TWEEN_DEFAULT_DURATION;
(<IndexedObject>this.$tweenDuration)[axis] = newDuration;
}
/**
 * Stops any running tween. A "manually set" tween (type 1) is snapped
 * straight to its destination (emitting SCROLL); in every case the
 * update loop is detached and SCROLL_END is emitted.
 */
private killTween(): void {
    if (this.$tweening == 1) {
        const endX = this.$tweenStart.x + this.$tweenChange.x;
        const endY = this.$tweenStart.y + this.$tweenChange.y;
        this.$container.position.set(endX, endY);
        this.emit(ScrollEvent.SCROLL, this);
    }
    this.$tweening = 0;
    GTimer.inst.remove(this.tweenUpdate, this);
    this.emit(ScrollEvent.SCROLL_END, this);
}
/**
 * Shows, positions and sizes the pull-to-refresh header/footer along
 * $refreshBarAxis according to the container's current over-scroll,
 * attaching each widget to the mask container only while it is needed.
 */
private checkRefreshBar(): void {
    if (this.$header == null && this.$footer == null)
        return;
    const pos: number = (<IndexedObject>this.$container)[this.$refreshBarAxis];
    if (this.$header != null) {
        if (pos > 0) {
            // Over-scrolled past the start: reveal the header and size it
            // to the over-scroll distance on the refresh axis.
            if (this.$header.displayObject.parent == null)
                this.$maskContainer.addChildAt(this.$header.displayObject, 0);
            const pt: PIXI.Point = ScrollPane.sHelperPoint;
            pt.set(this.$header.width, this.$header.height);
            (<IndexedObject>pt)[this.$refreshBarAxis] = pos;
            this.$header.setSize(pt.x, pt.y);
        }
        else {
            if (this.$header.displayObject.parent != null)
                this.$maskContainer.removeChild(this.$header.displayObject);
        }
    }
    if (this.$footer != null) {
        var max: number = (<IndexedObject>this.$overlapSize)[this.$refreshBarAxis];
        // Footer is shown past the end, or always when the content fits
        // but a footer area is locked open.
        if (pos < -max || max == 0 && this.$footerLockedSize > 0) {
            if (this.$footer.displayObject.parent == null)
                this.$maskContainer.addChildAt(this.$footer.displayObject, 0);
            const pt: PIXI.Point = ScrollPane.sHelperPoint;
            // First resolve the footer's position…
            pt.set(this.$footer.x, this.$footer.y);
            if (max > 0)
                (<IndexedObject>pt)[this.$refreshBarAxis] = pos + (<IndexedObject>this.$contentSize)[this.$refreshBarAxis];
            else
                (<IndexedObject>pt)[this.$refreshBarAxis] = Math.max(Math.min(pos + (<IndexedObject>this.$viewSize)[this.$refreshBarAxis], (<IndexedObject>this.$viewSize)[this.$refreshBarAxis] - this.$footerLockedSize),
                    (<IndexedObject>this.$viewSize)[this.$refreshBarAxis] - (<IndexedObject>this.$contentSize)[this.$refreshBarAxis]);
            this.$footer.setXY(pt.x, pt.y);
            // …then its size along the refresh axis.
            pt.set(this.$footer.width, this.$footer.height);
            if (max > 0)
                (<IndexedObject>pt)[this.$refreshBarAxis] = -max - pos;
            else
                (<IndexedObject>pt)[this.$refreshBarAxis] = (<IndexedObject>this.$viewSize)[this.$refreshBarAxis] - (<IndexedObject>this.$footer)[this.$refreshBarAxis];
            this.$footer.setSize(pt.x, pt.y);
        }
        else {
            if (this.$footer.displayObject.parent != null)
                this.$maskContainer.removeChild(this.$footer.displayObject);
        }
    }
}
/**
 * Per-frame tween driver: advances both axes via runTween, applies the
 * new container position, keeps logical positions in sync during an
 * inertial fling, and — when both axes have finished — tears the loop
 * down and emits SCROLL followed by SCROLL_END.
 */
private tweenUpdate(): void {
    const nx = this.runTween("x");
    const ny = this.runTween("y");
    this.$container.position.set(nx, ny);
    if (this.$tweening == 2) {
        if (this.$overlapSize.x > 0)
            this.$xPos = utils.NumberUtil.clamp(-nx, 0, this.$overlapSize.x);
        if (this.$overlapSize.y > 0)
            this.$yPos = utils.NumberUtil.clamp(-ny, 0, this.$overlapSize.y);
        if (this.$pageMode)
            this.updatePageController();
    }
    const finished = this.$tweenChange.x == 0 && this.$tweenChange.y == 0;
    if (finished) {
        this.$tweening = 0;
        GTimer.inst.remove(this.tweenUpdate, this);
        this.loopCheckingCurrent();
    }
    this.syncScrollBar(finished);
    this.checkRefreshBar();
    this.emit(ScrollEvent.SCROLL, this);
    if (finished)
        this.emit(ScrollEvent.SCROLL_END, this);
}
/**
 * Advances the tween on one axis by the ticker's delta time and returns
 * the new container coordinate. Handles completion (zeroing the change),
 * eased interpolation, locked header/footer thresholds on the refresh
 * axis, and — for inertial tweens with bounceback — converting an
 * overshoot into a fresh bounce-back tween; without bounceback the value
 * is hard-clamped instead.
 * @param axis "x" or "y"
 * @returns the new coordinate for this axis
 */
private runTween(axis: string): number {
    const delta:number = GTimer.inst.ticker.deltaTime;
    let newValue: number;
    if ((<IndexedObject>this.$tweenChange)[axis] != 0) {
        (<IndexedObject>this.$tweenTime)[axis] += delta * PIXI.settings.TARGET_FPMS;
        if ((<IndexedObject>this.$tweenTime)[axis] >= (<IndexedObject>this.$tweenDuration)[axis]) {
            // Tween finished on this axis.
            newValue = (<IndexedObject>this.$tweenStart)[axis] + (<IndexedObject>this.$tweenChange)[axis];
            (<IndexedObject>this.$tweenChange)[axis] = 0;
        }
        else {
            const ratio: number = ScrollPane.$easeTypeFunc((<IndexedObject>this.$tweenTime)[axis], (<IndexedObject>this.$tweenDuration)[axis]);
            newValue = (<IndexedObject>this.$tweenStart)[axis] + Math.floor((<IndexedObject>this.$tweenChange)[axis] * ratio);
        }
        // Legal range, widened by any locked pull-to-refresh header/footer.
        var threshold1: number = 0;
        var threshold2: number = -(<IndexedObject>this.$overlapSize)[axis];
        if (this.$headerLockedSize > 0 && this.$refreshBarAxis == axis)
            threshold1 = this.$headerLockedSize;
        if (this.$footerLockedSize > 0 && this.$refreshBarAxis == axis) {
            var max: number = (<IndexedObject>this.$overlapSize)[this.$refreshBarAxis];
            if (max == 0)
                max = Math.max((<IndexedObject>this.$contentSize)[this.$refreshBarAxis] + this.$footerLockedSize - (<IndexedObject>this.$viewSize)[this.$refreshBarAxis], 0);
            else
                max += this.$footerLockedSize;
            threshold2 = -max;
        }
        if (this.$tweening == 2 && this.$bouncebackEffect) {
            if (newValue > 20 + threshold1 && (<IndexedObject>this.$tweenChange)[axis] > 0
                || newValue > threshold1 && (<IndexedObject>this.$tweenChange)[axis] == 0) //start to bounce
            {
                (<IndexedObject>this.$tweenTime)[axis] = 0;
                (<IndexedObject>this.$tweenDuration)[axis] = ScrollPane.TWEEN_DEFAULT_DURATION;
                (<IndexedObject>this.$tweenChange)[axis] = -newValue + threshold1;
                (<IndexedObject>this.$tweenStart)[axis] = newValue;
            }
            else if (newValue < threshold2 - 20 && (<IndexedObject>this.$tweenChange)[axis] < 0
                || newValue < threshold2 && (<IndexedObject>this.$tweenChange)[axis] == 0)
            {
                (<IndexedObject>this.$tweenTime)[axis] = 0;
                (<IndexedObject>this.$tweenDuration)[axis] = ScrollPane.TWEEN_DEFAULT_DURATION;
                (<IndexedObject>this.$tweenChange)[axis] = threshold2 - newValue;
                (<IndexedObject>this.$tweenStart)[axis] = newValue;
            }
        }
        else {
            // No bounceback: clamp hard at the range and stop the axis.
            if (newValue > threshold1) {
                newValue = threshold1;
                (<IndexedObject>this.$tweenChange)[axis] = 0;
            }
            else if (newValue < threshold2) {
                newValue = threshold2;
                (<IndexedObject>this.$tweenChange)[axis] = 0;
            }
        }
    }
    else
        newValue = (<IndexedObject>this.$container)[axis];
    return newValue;
}
}
} | the_stack |
import * as A from "../Collections/Immutable/Array"
import * as E from "../Either"
import type { Trace } from "../Fiber"
import type { FiberID } from "../Fiber/id"
import { identity, pipe } from "../Function"
import * as S from "../IO"
import * as O from "../Option"
import { Stack } from "../Stack"
import type { Both, Cause, Then, Traced } from "./cause"
import { both, die, empty, fail, interrupt, then, traced } from "./cause"
import { InterruptedException } from "./errors"
export {
both,
Cause,
die,
empty,
fail,
interrupt,
then,
traced,
isEmpty
} from "./cause"
/**
* Applicative's ap
*/
export function ap<A>(fa: Cause<A>): <B>(fab: Cause<(a: A) => B>) => Cause<B> {
return chain((f) => pipe(fa, map(f)))
}
/**
 * Replaces every `E` in the cause with the constant `e`.
 */
export function as<E1>(e: E1) {
  const constant = () => e
  return map(constant)
}
/**
 * Builds a `Cause` depending on the result of another: monadic bind over the
 * `Fail` channel. Eager variant of `chainSafe_`.
 */
export function chain_<E, E1>(cause: Cause<E>, f: (_: E) => Cause<E1>): Cause<E1> {
  const computation = chainSafe_(cause, f)
  return S.run(computation)
}
/**
 * Curried, data-last form of `chain_`.
 */
export function chain<E, E1>(f: (_: E) => Cause<E1>) {
  return function (cause: Cause<E>): Cause<E1> {
    return chain_(cause, f)
  }
}
/**
 * Builds a `Cause` depending on the result of another: every `Fail(e)` node is
 * replaced by `f(e)`, while `Die`/`Interrupt` leaves pass through unchanged and
 * `Then`/`Both`/`Traced` structure is preserved.
 *
 * Returns an `S.IO` computation; recursion on composite nodes is wrapped in
 * `S.suspend`, presumably so `S.run` can evaluate deep causes stack-safely —
 * see `../IO`. Use `chain_` for the already-run variant.
 */
export function chainSafe_<E, E1>(
cause: Cause<E>,
f: (_: E) => Cause<E1>
): S.IO<Cause<E1>> {
switch (cause._tag) {
case "Empty": {
return S.succeed(empty)
}
case "Fail": {
// The only node that carries an E: substitute the whole subtree with f(e).
return S.succeed(f(cause.value))
}
case "Die": {
return S.succeed(cause)
}
case "Interrupt": {
return S.succeed(cause)
}
case "Then": {
return S.zipWith_(
S.suspend(() => chainSafe_(cause.left, f)),
S.suspend(() => chainSafe_(cause.right, f)),
(l, r) => then(l, r)
)
}
case "Both": {
return S.zipWith_(
S.suspend(() => chainSafe_(cause.left, f)),
S.suspend(() => chainSafe_(cause.right, f)),
(l, r) => both(l, r)
)
}
case "Traced": {
// Re-attach the original trace around the rewritten inner cause.
return S.map_(chainSafe_(cause.cause, f), (x) => traced(x, cause.trace))
}
}
}
/**
 * Transforms the `E` values of the cause; equivalent to
 * `chain_(cause, (e) => fail(f(e)))`.
 */
export function map_<E, E1>(cause: Cause<E>, f: (e: E) => E1) {
  const lift = (e: E) => fail(f(e))
  return chain_(cause, lift)
}
/**
 * Curried, data-last form of `map_`.
 */
export function map<E, E1>(f: (e: E) => E1) {
  return function (cause: Cause<E>) {
    return map_(cause, f)
  }
}
/**
 * Determines if this cause contains, or is equal to, the specified cause.
 * Eager variant of `containsSafe`.
 */
export function contains<E, E1 extends E = E>(that: Cause<E1>) {
  const check = containsSafe<E, E1>(that)
  return (cause: Cause<E>) => S.run(check(cause))
}
/**
 * Determines if this cause contains or is equal to the specified cause.
 *
 * First checks whole-cause equality, then folds over every sub-cause,
 * lazily OR-ing in an equality check for each node. Runs inside `S.gen`
 * so the chained equality checks stay in the `S.IO` world.
 */
export function containsSafe<E, E1 extends E = E>(that: Cause<E1>) {
return (cause: Cause<E>) =>
S.gen(function* (_) {
if (yield* _(cause.equalsSafe(that))) {
return true
}
// Accumulator is an S.IO<boolean>; short-circuits once any node matched.
// NOTE: the reducer's first parameter `_` shadows the S.gen adapter here.
return yield* _(
pipe(
cause,
reduceLeft(S.succeed(false))((_, c) =>
O.some(S.chain_(_, (b) => (b ? S.succeed(b) : c.equalsSafe(that))))
)
)
)
})
}
/**
 * Extracts the list of non-recoverable defect values (`Die` nodes) from the
 * `Cause`, in traversal order.
 */
export function defects<E>(cause: Cause<E>): readonly unknown[] {
  const collect = reduceLeft<readonly unknown[]>([])((acc, node) =>
    node._tag === "Die" ? O.some([...acc, node.value]) : O.none
  )
  return collect(cause)
}
/**
 * Returns the value of the first `Die` node in this `Cause`, if one exists.
 */
export function dieOption<E>(cause: Cause<E>) {
  const firstDie = find<unknown, E>((node) =>
    node._tag === "Die" ? O.some(node.value) : O.none
  )
  return firstDie(cause)
}
/**
 * Returns whether the cause contains at least one defect (`Die` node).
 */
export function died<E>(cause: Cause<E>) {
  return O.isSome(dieOption(cause))
}
/**
 * Returns the `E` of the first `Fail` node in this `Cause`, if one exists.
 */
export function failureOption<E>(cause: Cause<E>) {
  const firstFail = find<E, E>((node) =>
    node._tag === "Fail" ? O.some(node.value) : O.none
  )
  return firstFail(cause)
}
/**
 * Returns whether the cause contains at least one checked failure (`Fail`).
 */
export function failed<E>(cause: Cause<E>) {
  return O.isSome(failureOption(cause))
}
/**
 * Retrieve the first checked error on the `Left` if available; if there are
 * no checked errors, return the rest of the `Cause` on the `Right`, which is
 * then known to contain only `Die` or `Interrupt` causes.
 */
export function failureOrCause<E>(cause: Cause<E>): E.Either<E, Cause<never>> {
  const failure = failureOption(cause)
  if (failure._tag === "Some") {
    return E.left(failure.value)
  }
  // No `Fail` node exists, so the cause carries no E and can be narrowed.
  return E.right(cause as Cause<never>)
}
/**
 * Produces the list of all recoverable errors `E` in the `Cause`, in
 * traversal order.
 */
export function failures<E>(cause: Cause<E>) {
  const collect = reduceLeft<readonly E[]>([])((acc, node) =>
    node._tag === "Fail" ? O.some([...acc, node.value]) : O.none
  )
  return collect(cause)
}
/**
 * Curried, data-last form of `stripSomeDefects_`: applies the partial
 * function `f` to each `Die` value, returning `Some` with the remaining
 * causes or `None` if nothing remains.
 */
export function stripSomeDefects(f: (_: unknown) => O.Option<unknown>) {
  return <E>(cause: Cause<E>): O.Option<Cause<E>> =>
    S.run(stripSomeDefectsSafe(cause, f))
}
/**
 * Applies the partial function `f` to each `Die` value in the cause,
 * returning `Some` with the remaining causes or `None` if there are no
 * remaining causes. Eager variant of `stripSomeDefectsSafe`.
 */
export function stripSomeDefects_<E>(
  cause: Cause<E>,
  f: (_: unknown) => O.Option<unknown>
): O.Option<Cause<E>> {
  const computation = stripSomeDefectsSafe(cause, f)
  return S.run(computation)
}
/**
 * Filter out all `Die` causes according to the specified function,
 * returning `Some` with the remaining causes or `None` if there are no
 * remaining causes.
 *
 * NOTE(review): for a `Die(v)` node, `O.map_(f(v), die)` KEEPS (and
 * transforms) defects that `f` is defined at and drops the rest — which
 * reads as the opposite of the docstrings on `stripSomeDefects`/`_`.
 * Confirm the intended polarity against upstream before relying on it.
 */
export function stripSomeDefectsSafe<E>(
cause: Cause<E>,
f: (_: unknown) => O.Option<unknown>
): S.IO<O.Option<Cause<E>>> {
switch (cause._tag) {
case "Empty": {
return S.succeed(O.none)
}
case "Interrupt": {
// Interrupts and failures are never stripped by this operation.
return S.succeed(O.some(cause))
}
case "Fail": {
return S.succeed(O.some(cause))
}
case "Die": {
return S.succeed(O.map_(f(cause.value), die))
}
case "Both": {
// Keep whichever sides survive; drop the node only when both are gone.
return S.zipWith_(
S.suspend(() => stripSomeDefectsSafe(cause.left, f)),
S.suspend(() => stripSomeDefectsSafe(cause.right, f)),
(l, r) => {
if (l._tag === "Some" && r._tag === "Some") {
return O.some(both(l.value, r.value))
} else if (l._tag === "Some") {
return l
} else if (r._tag === "Some") {
return r
} else {
return O.none
}
}
)
}
case "Then": {
return S.zipWith_(
S.suspend(() => stripSomeDefectsSafe(cause.left, f)),
S.suspend(() => stripSomeDefectsSafe(cause.right, f)),
(l, r) => {
if (l._tag === "Some" && r._tag === "Some") {
return O.some(then(l.value, r.value))
} else if (l._tag === "Some") {
return l
} else if (r._tag === "Some") {
return r
} else {
return O.none
}
}
)
}
case "Traced": {
// NOTE(review): unlike keepDefectsSafe / sequenceCause*Safe, the trace is
// dropped here rather than re-wrapped with `traced(..)` — confirm this is
// intentional.
return S.suspend(() => stripSomeDefectsSafe(cause.cause, f))
}
}
}
/**
 * Finds the first result for which `f` yields `Some`, searching the cause
 * tree left-to-right. Eager variant of `findSafe`.
 */
export function find<Z, E>(
  f: (cause: Cause<E>) => O.Option<Z>
): (cause: Cause<E>) => O.Option<Z> {
  const safe = findSafe(f)
  return (cause) => S.run(safe(cause))
}
/**
 * Finds the first result matching `f`, testing the current node before
 * descending; `Then`/`Both` search left before right, `Traced` is
 * transparent. Recursion goes through `S.suspend` for stack safety.
 */
export function findSafe<Z, E>(
f: (cause: Cause<E>) => O.Option<Z>
): (cause: Cause<E>) => S.IO<O.Option<Z>> {
return (cause) => {
// Test the node itself first; a match here short-circuits the descent.
const apply = f(cause)
if (apply._tag === "Some") {
return S.succeed(apply)
}
switch (cause._tag) {
case "Then": {
return S.chain_(
S.suspend(() => findSafe(f)(cause.left)),
(isLeft) => {
if (isLeft._tag === "Some") {
return S.succeed(isLeft)
} else {
return findSafe(f)(cause.right)
}
}
)
}
case "Traced": {
return S.suspend(() => findSafe(f)(cause.cause))
}
case "Both": {
return S.chain_(
S.suspend(() => findSafe(f)(cause.left)),
(isLeft) => {
if (isLeft._tag === "Some") {
return S.succeed(isLeft)
} else {
return findSafe(f)(cause.right)
}
}
)
}
default: {
// Leaf node and no match: propagate the None computed above.
return S.succeed(apply)
}
}
}
}
/**
* Equivalent to chain(identity)
*/
export const flatten: <E>(cause: Cause<Cause<E>>) => Cause<E> = chain(identity)
/**
 * Folds over a cause with one handler per node kind. Eager variant of
 * `foldSafe`.
 */
export function fold<E, Z>(
  empty: () => Z,
  failCase: (_: E) => Z,
  dieCase: (_: unknown) => Z,
  interruptCase: (_: FiberID) => Z,
  thenCase: (_: Z, __: Z) => Z,
  bothCase: (_: Z, __: Z) => Z,
  tracedCase: (_: Z, __: Trace) => Z
) {
  const safe = foldSafe(
    empty,
    failCase,
    dieCase,
    interruptCase,
    thenCase,
    bothCase,
    tracedCase
  )
  return (cause: Cause<E>): Z => S.run(safe(cause))
}
/**
 * Folds over a cause, dispatching one handler per node kind: leaves
 * (`Empty`/`Fail`/`Die`/`Interrupt`) map directly to a `Z`, while composite
 * nodes (`Then`/`Both`/`Traced`) recurse and combine children's results.
 * Recursion is wrapped in `S.suspend` for stack-safe evaluation via `S.run`.
 */
export function foldSafe<E, Z>(
empty: () => Z,
failCase: (_: E) => Z,
dieCase: (_: unknown) => Z,
interruptCase: (_: FiberID) => Z,
thenCase: (_: Z, __: Z) => Z,
bothCase: (_: Z, __: Z) => Z,
tracedCase: (_: Z, __: Trace) => Z
) {
return (cause: Cause<E>): S.IO<Z> => {
switch (cause._tag) {
case "Empty": {
// succeedWith defers calling the thunk (vs. succeed, which takes a value).
return S.succeedWith(empty)
}
case "Fail": {
return S.succeed(failCase(cause.value))
}
case "Die": {
return S.succeed(dieCase(cause.value))
}
case "Interrupt": {
return S.succeed(interruptCase(cause.fiberId))
}
case "Traced": {
// Fold the inner cause, then combine with the node's trace.
return S.map_(
S.suspend(() =>
foldSafe(
empty,
failCase,
dieCase,
interruptCase,
thenCase,
bothCase,
tracedCase
)(cause.cause)
),
(x) => tracedCase(x, cause.trace)
)
}
case "Both": {
return S.zipWith_(
S.suspend(() =>
foldSafe(
empty,
failCase,
dieCase,
interruptCase,
thenCase,
bothCase,
tracedCase
)(cause.left)
),
S.suspend(() =>
foldSafe(
empty,
failCase,
dieCase,
interruptCase,
thenCase,
bothCase,
tracedCase
)(cause.right)
),
(l, r) => bothCase(l, r)
)
}
case "Then": {
return S.zipWith_(
S.suspend(() =>
foldSafe(
empty,
failCase,
dieCase,
interruptCase,
thenCase,
bothCase,
tracedCase
)(cause.left)
),
S.suspend(() =>
foldSafe(
empty,
failCase,
dieCase,
interruptCase,
thenCase,
bothCase,
tracedCase
)(cause.right)
),
(l, r) => thenCase(l, r)
)
}
}
}
}
/**
 * Accumulates a state over a `Cause` in pre-order (node before children,
 * left before right), using an explicit stack instead of recursion.
 * The reducer returns `Some(nextAcc)` to update the accumulator, or `None`
 * to leave it unchanged for that node; traversal always continues either way.
 */
export function reduceLeft<Z>(z: Z) {
return <E>(f: (z: Z, cause: Cause<E>) => O.Option<Z>): ((cause: Cause<E>) => Z) => {
return (cause) => {
// Pending right-hand siblings to visit after the current leftmost path.
let causes: Stack<Cause<E>> | undefined = undefined
let current: Cause<E> | undefined = cause
let acc = z
while (current) {
const x = f(acc, current)
acc = x._tag === "Some" ? x.value : acc
switch (current._tag) {
case "Then": {
// Defer the right child, descend into the left one first.
causes = new Stack(current.right, causes)
current = current.left
break
}
case "Both": {
causes = new Stack(current.right, causes)
current = current.left
break
}
case "Traced": {
// Traces are transparent to the fold.
current = current.cause
break
}
default: {
// Leaf node: nothing below it.
current = undefined
break
}
}
if (!current && causes) {
// Leftmost path exhausted: pop the next deferred sibling.
current = causes.value
causes = causes.previous
}
}
return acc
}
}
}
/**
 * Returns whether the cause contains at least one interruption.
 */
export function interrupted<E>(cause: Cause<E>) {
  return O.isSome(interruptOption(cause))
}
/**
* Returns the `FiberID` associated with the first `Interrupt` in this `Cause` if one
* exists.
*/
export function interruptOption<E>(cause: Cause<E>) {
return pipe(
cause,
find((c) => (c._tag === "Interrupt" ? O.some(c.fiberId) : O.none))
)
}
/**
 * Determines if the `Cause` contains only interruptions — i.e. no `Die` or
 * `Fail` node anywhere in the tree.
 */
export function interruptedOnly<E>(cause: Cause<E>) {
  const fatal = find<boolean, E>((node) =>
    node._tag === "Die" || node._tag === "Fail" ? O.some(false) : O.none
  )(cause)
  // No Die/Fail found at all => vacuously "interruptions only".
  return fatal._tag === "Some" ? fatal.value : true
}
/**
 * Returns the set of interruptors: fibers that interrupted the fiber
 * described by this `Cause` (deduplicated, as an array).
 */
export function interruptors<E>(cause: Cause<E>): readonly FiberID[] {
  const ids = reduceLeft<Set<FiberID>>(new Set())((seen, node) =>
    node._tag === "Interrupt" ? O.some(seen.add(node.fiberId)) : O.none
  )(cause)
  return Array.from(ids)
}
/**
 * Remove all `Fail` and `Interrupt` nodes from this `Cause`,
 * return only `Die` cause/finalizer defects.
 * Returns `None` when nothing remains; traces around surviving defects are
 * preserved. Recursion goes through `S.suspend` for stack safety.
 */
export function keepDefectsSafe<E>(cause: Cause<E>): S.IO<O.Option<Cause<never>>> {
switch (cause._tag) {
case "Empty": {
return S.succeed(O.none)
}
case "Fail": {
return S.succeed(O.none)
}
case "Interrupt": {
return S.succeed(O.none)
}
case "Die": {
return S.succeed(O.some(cause))
}
case "Traced": {
// Re-wrap the trace only if something inside survived.
return S.map_(
S.suspend(() => keepDefectsSafe(cause.cause)),
(x) => O.map_(x, (_) => traced(_, cause.trace))
)
}
case "Then": {
// Keep whichever sides survive; drop the node only when both are gone.
return S.zipWith_(
S.suspend(() => keepDefectsSafe(cause.left)),
S.suspend(() => keepDefectsSafe(cause.right)),
(l, r) => {
if (l._tag === "Some" && r._tag === "Some") {
return O.some(then(l.value, r.value))
} else if (l._tag === "Some") {
return l
} else if (r._tag === "Some") {
return r
} else {
return O.none
}
}
)
}
case "Both": {
return S.zipWith_(
S.suspend(() => keepDefectsSafe(cause.left)),
S.suspend(() => keepDefectsSafe(cause.right)),
(l, r) => {
if (l._tag === "Some" && r._tag === "Some") {
return O.some(both(l.value, r.value))
} else if (l._tag === "Some") {
return l
} else if (r._tag === "Some") {
return r
} else {
return O.none
}
}
)
}
}
}
/**
 * Remove all `Fail` and `Interrupt` nodes from this `Cause`, keeping only
 * `Die` cause/finalizer defects; `None` when no defect remains.
 */
export function keepDefects<E>(cause: Cause<E>): O.Option<Cause<never>> {
  const computation = keepDefectsSafe(cause)
  return S.run(computation)
}
/**
 * Converts the specified `Cause<Either<E, A>>` to an `Either<Cause<E>, A>`.
 * Eager variant of `sequenceCauseEitherSafe`.
 */
export function sequenceCauseEither<E, A>(
  c: Cause<E.Either<E, A>>
): E.Either<Cause<E>, A> {
  const computation = sequenceCauseEitherSafe(c)
  return S.run(computation)
}
/**
 * Converts the specified `Cause<Either<E, A>>` to an `Either<Cause<E>, A>`:
 * the first `Right` success found wins; otherwise the failure structure is
 * rebuilt with only the `Left` errors. Recursion goes through `S.suspend`
 * for stack safety.
 */
export function sequenceCauseEitherSafe<E, A>(
c: Cause<E.Either<E, A>>
): S.IO<E.Either<Cause<E>, A>> {
switch (c._tag) {
case "Empty": {
return S.succeed(E.left(empty))
}
case "Interrupt": {
return S.succeed(E.left(c))
}
case "Fail": {
// A Fail(Right(a)) turns the whole node into a success.
return S.succeed(
c.value._tag === "Left" ? E.left(fail(c.value.left)) : E.right(c.value.right)
)
}
case "Traced": {
// Re-attach the trace around a rebuilt failure; successes drop it.
return S.map_(
S.suspend(() => sequenceCauseEitherSafe(c.cause)),
(x) => E.mapLeft_(x, (_) => traced(_, c.trace))
)
}
case "Die": {
return S.succeed(E.left(c))
}
case "Then": {
// A success on either side wins (left side preferred).
return S.zipWith_(
S.suspend(() => sequenceCauseEitherSafe(c.left)),
S.suspend(() => sequenceCauseEitherSafe(c.right)),
(l, r) => {
if (l._tag === "Left") {
if (r._tag === "Right") {
return E.right(r.right)
} else {
return E.left(then(l.left, r.left))
}
} else {
return E.right(l.right)
}
}
)
}
case "Both": {
return S.zipWith_(
S.suspend(() => sequenceCauseEitherSafe(c.left)),
S.suspend(() => sequenceCauseEitherSafe(c.right)),
(l, r) => {
if (l._tag === "Left") {
if (r._tag === "Right") {
return E.right(r.right)
} else {
return E.left(both(l.left, r.left))
}
} else {
return E.right(l.right)
}
}
)
}
}
}
/**
 * Converts the specified `Cause<Option<E>>` to an `Option<Cause<E>>` by
 * recursively stripping out any failures with the error `None`.
 * Recursion goes through `S.suspend` for stack safety; see also the
 * iterative `flipCauseOption`.
 */
export function sequenceCauseOptionSafe<E>(
c: Cause<O.Option<E>>
): S.IO<O.Option<Cause<E>>> {
switch (c._tag) {
case "Empty": {
return S.succeed(O.some(empty))
}
case "Interrupt": {
return S.succeed(O.some(c))
}
case "Traced": {
return S.map_(
S.suspend(() => sequenceCauseOptionSafe(c.cause)),
(x) => O.map_(x, (_) => traced(_, c.trace))
)
}
case "Fail": {
// Fail(None) is stripped; Fail(Some(e)) becomes Fail(e).
return S.succeed(O.map_(c.value, fail))
}
case "Die": {
return S.succeed(O.some(c))
}
case "Then": {
// Keep whichever sides survive; drop the node only when both are gone.
return S.zipWith_(
S.suspend(() => sequenceCauseOptionSafe(c.left)),
S.suspend(() => sequenceCauseOptionSafe(c.right)),
(l, r) => {
if (l._tag === "Some" && r._tag === "Some") {
return O.some(then(l.value, r.value))
} else if (l._tag === "Some") {
return O.some(l.value)
} else if (r._tag === "Some") {
return O.some(r.value)
} else {
return O.none
}
}
)
}
case "Both": {
return S.zipWith_(
S.suspend(() => sequenceCauseOptionSafe(c.left)),
S.suspend(() => sequenceCauseOptionSafe(c.right)),
(l, r) => {
if (l._tag === "Some" && r._tag === "Some") {
return O.some(both(l.value, r.value))
} else if (l._tag === "Some") {
return O.some(l.value)
} else if (r._tag === "Some") {
return O.some(r.value)
} else {
return O.none
}
}
)
}
}
}
/**
 * Converts the specified `Cause<Option<E>>` to an `Option<Cause<E>>` by
 * recursively stripping out any failures with the error `None`.
 * Eager variant of `sequenceCauseOptionSafe`.
 */
export function sequenceCauseOption<E>(c: Cause<O.Option<E>>): O.Option<Cause<E>> {
  const computation = sequenceCauseOptionSafe(c)
  return S.run(computation)
}
/**
 * Squashes a `Cause` down to a single throwable value, chosen to be the
 * "most important" one, in priority order:
 *   1. the first checked failure, mapped through `f`;
 *   2. if interrupted, an `InterruptedException` naming the interruptor fibers;
 *   3. the first defect (`Die` value);
 *   4. a bare `InterruptedException` as a last resort.
 */
export function squash<E>(f: (e: E) => unknown) {
return (cause: Cause<E>): unknown =>
pipe(
cause,
failureOption,
O.map(f),
(o) =>
o._tag === "Some"
? o
: interrupted(cause)
? O.some<unknown>(
new InterruptedException(
"Interrupted by fibers: " +
Array.from(interruptors(cause))
.map((_) => _.seqNumber.toString())
.map((_) => "#" + _)
.join(", ")
)
)
: O.none,
(o) => (o._tag === "Some" ? o : A.head(defects(cause))),
O.getOrElse(() => new InterruptedException())
)
}
/**
 * Discards all typed failures (`Fail` nodes) kept on this `Cause`.
 */
export function stripFailures<E>(cause: Cause<E>): Cause<never> {
  // Leaf fast paths: no traversal machinery needed.
  if (cause._tag === "Empty" || cause._tag === "Fail") {
    return empty
  }
  if (cause._tag === "Interrupt" || cause._tag === "Die") {
    return cause
  }
  // Composite node: delegate to the stack-safe recursive variant.
  return S.run(stripFailuresSafe(cause))
}
/**
 * Discards all typed failures (`Fail` nodes) kept on this `Cause`, keeping
 * `Die`/`Interrupt` leaves and the surrounding structure. Recursion goes
 * through `S.suspend` for stack safety.
 */
export function stripFailuresSafe<E>(cause: Cause<E>): S.IO<Cause<never>> {
switch (cause._tag) {
case "Empty": {
return S.succeed(empty)
}
case "Fail": {
// The node being discarded.
return S.succeed(empty)
}
case "Interrupt": {
return S.succeed(cause)
}
case "Die": {
return S.succeed(cause)
}
case "Traced": {
return S.map_(
S.suspend(() => stripFailuresSafe(cause.cause)),
(x) => traced(x, cause.trace)
)
}
case "Both": {
return S.zipWith_(
S.suspend(() => stripFailuresSafe(cause.left)),
S.suspend(() => stripFailuresSafe(cause.right)),
(l, r) => both(l, r)
)
}
case "Then": {
return S.zipWith_(
S.suspend(() => stripFailuresSafe(cause.left)),
S.suspend(() => stripFailuresSafe(cause.right)),
(l, r) => then(l, r)
)
}
}
}
/**
 * Discards all interruptions (`Interrupt` nodes) kept on this `Cause`.
 */
export function stripInterrupts<E>(cause: Cause<E>): Cause<E> {
  // Leaf fast paths: no traversal machinery needed.
  if (cause._tag === "Empty" || cause._tag === "Interrupt") {
    return empty
  }
  if (cause._tag === "Fail" || cause._tag === "Die") {
    return cause
  }
  // Composite node: delegate to the stack-safe recursive variant.
  return S.run(stripInterruptsSafe(cause))
}
/**
 * Discards all interruptions (`Interrupt` nodes) kept on this `Cause`,
 * keeping `Fail`/`Die` leaves and the surrounding structure. (The previous
 * docstring said "typed failures" — copy/paste error; this strips interrupts.)
 * Recursion goes through `S.suspend` for stack safety.
 */
export function stripInterruptsSafe<E>(cause: Cause<E>): S.IO<Cause<E>> {
switch (cause._tag) {
case "Empty": {
return S.succeed(empty)
}
case "Fail": {
return S.succeed(cause)
}
case "Interrupt": {
// The node being discarded.
return S.succeed(empty)
}
case "Die": {
return S.succeed(cause)
}
case "Traced": {
return S.map_(
S.suspend(() => stripInterruptsSafe(cause.cause)),
(x) => traced(x, cause.trace)
)
}
case "Both": {
return S.zipWith_(
S.suspend(() => stripInterruptsSafe(cause.left)),
S.suspend(() => stripInterruptsSafe(cause.right)),
(l, r) => both(l, r)
)
}
case "Then": {
return S.zipWith_(
S.suspend(() => stripInterruptsSafe(cause.left)),
S.suspend(() => stripInterruptsSafe(cause.right)),
(l, r) => then(l, r)
)
}
}
}
/**
 * Returns a `Cause` that has been stripped of all tracing information.
 */
export function untraced<E>(cause: Cause<E>): Cause<E> {
  // Leaf nodes cannot carry trace wrappers: return them unchanged without
  // paying for the stack-safe traversal.
  if (
    cause._tag === "Die" ||
    cause._tag === "Empty" ||
    cause._tag === "Fail" ||
    cause._tag === "Interrupt"
  ) {
    return cause
  }
  return S.run(untracedSafe(cause))
}
/**
 * Returns a `Cause` that has been stripped of all tracing information:
 * `Traced` wrappers are removed at every depth while the rest of the
 * structure is rebuilt unchanged. Recursion goes through `S.suspend` for
 * stack safety.
 */
export function untracedSafe<E>(cause: Cause<E>): S.IO<Cause<E>> {
switch (cause._tag) {
case "Traced": {
// Drop the wrapper and keep stripping inside.
return S.suspend(() => untracedSafe(cause.cause))
}
case "Both": {
return S.zipWith_(
S.suspend(() => untracedSafe(cause.left)),
S.suspend(() => untracedSafe(cause.right)),
(l, r) => both(l, r)
)
}
case "Then": {
return S.zipWith_(
S.suspend(() => untracedSafe(cause.left)),
S.suspend(() => untracedSafe(cause.right)),
(l, r) => then(l, r)
)
}
default: {
// Leaves carry no traces.
return S.succeed(cause)
}
}
}
// Explicit stack frames for `flipCauseOption`'s iterative traversal. Each
// frame records where to resume when a subtree's result has been computed;
// the `_typeId` symbols act as runtime discriminants for the union below.
const FCOStackFrameDoneTypeId = Symbol()
// Sentinel bottom frame: when popped, the traversal is complete.
class FCOStackFrameDone {
readonly _typeId: typeof FCOStackFrameDoneTypeId = FCOStackFrameDoneTypeId
}
const FCOStackFrameTracedTypeId = Symbol()
// Re-wrap the child's result with this node's trace on the way back up.
class FCOStackFrameTraced<E> {
readonly _typeId: typeof FCOStackFrameTracedTypeId = FCOStackFrameTracedTypeId
constructor(readonly cause: Traced<O.Option<E>>) {}
}
const FCOStackFrameThenLeftTypeId = Symbol()
// Left child of a `Then` is done; the right child still needs visiting.
class FCOStackFrameThenLeft<E> {
readonly _typeId: typeof FCOStackFrameThenLeftTypeId = FCOStackFrameThenLeftTypeId
constructor(readonly cause: Then<O.Option<E>>) {}
}
const FCOStackFrameThenRightTypeId = Symbol()
// Both children of a `Then` are done; `leftResult` holds the left outcome.
class FCOStackFrameThenRight<E> {
readonly _typeId: typeof FCOStackFrameThenRightTypeId = FCOStackFrameThenRightTypeId
constructor(
readonly cause: Then<O.Option<E>>,
readonly leftResult: O.Option<Cause<E>>
) {}
}
const FCOStackFrameBothLeftTypeId = Symbol()
// Left child of a `Both` is done; the right child still needs visiting.
class FCOStackFrameBothLeft<E> {
readonly _typeId: typeof FCOStackFrameBothLeftTypeId = FCOStackFrameBothLeftTypeId
constructor(readonly cause: Both<O.Option<E>>) {}
}
const FCOStackFrameBothRightTypeId = Symbol()
// Both children of a `Both` are done; `leftResult` holds the left outcome.
class FCOStackFrameBothRight<E> {
readonly _typeId: typeof FCOStackFrameBothRightTypeId = FCOStackFrameBothRightTypeId
constructor(
readonly cause: Both<O.Option<E>>,
readonly leftResult: O.Option<Cause<E>>
) {}
}
type FCOStackFrame<E> =
| FCOStackFrameDone
| FCOStackFrameTraced<E>
| FCOStackFrameThenLeft<E>
| FCOStackFrameThenRight<E>
| FCOStackFrameBothLeft<E>
| FCOStackFrameBothRight<E>
/**
* Converts the specified `Cause<Either<E, A>>` to an `Either<Cause<E>, A>` by
* recursively stripping out any failures with the error `None`.
*/
export function flipCauseOption<E>(c: Cause<O.Option<E>>): O.Option<Cause<E>> {
let stack: Stack<FCOStackFrame<E>> = new Stack(new FCOStackFrameDone())
let result: O.Option<Cause<E>> | undefined
recursion: while (stack) {
// eslint-disable-next-line no-constant-condition
pushing: while (true) {
switch (c._tag) {
case "Empty":
result = O.some(empty)
break pushing
case "Traced":
stack = new Stack(new FCOStackFrameTraced(c), stack)
c = c.cause
continue pushing
case "Interrupt":
result = O.some(interrupt(c.fiberId))
break pushing
case "Die":
result = O.some(c)
break pushing
case "Fail":
result = O.fold_(
c.value,
() => O.none,
(r) => O.some(fail(r))
)
break pushing
case "Then":
stack = new Stack(new FCOStackFrameThenLeft(c), stack)
c = c.left
continue pushing
case "Both":
stack = new Stack(new FCOStackFrameBothLeft(c), stack)
c = c.left
continue pushing
}
}
// eslint-disable-next-line no-constant-condition
popping: while (true) {
const top = stack.value
stack = stack.previous!
switch (top._typeId) {
case FCOStackFrameDoneTypeId:
return result
case FCOStackFrameTracedTypeId:
result = O.map_(result, (_) => traced(_, top.cause.trace))
continue popping
case FCOStackFrameThenLeftTypeId:
c = top.cause.right
stack = new Stack(new FCOStackFrameThenRight(top.cause, result), stack)
continue recursion
case FCOStackFrameThenRightTypeId: {
const l = top.leftResult
if (O.isSome(l) && O.isSome(result)) {
result = O.some(then(l.value, result.value))
}
if (O.isNone(l) && O.isSome(result)) {
result = O.some(result.value)
}
if (O.isSome(l) && O.isNone(result)) {
result = O.some(l.value)
}
result = O.none
continue popping
}
case FCOStackFrameBothLeftTypeId:
c = top.cause.right
stack = new Stack(new FCOStackFrameBothRight(top.cause, result), stack)
continue recursion
case FCOStackFrameBothRightTypeId: {
const l = top.leftResult
if (O.isSome(l) && O.isSome(result)) {
result = O.some(both(l.value, result.value))
}
if (O.isNone(l) && O.isSome(result)) {
result = O.some(result.value)
}
if (O.isSome(l) && O.isNone(result)) {
result = O.some(l.value)
}
result = O.none
continue popping
}
}
}
}
throw new Error("Bug")
} | the_stack |
import {
prefix, getLineStyle, getDirection, getAbsolutePosesByState,
triggerEvent, fillParams, fillEndParams,
} from "../utils";
import {
convertDimension, invert, multiply,
calculate,
createIdentityMatrix,
ignoreDimension,
minus,
createWarpMatrix,
plus,
} from "@scena/matrix";
import { NEARBY_POS } from "../consts";
import {
setDragStart, getDragDist, getPosIndexesByDirection, setDefaultTransformIndex,
fillTransformStartEvent, resolveTransformEvent,
convertTransformFormat, fillOriginalTransform, getTransfromMatrix,
} from "../gesto/GestoUtils";
import {
WarpableProps, ScalableProps, ResizableProps,
Renderer, SnappableProps, SnappableState,
OnWarpStart, OnWarp, OnWarpEnd, MoveableManagerInterface,
} from "../types";
import { hasClass, dot, getRad } from "@daybrush/utils";
import { renderAllDirections } from "../renderDirections";
import { hasGuidelines, checkMoveableSnapBounds } from "./Snappable";
// Returns the point 1/3 of the way from `pos1` toward `pos2`
// (per-component weighted blend via `dot(v1, v2, 1, 2)`).
function getMiddleLinePos(pos1: number[], pos2: number[]) {
    const result: number[] = [];
    for (let i = 0; i < pos1.length; ++i) {
        result.push(dot(pos1[i], pos2[i], 1, 2));
    }
    return result;
}
// Angle at `pos1` swept from the direction of `pos2` to the direction of
// `pos3`, normalized into [0, 2π).
function getTriangleRad(pos1: number[], pos2: number[], pos3: number[]) {
    const angleTo2 = getRad(pos1, pos2);
    const angleTo3 = getRad(pos1, pos3);
    const diff = angleTo3 - angleTo2;
    return diff < 0 ? diff + 2 * Math.PI : diff;
}
// A warped triangle is valid only if its orientation angle stays on the same
// side of π as the original's — crossing (or touching) π in opposite
// directions means the surface would flip inside out.
function isValidPos(poses1: number[][], poses2: number[][]) {
    const pi = Math.PI;
    const rad1 = getTriangleRad(poses1[0], poses1[1], poses1[2]);
    const rad2 = getTriangleRad(poses2[0], poses2[1], poses2[2]);
    if (rad1 >= pi && rad2 <= pi) {
        return false;
    }
    if (rad1 <= pi && rad2 >= pi) {
        return false;
    }
    return true;
}
/**
* @namespace Moveable.Warpable
* @description Warpable indicates whether the target can be warped (distorted, bent).
*/
// Warpable ability: lets the user drag the four corner handles to apply an
// arbitrary perspective (warp) transform to the target via a matrix3d.
export default {
name: "warpable",
ableGroup: "size",
props: {
warpable: Boolean,
renderDirections: Array,
} as const,
events: {
onWarpStart: "warpStart",
onWarp: "warp",
onWarpEnd: "warpEnd",
} as const,
// Renders the four interior guide lines (at 1/3 positions between opposite
// edges) plus the direction handles. Disabled while resizable/scalable are
// active, since those abilities own the same handles.
render(moveable: MoveableManagerInterface<ResizableProps & ScalableProps & WarpableProps>, React: Renderer): any[] {
const { resizable, scalable, warpable, zoom } = moveable.props;
if (resizable || scalable || !warpable) {
return [];
}
const { pos1, pos2, pos3, pos4 } = moveable.state;
const linePosFrom1 = getMiddleLinePos(pos1, pos2);
const linePosFrom2 = getMiddleLinePos(pos2, pos1);
const linePosFrom3 = getMiddleLinePos(pos1, pos3);
const linePosFrom4 = getMiddleLinePos(pos3, pos1);
const linePosTo1 = getMiddleLinePos(pos3, pos4);
const linePosTo2 = getMiddleLinePos(pos4, pos3);
const linePosTo3 = getMiddleLinePos(pos2, pos4);
const linePosTo4 = getMiddleLinePos(pos4, pos2);
// NOTE(review): the "middeLine" key typo is kept as-is — React keys are
// behavior-visible and renaming them is out of scope for documentation.
return [
<div className={prefix("line")}
key="middeLine1" style={getLineStyle(linePosFrom1, linePosTo1, zoom)}></div>,
<div className={prefix("line")}
key="middeLine2" style={getLineStyle(linePosFrom2, linePosTo2, zoom)}></div>,
<div className={prefix("line")}
key="middeLine3" style={getLineStyle(linePosFrom3, linePosTo3, zoom)}></div>,
<div className={prefix("line")}
key="middeLine4" style={getLineStyle(linePosFrom4, linePosTo4, zoom)}></div>,
...renderAllDirections(moveable, React),
];
},
// Only direct pointer interaction with a direction handle starts a warp.
dragControlCondition(moveable: any, e: any) {
if (e.isRequest) {
return false;
}
return hasClass(e.inputEvent.target, prefix("direction"));
},
// Captures the start state: target matrix (promoted to 4x4), its inverse,
// the four untransformed corner positions, and which corner(s) the grabbed
// handle controls. Fires "onWarpStart"; returning false from the handler
// cancels the gesture.
dragControlStart(
moveable: MoveableManagerInterface<WarpableProps, SnappableState>,
e: any,
) {
const { datas, inputEvent } = e;
const { target } = moveable.props;
const { target: inputTarget } = inputEvent;
const direction = getDirection(inputTarget);
if (!direction || !target) {
return false;
}
const state = moveable.state;
const {
transformOrigin, is3d,
targetTransform, targetMatrix,
width, height,
left, top,
} = state;
datas.datas = {};
datas.targetTransform = targetTransform;
// Warp math always works in 4x4; promote a 2d (3x3) matrix if needed.
datas.warpTargetMatrix = is3d ? targetMatrix : convertDimension(targetMatrix, 3, 4);
datas.targetInverseMatrix = ignoreDimension(invert(datas.warpTargetMatrix, 4), 3, 4);
datas.direction = direction;
datas.left = left;
datas.top = top;
// Corner positions relative to the transform origin, pre-transform.
datas.poses = [
[0, 0],
[width, 0],
[0, height],
[width, height],
].map(p => minus(p, transformOrigin));
datas.nextPoses = datas.poses.map(([x, y]: number[]) => calculate(datas.warpTargetMatrix, [x, y, 0, 1], 4));
datas.startValue = createIdentityMatrix(4);
datas.prevMatrix = createIdentityMatrix(4);
datas.absolutePoses = getAbsolutePosesByState(state);
datas.posIndexes = getPosIndexesByDirection(direction);
setDragStart(moveable, e);
setDefaultTransformIndex(e, "matrix3d");
state.snapRenderInfo = {
request: e.isRequest,
direction,
};
const params = fillParams<OnWarpStart>(moveable, e, {
set: (matrix: number[]) => {
datas.startValue = matrix;
},
...fillTransformStartEvent(e),
});
const result = triggerEvent(moveable, "onWarpStart", params);
if (result !== false) {
datas.isWarp = true;
}
return datas.isWarp;
},
// Per-move handler: snaps the dragged corners to guidelines, moves the
// controlled corner(s), validates the quad is not flipped inside out,
// derives the warp homography and emits "onWarp" with the new transform.
dragControl(
moveable: MoveableManagerInterface<WarpableProps & SnappableProps, SnappableState>,
e: any,
) {
const { datas, isRequest } = e;
let { distX, distY } = e;
const {
targetInverseMatrix, prevMatrix, isWarp, startValue,
poses,
posIndexes,
absolutePoses,
} = datas;
if (!isWarp) {
return false;
}
resolveTransformEvent(e, "matrix3d");
if (hasGuidelines(moveable, "warpable")) {
const selectedPoses: number[][] = posIndexes.map((index: number) => absolutePoses[index]);
if (selectedPoses.length > 1) {
// Edge drag: also snap the midpoint of the edge.
selectedPoses.push([
(selectedPoses[0][0] + selectedPoses[1][0]) / 2,
(selectedPoses[0][1] + selectedPoses[1][1]) / 2,
]);
}
const {
horizontal: horizontalSnapInfo,
vertical: verticalSnapInfo,
} = checkMoveableSnapBounds(
moveable,
isRequest,
selectedPoses.map(pos => [pos[0] + distX, pos[1] + distY]),
);
// Horizontal guidelines constrain vertical movement and vice versa.
distY -= horizontalSnapInfo.offset;
distX -= verticalSnapInfo.offset;
}
const dist = getDragDist({ datas, distX, distY }, true);
const nextPoses = datas.nextPoses.slice();
posIndexes.forEach((index: number) => {
nextPoses[index] = plus(nextPoses[index], dist);
});
// Reject a drag that would fold the quad over itself.
if (!NEARBY_POS.every(
nearByPoses => isValidPos(nearByPoses.map(i => poses[i]), nearByPoses.map(i => nextPoses[i])),
)) {
return false;
}
// Homography mapping the original corner quad onto the dragged quad.
const h = createWarpMatrix(
poses[0],
poses[2],
poses[1],
poses[3],
nextPoses[0],
nextPoses[2],
nextPoses[1],
nextPoses[3],
);
if (!h.length) {
// Degenerate configuration: no solvable warp matrix.
return false;
}
// B * A * M
const afterMatrix = multiply(targetInverseMatrix, h, 4);
// B * M * A
const matrix = getTransfromMatrix(datas, afterMatrix, true);
// Delta relative to the previous frame, for incremental consumers.
const delta = multiply(invert(prevMatrix, 4), matrix, 4);
datas.prevMatrix = matrix;
const totalMatrix = multiply(startValue, matrix, 4);
const nextTransform = convertTransformFormat(
datas, `matrix3d(${totalMatrix.join(", ")})`, `matrix3d(${matrix.join(", ")})`);
fillOriginalTransform(e, nextTransform);
triggerEvent(moveable, "onWarp", fillParams<OnWarp>(moveable, e, {
delta,
matrix: totalMatrix,
dist: matrix,
multiply,
transform: nextTransform,
}));
return true;
},
// Ends the gesture and emits "onWarpEnd".
dragControlEnd(
moveable: MoveableManagerInterface<WarpableProps>,
e: any,
) {
const { datas, isDrag } = e;
if (!datas.isWarp) {
return false;
}
datas.isWarp = false;
triggerEvent(moveable, "onWarpEnd", fillEndParams<OnWarpEnd>(moveable, e, {}));
return isDrag;
},
};
/**
* Whether or not target can be warped. (default: false)
* @name Moveable.Warpable#warpable
* @example
* import Moveable from "moveable";
*
* const moveable = new Moveable(document.body);
*
* moveable.warpable = true;
*/
/**
* Set directions to show the control box. (default: ["n", "nw", "ne", "s", "se", "sw", "e", "w"])
* @name Moveable.Warpable#renderDirections
* @example
* import Moveable from "moveable";
*
* const moveable = new Moveable(document.body, {
* warpable: true,
* renderDirections: ["n", "nw", "ne", "s", "se", "sw", "e", "w"],
* });
*
* moveable.renderDirections = ["nw", "ne", "sw", "se"];
*/
/**
* When the warp starts, the warpStart event is called.
* @memberof Moveable.Warpable
* @event warpStart
* @param {Moveable.Warpable.OnWarpStart} - Parameters for the warpStart event
* @example
* import Moveable from "moveable";
*
* const moveable = new Moveable(document.body, { warpable: true });
* moveable.on("warpStart", ({ target }) => {
* console.log(target);
* });
*/
/**
* When warping, the warp event is called.
* @memberof Moveable.Warpable
* @event warp
* @param {Moveable.Warpable.OnWarp} - Parameters for the warp event
* @example
* import Moveable from "moveable";
* let matrix = [
* 1, 0, 0, 0,
* 0, 1, 0, 0,
* 0, 0, 1, 0,
* 0, 0, 0, 1,
* ];
* const moveable = new Moveable(document.body, { warpable: true });
* moveable.on("warp", ({ target, transform, delta, multiply }) => {
* // target.style.transform = transform;
* matrix = multiply(matrix, delta);
* target.style.transform = `matrix3d(${matrix.join(",")})`;
* });
*/
/**
* When the warp finishes, the warpEnd event is called.
* @memberof Moveable.Warpable
* @event warpEnd
* @param {Moveable.Warpable.OnWarpEnd} - Parameters for the warpEnd event
* @example
* import Moveable from "moveable";
*
* const moveable = new Moveable(document.body, { warpable: true });
* moveable.on("warpEnd", ({ target, isDrag }) => {
* console.log(target, isDrag);
* });
*/ | the_stack |
import { IRGB, IHSL } from './color.types';
import { rgb2hsl, hsl2rgb } from './color.hsl';
import { cssColor, rgbToString } from './color';
/**
* This file contains a set of color/contrast utilities
* It turns out colors are hard! These utilities will hopefully come in handy
*
* Some optimizations and approximations for color space transforms and values
* could be interesting to explore and use here
*/
/**
* An ISuggestionRange is an interface internal to the utilities in this file
* It is primarily used to denote an acceptable range of relative luminance values
*/
interface ISuggestionRange {
min: number;
max: number;
}
/**
 * Maps a single normalized sRGB channel value into the linearRGB color space.
 * Linear values are required for relative luminance calculations.
 * Formula defined at https://en.wikipedia.org/wiki/SRGB
 *
 * @param c - one of r, g, or b coming from sRGB, normalized to [0, 1]
 */
function standardToLinear(c: number): number {
  if (c <= 0.03928) {
    return c / 12.92;
  }
  return Math.pow((c + 0.055) / 1.055, 2.4);
}
/**
 * Calculate the relative luminance: how bright the color appears to a human eye.
 * Blue is perceived as much darker than green, so (0, 0, 255) reads as far darker
 * than (0, 255, 0). Relative luminance feeds contrast-ratio calculations used to
 * ensure text is readable.
 *
 * Formula defined by: http://www.w3.org/TR/UNDERSTANDING-WCAG20/visual-audio-contrast-contrast.html#contrast-ratiodef
 * Relative luminance: http://www.w3.org/TR/2008/REC-WCAG20-20081211/#relativeluminancedef
 *
 * @param r - standard red value 0 to 255
 * @param g - standard green value 0 to 255
 * @param b - standard blue value 0 to 255
 */
export function relativeLuminance(r: number, g: number, b: number): number {
  // Normalize each channel and convert to linear RGB first.
  const [rLin, gLin, bLin] = [r, g, b].map(channel => standardToLinear(channel / 255));
  // Weight each channel by its perceived brightness contribution.
  return 0.2126 * rLin + 0.7152 * gLin + 0.0722 * bLin;
}
/**
 * A contrast ratio calculator.
 * Contrast ratios (text on background) are fundamental for determining the readability of text.
 * Formula from https://www.w3.org/TR/UNDERSTANDING-WCAG20/visual-audio-contrast-contrast.html
 *
 * @param relLumA - a relative luminance value
 * @param relLumB - a relative luminance value
 */
export function contrastRatio(relLumA: number, relLumB: number): number {
  // Order the two luminances; the ratio is always lighter over darker (>= 1).
  const [darker, lighter] = relLumA < relLumB ? [relLumA, relLumB] : [relLumB, relLumA];
  return (lighter + 0.05) / (darker + 0.05);
}
/**
 * A wrapper around contrast ratio calculation using the IRGB interface.
 *
 * @param c1 - first color value
 * @param c2 - second color value
 */
export function calcContrastRatio(c1: IRGB, c2: IRGB): number {
  return contrastRatio(
    relativeLuminance(c1.r, c1.g, c1.b),
    relativeLuminance(c2.r, c2.g, c2.b),
  );
}
/**
 * TODO: There are cases where the desired ratios can be achieved by going either lighter or darker
 * It may be cool and not too much work to add an additional argument allowing consumers to choose
 * get lighter or darker if that choice is there to be made
 *
 * Returns a suggested relative luminance range given a constant color
 * Note that it is possible that the desired ratio is unachievable
 * In these cases this function will return a [-1, -1] or [2, 2] range
 * The different ranges are used to default to black or white in the exposed adjustForContrast
 *
 * @param color - the constant IColor upon which we want to contrast with
 * @param desiredRatio - a contrast ratio (generally from some accessibility standard)
 */
function getContrastingLuminanceRange(color: IRGB, desiredRatio: number): ISuggestionRange {
  const relLum: number = relativeLuminance(color.r, color.g, color.b);
  // when background is lighter, solve for darker
  let suggestion: number = (relLum + 0.05) / desiredRatio - 0.05;
  if (suggestion > 0 && suggestion < 1) {
    return { min: 0, max: suggestion };
  }
  // when background is darker, text needs to be lighter
  suggestion = desiredRatio * (relLum + 0.05) - 0.05;
  if (suggestion < 1 && suggestion > 0) {
    return { min: suggestion, max: 1 };
  }
  // We can't achieve the desired ratio. Previously this always returned [-1, -1],
  // which made adjustForContrast's "go to white" branch (min === 2) unreachable
  // and forced black even against very dark backgrounds. Signal whichever
  // extreme actually contrasts better: [-1, -1] => black, [2, 2] => white.
  const contrastWithBlack = (relLum + 0.05) / 0.05;
  const contrastWithWhite = 1.05 / (relLum + 0.05);
  return contrastWithBlack >= contrastWithWhite
    ? { min: -1, max: -1 }
    : { min: 2, max: 2 };
}
/**
 * TODO: There are some very interesting alternatives that can be explored
 * Transforms into XYZ and LAB color spaces prior to scaling across a single dimension
 * The problem is LAB is a larger color space and the projected values may not actually
 * match desired relative luminance (still might be worth exploring)
 * TODO: If IColor is THE color interface we would like people to use, this should return it
 *
 * This is the core contrast adjusting algorithm
 * It will take an IRGB and return a transformed version
 * The new version will fall in the suggested relative luminance range
 * But will maintain the same tone (hue and saturation in this case)
 * It does so by transforming to an IHSL and searching across L values for a proper relative luminance
 *
 * NOTE(review): termination of the search relies on relative luminance increasing
 * monotonically with HSL lightness at fixed hue/saturation — presumably true for
 * rgb2hsl/hsl2rgb here, but worth confirming against those implementations.
 *
 * @param color - a baseline color of which the returned color will maintain its hue and saturation
 * @param suggestedRelLuminance - a luminance range to use
 */
function contrastAdjust(color: IRGB, suggestedRelLuminance: ISuggestionRange): IRGB {
  // it is possible that the current color meets the suggested relative luminance
  let currRelLuminance: number = relativeLuminance(color.r, color.g, color.b);
  if (
    currRelLuminance >= suggestedRelLuminance.min &&
    currRelLuminance <= suggestedRelLuminance.max
  ) {
    return { r: color.r, g: color.g, b: color.b }; // make a copy to be safe
  }
  const hsl: IHSL = rgb2hsl(color);
  // allow for a .01 (totally arbitrary) error bound, also a good cutting off point
  // the error bound is safe as it will eventually result in an overcautious contrast ratio
  // and cap from 0 to 1 as relative luminance is normalized against that range
  const desiredMin: number = Math.max(suggestedRelLuminance.min - 0.01, 0);
  const desiredMax: number = Math.min(suggestedRelLuminance.max + 0.01, 1);
  // binary search across l values
  // Seed the bounds with the current lightness: if we are too dark, only search
  // above hsl.l implicitly by keeping it as the lower bound (and vice versa).
  let minL: number = currRelLuminance < desiredMin ? hsl.l : 0;
  let maxL: number = currRelLuminance > desiredMax ? hsl.l : 1;
  let rgbFinal: IRGB = { r: 0, g: 0, b: 0 }; // default to black
  while (currRelLuminance < desiredMin || currRelLuminance > desiredMax) {
    // Probe the midpoint lightness, convert back to RGB, and re-measure.
    hsl.l = (maxL + minL) / 2;
    rgbFinal = hsl2rgb(hsl);
    currRelLuminance = relativeLuminance(rgbFinal.r, rgbFinal.g, rgbFinal.b);
    if (currRelLuminance > desiredMax) {
      maxL = (maxL + minL) / 2;
    } else if (currRelLuminance < desiredMin) {
      minL = (maxL + minL) / 2;
    }
  }
  return rgbFinal;
}
/**
 * If possible, this will return a valid color to use to contrast against a background
 * The returned color attempts to maintain the chromaticity of the baseline color
 * If the desired ratio is unachievable white or black (dependent on target's relative luminance)
 * will be used
 *
 * @param baseline - a color value serving as a baseline for the tone (hue and saturation) to maintain
 * @param target - the background color to contrast against
 * @param desiredRatio - a desired contrast ratio (default is WCAG 2 AA standard for normal text)
 */
export function adjustForContrast(baseline: IRGB, target: IRGB, desiredRatio: number = 4.5): IRGB {
  const desiredRelLuminance: ISuggestionRange = getContrastingLuminanceRange(target, desiredRatio);
  // default to black or white
  // Sentinel ranges from getContrastingLuminanceRange: [-1, -1] => black, [2, 2] => white.
  if (desiredRelLuminance.min === -1) {
    // go to black
    return { r: 0, g: 0, b: 0 };
  }
  if (desiredRelLuminance.min === 2) {
    // go to white
    return { r: 255, g: 255, b: 255 };
  }
  return contrastAdjust(baseline, desiredRelLuminance);
}
/**
 * Gets a (potentially cached) rgb value for a given color string.
 * Unparseable colors are cached as black so they are not re-parsed.
 *
 * @param lookup - lookup table for looking up and caching name to rgb values
 * @param color - color string to look up in the table
 */
function _getRgbForColor(lookup: IContrastCache['rgbLookup'], color: string): IRGB {
  let rgb = lookup[color];
  if (!rgb) {
    rgb = cssColor(color) || { r: 0, b: 0, g: 0 };
    lookup[color] = rgb;
  }
  return rgb;
}
// Named contrast levels accepted by getContrastingColor.
export type RequiredContrast = 'low' | 'medium' | 'high';
// Numeric contrast-ratio target for each named level.
// 4.5 matches the WCAG 2 AA standard for normal text (see adjustForContrast's default).
const _contrastDefaults: { [K in RequiredContrast]: number } = {
  low: 3.0,
  medium: 4.5,
  high: 6.0,
};
/**
 * internal interface for caching contrast adjusted values. This has a lookup table for converting
 * strings into rgb values and a cache for remembering previously calculated values since the
 * actual luminance adjustment process is potentially expensive and because the calculations will
 * always be constant for a given pair of values.
 */
interface IContrastCache {
  // Memoized color-string -> IRGB parses (populated by _getRgbForColor).
  rgbLookup: {
    [color: string]: IRGB;
  };
  // cache[backgroundColor][foregroundColor][contrastLevel] -> adjusted color string.
  cache: {
    [bgColor: string]: {
      [fgColor: string]: { [K in RequiredContrast]?: string };
    };
  };
}
/**
 * internal cache object
 */
const _contrastCache: IContrastCache = {
  rgbLookup: {},
  cache: {},
};
/**
* Take two strings representing a foreground and background color and potentially return a new foreground
* color value which has an acceptable level of contrast with the background. Because this can be expensive
* it has an internal cache.
* @param color - foreground color to potentially adjust for contrast
* @param backgroundColor - background color to that the color needs to be shown on
* @param desiredRatio - desired contrast ratio, defaults to 4.5
*/
export function getContrastingColor(
color: string,
backgroundColor: string,
requiredContrast: RequiredContrast = 'medium',
): string {
const desiredRatio = _contrastDefaults[requiredContrast];
const cache = _contrastCache.cache;
/* eslint-disable no-multi-assign */
const bgEntry = (cache[backgroundColor] = cache[backgroundColor] || {});
const fgEntry = (bgEntry[color] = bgEntry[color] || {});
if (!fgEntry[requiredContrast]) {
const rgbLookup = _contrastCache.rgbLookup;
const fg = _getRgbForColor(rgbLookup, color);
const bg = _getRgbForColor(rgbLookup, backgroundColor);
const newFg = adjustForContrast(fg, bg, desiredRatio);
fgEntry[requiredContrast] = rgbToString(newFg.r, newFg.g, newFg.b);
}
return fgEntry[requiredContrast]!;
} | the_stack |
import Component from '@glimmer/component';
import { getOwner } from '@ember/application';
import {
IWorkflowMessage,
Milestone,
NetworkAwareWorkflowCard,
NetworkAwareWorkflowMessage,
PostableCollection,
Workflow,
WorkflowCard,
WorkflowMessage,
WorkflowName,
WorkflowPostable,
conditionalCancelationMessage,
} from '@cardstack/web-client/models/workflow';
import Layer1Network from '@cardstack/web-client/services/layer1-network';
import Layer2Network from '@cardstack/web-client/services/layer2-network';
import { inject as service } from '@ember/service';
import RouterService from '@ember/routing/router-service';
import { currentNetworkDisplayInfo as c } from '@cardstack/web-client/utils/web3-strategies/network-display-info';
import { capitalize } from '@ember/string';
import BN from 'bn.js';
import { tracked } from '@glimmer/tracking';
import { taskFor } from 'ember-concurrency-ts';
import {
rawTimeout,
TaskGenerator,
waitForProperty,
waitForQueue,
} from 'ember-concurrency';
import { task } from 'ember-concurrency-decorators';
import { formatWeiAmount } from '@cardstack/web-client/helpers/format-wei-amount';
import { action } from '@ember/object';
import { standardCancelationPostables } from '@cardstack/web-client/models/workflow/cancelation-helpers';
// Reasons this workflow can be canceled; each is matched to a user-facing
// message in WithdrawalWorkflow.cancelationMessages below.
const FAILURE_REASONS = {
  DISCONNECTED: 'DISCONNECTED',
  ACCOUNT_CHANGED: 'ACCOUNT_CHANGED',
  RESTORATION_L1_ADDRESS_CHANGED: 'RESTORATION_L1_ADDRESS_CHANGED',
  RESTORATION_L1_DISCONNECTED: 'RESTORATION_L1_DISCONNECTED',
  RESTORATION_L2_ADDRESS_CHANGED: 'RESTORATION_L2_ADDRESS_CHANGED',
  RESTORATION_L2_DISCONNECTED: 'RESTORATION_L2_DISCONNECTED',
} as const;
// Titles for the six withdrawal milestones, in workflow order; the
// WithdrawalWorkflow milestones index into this array.
export const MILESTONE_TITLES = [
  `Connect ${c.layer1.conversationalName} wallet`,
  `Check ${c.layer1.nativeTokenSymbol} balance`,
  `Connect ${c.layer2.conversationalName} wallet`,
  `Withdraw from ${c.layer2.conversationalName}`,
  `Bridge tokens to ${c.layer1.conversationalName}`,
  `Claim tokens on ${c.layer1.conversationalName}`,
];
// Version stamp for persisted workflow sessions; presumably bumped whenever the
// milestone/postable structure changes so stale sessions are not restored —
// confirm against the Workflow base class's versioning logic.
export const WORKFLOW_VERSION = 5;
/**
 * Workflow message that waits for the layer 1 wallet to connect, estimates the
 * gas needed for the final withdrawal-claim transaction, and renders a message
 * comparing that minimum against the user's current layer 1 balance.
 */
class CheckBalanceWorkflowMessage
  extends WorkflowPostable
  implements IWorkflowMessage
{
  // Estimated minimum balance needed for the claim transaction; undefined until
  // fetchMininumBalanceForWithdrawalClaimTask resolves it.
  @tracked minimumBalanceForWithdrawalClaim: BN | undefined;
  cardName = 'CHECK_BALANCE_MESSAGE';
  constructor() {
    super();
    // Fire-and-forget: `message` re-renders as the task fills in tracked state.
    taskFor(this.fetchMininumBalanceForWithdrawalClaimTask).perform();
  }
  @task
  *fetchMininumBalanceForWithdrawalClaimTask() {
    yield waitForQueue('afterRender'); // avoid error from using and setting workflow in the render queue
    yield waitForProperty(this, 'layer1Network', Boolean);
    // couldn't use waitForProperty for the layer1Network.defaultTokenBalance because waitForProperty is not reliable for tracked properties
    yield taskFor(this.waitUntilTask).perform(
      () => !!this.layer1Network.defaultTokenBalance
    );
    // HACK: We are passing "DAI" in the next line, but the user hasn't actually specified what token they will be withdrawing yet.
    let minimum: BN =
      yield this.layer1Network.getEstimatedGasForWithdrawalClaim('DAI');
    this.minimumBalanceForWithdrawalClaim = minimum;
    // Persist the minimum so later cards in the workflow session can read it.
    this.workflow?.session.setValue(
      'minimumBalanceForWithdrawalClaim',
      minimum
    );
    this.isComplete = true;
  }
  // Markdown message shown in the workflow thread; varies by whether the
  // balance check has finished and whether the balance covers the minimum.
  get message() {
    let { layer1Network, minimumBalanceForWithdrawalClaim } = this;
    if (
      layer1Network.defaultTokenBalance === undefined ||
      minimumBalanceForWithdrawalClaim === undefined
    ) {
      return 'Checking your balance...';
    }
    if (
      layer1Network.defaultTokenBalance!.gte(minimumBalanceForWithdrawalClaim)!
    ) {
      return `Checking your balance...
It looks like you have enough ${
  c.layer1.nativeTokenSymbol
} in your account on ${
  c.layer1.fullName
} to perform the last step of this withdrawal workflow, which requires ~${formatWeiAmount(
  minimumBalanceForWithdrawalClaim,
  false
)} ${c.layer1.nativeTokenSymbol}.`;
    } else {
      return `Checking your balance...
The last step of this withdrawal requires that you have at least **~${formatWeiAmount(
  minimumBalanceForWithdrawalClaim,
  false
)} ${c.layer1.nativeTokenSymbol}**.
You only have **${formatWeiAmount(
  layer1Network.defaultTokenBalance,
  false
)} ${c.layer1.nativeTokenSymbol}**. You will need to deposit more
${
  c.layer1.nativeTokenSymbol
} to your account shown below to continue the withdrawal.`;
    }
  }
  // Convenience accessor: the owning workflow injects the layer 1 network service.
  get layer1Network() {
    let workflow = this.workflow as WithdrawalWorkflow;
    return workflow?.layer1Network;
  }
  // Polls `predicate` every `delayMs` milliseconds until it returns true.
  @task *waitUntilTask(
    predicate: () => boolean,
    delayMs = 1000
  ): TaskGenerator<void> {
    while (!predicate()) {
      yield rawTimeout(delayMs);
    }
  }
}
/**
 * The withdrawal workflow: connect both wallets, check the layer 1 balance
 * against the estimated claim gas, withdraw from layer 2, bridge the tokens to
 * layer 1, and claim them. Milestones, epilogue, and cancelation messages are
 * declared up front; the Workflow base class drives their progression.
 */
export class WithdrawalWorkflow extends Workflow {
  @service declare layer1Network: Layer1Network;
  @service declare layer2Network: Layer2Network;
  name = 'WITHDRAWAL' as WorkflowName;
  version = WORKFLOW_VERSION;
  // Milestone order mirrors MILESTONE_TITLES (indices 0-5).
  milestones = [
    new Milestone({
      title: MILESTONE_TITLES[0],
      postables: [
        new WorkflowMessage({
          message: 'Hi there, it’s good to see you!',
        }),
        new WorkflowMessage({
          message: `In order to make a withdrawal, you need to connect two wallets:
* **${c.layer1.fullName} wallet:**
Linked to the ${c.layer1.shortName} blockchain on ${c.layer1.conversationalName}
* **${c.layer2.fullName} wallet:**
Linked to the ${c.layer2.shortName} blockchain for low-cost transactions
`,
        }),
        new NetworkAwareWorkflowMessage({
          message: `Looks like you’ve already connected your ${c.layer1.fullName} wallet, which you can see below.
Please continue with the next step of this workflow.`,
          includeIf() {
            return this.hasLayer1Account;
          },
        }),
        new NetworkAwareWorkflowCard({
          cardName: 'LAYER1_CONNECT',
          componentName: 'card-pay/layer-one-connect-card',
        }),
      ],
      completedDetail: `${capitalize(
        c.layer1.conversationalName
      )} wallet connected`,
    }),
    new Milestone({
      title: MILESTONE_TITLES[1],
      postables: [
        new CheckBalanceWorkflowMessage(),
        new WorkflowCard({
          cardName: 'CHECK_BALANCE',
          componentName: 'card-pay/withdrawal-workflow/check-balance',
        }),
      ],
      completedDetail: `${c.layer1.nativeTokenSymbol} balance checked`,
    }),
    new Milestone({
      title: MILESTONE_TITLES[2],
      postables: [
        new NetworkAwareWorkflowMessage({
          message: `Looks like you’ve already connected your ${c.layer2.fullName} wallet, which you can see below.
Please continue with the next step of this workflow.`,
          includeIf() {
            return this.hasLayer2Account;
          },
        }),
        new NetworkAwareWorkflowMessage({
          message: `You have connected your ${c.layer1.fullName} wallet. Now it’s time to connect your ${c.layer2.fullName}
wallet via your Card Wallet mobile app. If you don’t have the app installed, please do so now.`,
          includeIf() {
            return !this.hasLayer2Account;
          },
        }),
        new NetworkAwareWorkflowMessage({
          message: `Once you have installed the app, open the app and add an existing wallet/account or create a
new wallet/account. Use your account to scan this QR code, which will connect your account
with Card Pay.`,
          includeIf() {
            return !this.hasLayer2Account;
          },
        }),
        new WorkflowCard({
          cardName: 'LAYER2_CONNECT',
          componentName: 'card-pay/layer-two-connect-card',
        }),
      ],
      completedDetail: `${c.layer2.conversationalName} wallet connected`,
    }),
    new Milestone({
      title: MILESTONE_TITLES[3],
      postables: [
        new WorkflowMessage({
          message: `Please choose the asset you would like to withdraw.`,
        }),
        new WorkflowCard({
          cardName: 'CHOOSE_BALANCE',
          componentName: 'card-pay/withdrawal-workflow/choose-balance',
        }),
        new WorkflowMessage({
          message: 'How much would you like to withdraw from your balance?',
        }),
        new WorkflowCard({
          cardName: 'TRANSACTION_AMOUNT',
          componentName: 'card-pay/withdrawal-workflow/transaction-amount',
        }),
      ],
      completedDetail: `Withdrawn from ${c.layer2.conversationalName}`,
    }),
    new Milestone({
      title: MILESTONE_TITLES[4],
      postables: [
        new WorkflowMessage({
          message: `Now that you have withdrawn funds from the ${c.layer2.fullName},
your tokens will be bridged to ${c.layer1.fullName}. You can check the status below.`,
        }),
        new WorkflowCard({
          cardName: 'TRANSACTION_STATUS',
          componentName: 'card-pay/withdrawal-workflow/transaction-status',
        }),
      ],
      completedDetail: `Tokens bridged to ${c.layer1.conversationalName}`,
    }),
    new Milestone({
      title: MILESTONE_TITLES[5],
      postables: [
        new WorkflowMessage({
          message: `As a final step, please sign this transaction to claim the bridged tokens into your
${c.layer1.fullName} wallet. You will have to pay ${c.layer1.conversationalName} gas fee for this operation.`,
        }),
        new WorkflowCard({
          cardName: 'TOKEN_CLAIM',
          componentName: 'card-pay/withdrawal-workflow/token-claim',
        }),
      ],
      completedDetail: `Tokens claimed on ${c.layer1.conversationalName}`,
    }),
  ];
  // Shown after the final milestone completes.
  epilogue = new PostableCollection([
    new WorkflowMessage({
      message: `Congrats! Your withdrawal is complete.`,
    }),
    new WorkflowCard({
      cardName: 'TRANSACTION_CONFIRMED',
      componentName: 'card-pay/withdrawal-workflow/transaction-confirmed',
    }),
    new WorkflowMessage({
      message: `This is the remaining balance in your ${c.layer2.fullName} wallet:`,
    }),
    new WorkflowCard({
      cardName: 'EPILOGUE_SAFE_BALANCE_CARD',
      componentName: 'card-pay/safe-balance-card',
      config: {
        safeAddressKey: 'withdrawalSafe',
      },
    }),
    new WorkflowCard({
      cardName: 'EPILOGUE_NEXT_STEPS',
      componentName: 'card-pay/withdrawal-workflow/next-steps',
    }),
  ]);
  // One conditional message per FAILURE_REASONS entry, plus the shared set of
  // standard cancelation postables.
  cancelationMessages = new PostableCollection([
    conditionalCancelationMessage({
      forReason: FAILURE_REASONS.DISCONNECTED,
      message:
        'It looks like your wallet(s) got disconnected. If you still want to withdraw tokens, please start again by connecting your wallet(s).',
    }),
    conditionalCancelationMessage({
      forReason: FAILURE_REASONS.ACCOUNT_CHANGED,
      message:
        'It looks like you changed accounts in the middle of this workflow. If you still want to withdraw funds, please restart the workflow.',
    }),
    conditionalCancelationMessage({
      forReason: FAILURE_REASONS.RESTORATION_L1_ADDRESS_CHANGED,
      message:
        'You attempted to restore an unfinished workflow, but you changed your Layer 1 wallet address. Please restart the workflow.',
    }),
    conditionalCancelationMessage({
      forReason: FAILURE_REASONS.RESTORATION_L2_ADDRESS_CHANGED,
      message:
        'You attempted to restore an unfinished workflow, but you changed your Card Wallet address. Please restart the workflow.',
    }),
    conditionalCancelationMessage({
      forReason: FAILURE_REASONS.RESTORATION_L2_DISCONNECTED,
      message:
        'You attempted to restore an unfinished workflow, but your Card Wallet got disconnected. Please restart the workflow.',
    }),
    conditionalCancelationMessage({
      forReason: FAILURE_REASONS.RESTORATION_L1_DISCONNECTED,
      message:
        'You attempted to restore an unfinished workflow, but your Layer 1 wallet got disconnected. Please restart the workflow.',
    }),
    ...standardCancelationPostables(),
  ]);
  // Validates that a persisted session can still be restored against the
  // currently connected wallets; returns one failure reason per mismatch.
  restorationErrors() {
    let { layer1Network, layer2Network } = this;
    let errors = super.restorationErrors();
    if (!layer1Network.isConnected) {
      errors.push(FAILURE_REASONS.RESTORATION_L1_DISCONNECTED);
    }
    let persistedLayer1Address = this.session.getValue<string>(
      'layer1WalletAddress'
    );
    if (
      layer1Network.isConnected &&
      persistedLayer1Address &&
      layer1Network.walletInfo.firstAddress !== persistedLayer1Address
    ) {
      errors.push(FAILURE_REASONS.RESTORATION_L1_ADDRESS_CHANGED);
    }
    if (!layer2Network.isConnected) {
      errors.push(FAILURE_REASONS.RESTORATION_L2_DISCONNECTED);
    }
    let persistedLayer2Address = this.session.getValue<string>(
      'layer2WalletAddress'
    );
    if (
      layer2Network.isConnected &&
      persistedLayer2Address &&
      layer2Network.walletInfo.firstAddress !== persistedLayer2Address
    ) {
      errors.push(FAILURE_REASONS.RESTORATION_L2_ADDRESS_CHANGED);
    }
    return errors;
  }
  // Awaited before restoration runs so the layer 1 account has settled first.
  beforeRestorationChecks() {
    return [this.layer1Network.waitForAccount];
  }
  constructor(owner: unknown, workflowPersistenceId?: string) {
    super(owner, workflowPersistenceId);
    this.attachWorkflow();
  }
}
export default class WithdrawalWorkflowComponent extends Component {
@service declare layer1Network: Layer1Network;
@service declare layer2Network: Layer2Network;
@tracked workflow: WithdrawalWorkflow | null = null;
@service declare router: RouterService;
constructor(owner: unknown, args: {}) {
super(owner, args);
let workflowPersistenceId =
this.router.currentRoute.queryParams['flow-id']!;
let workflow = new WithdrawalWorkflow(
getOwner(this),
workflowPersistenceId
);
this.restore(workflow);
}
async restore(workflow: any) {
await workflow.restore();
this.workflow = workflow;
}
@action onDisconnect() {
this.workflow?.cancel(FAILURE_REASONS.DISCONNECTED);
}
@action onAccountChanged() {
this.workflow?.cancel(FAILURE_REASONS.ACCOUNT_CHANGED);
}
} | the_stack |
import { AdapterBase } from "./AdapterBase";
import { IBoard, ISpace, getConnections, addEventByIndex, addEventToSpaceInternal } from "../boards";
import { Space, BoardType, SpaceSubtype, EventExecutionType, EditorEventActivationType, getEventActivationTypeFromEditorType } from "../types";
import { $$log } from "../utils/debug";
import { createEventInstance } from "../events/events";
import { strings } from "../fs/strings";
import { arrayToArrayBuffer } from "../utils/arrays";
import { strings3 } from "../fs/strings3";
import { toArrayBuffer } from "../utils/image";
import { mainfs } from "../fs/mainfs";
import { toPack } from "../utils/img/ImgPack";
import { BMPfromRGBA } from "../utils/img/BMP";
import { FORM } from "../models/FORM";
import { scenes } from "../fs/scenes";
import { SpaceEventList } from "./eventlist";
import { IBoardInfo } from "./boardinfobase";
import { ChainSplit3 } from "../events/builtin/MP3/U/ChainSplit3";
import { ChainMerge3 } from "../events/builtin/MP3/U/ChainMergeEvent3";
import { ChainMerge } from "../events/builtin/ChainMergeEvent";
import { BankEvent } from "../events/builtin/events.common";
import { createBoardOverlay } from "./MP3.U.boardoverlay";
import { getSoundEffectMapMP3 } from "./MP3.U.soundeffects";
import genericgateImage from "../img/assets/genericgate.png";
import { getImageData } from "../utils/img/getImageData";
import { getEventsInLibrary } from "../events/EventLibrary";
import { EventMap } from "../app/boardState";
export const MP3 = new class MP3Adapter extends AdapterBase {
// Mario Party game number this adapter targets.
public gameVersion: 1 | 2 | 3 = 3;
// Main FS [dir, file] locations of the boot logos that get replaced/branded.
public nintendoLogoFSEntry: number[] = [17, 1];
public hudsonLogoFSEntry: number[] = [17, 2];
// Main FS directory holding the board definition files.
public boardDefDirectory: number = 19;
// RAM addresses of game routines/tables used when patching.
// NOTE(review): presumably RAM offsets within the game's code segment — confirm
// against AdapterBase's usage of these fields.
public MAINFS_READ_ADDR: number = 0x00009C10;
public HEAP_FREE_ADDR: number = 0x00009E6C;
public TABLE_HYDRATE_ADDR: number = 0x000EBA60;
onLoad(board: IBoard, boardInfo: IBoardInfo, boardWasStashed: boolean) {
    // Stashed boards have already had their banks and item shops extracted;
    // only process freshly loaded boards.
    if (boardWasStashed) {
      return;
    }
    this._extractBanks(board, boardInfo);
    this._extractItemShops(board, boardInfo);
}
// Delegates board overlay assembly to the MP3 (U)-specific generator.
onCreateBoardOverlay(board: IBoard, boardInfo: IBoardInfo, boardIndex: number, audioIndices: number[]) {
    return createBoardOverlay(board, boardInfo, boardIndex, audioIndices);
}
// Applies ROM/scene patches after a board overwrite: expand the usable heap,
// fix character unlocks, and work around a hang on boards with few plain spaces.
// The hex offsets are game-specific and were determined by ROM hackers
// (credited inline); do not adjust them without re-verifying in-game.
onAfterOverwrite(romView: DataView, board: IBoard, boardInfo: IBoardInfo, boardIndex: number): void {
    // Patch game to use all 8MB.
    romView.setUint16(0x360EE, 0x8040); // Main heap now starts at 0x80400000
    romView.setUint16(0x360F6, (0x00400000 - this.EVENT_MEM_SIZE) >>> 16); // ... and can fill up through reserved event space
    romView.setUint16(0x36102, 0x001A); // Temp heap fills as much as 0x1A8000 (8000 is ORed in)
    romView.setUint16(0x495D6, 0x001A);
    // gamemasterplc: patch both ROM address 0x50DA60 and 0x50DA80 with the value 0x24020001 to fix character unlocks
    // gamemasterplc: aka MIPS Instruction ADDIU V0, R0, 0x1
    const playerSelectScene = scenes.getDataView(120);
    playerSelectScene.setUint32(0xBE60, 0x24020001); // 0x50DA60
    playerSelectScene.setUint32(0xBE80, 0x24020001); // 0x50DA80
    // This generally fixes duels on happening spaces.
    // gamemasterplc: try making 0x00111F04 in ROM 0x10800009 for a temporary fix for question space duels until we figure out events better
    const boardPlayScene = scenes.getDataView(128);
    //boardPlayScene.setUint32(0x27774, 0x10800009); // 800FE2E4
    // The game will soft hang on the first player's turn when the number of plain spaces
    // (red/blue) is less than a certain lowish number.
    // gamemasterplc says it is related to some save flag check.
    // TODO: Waste time figuring out the exact low space threshold or the detailed cause of the bug.
    // Hang around 0x800FC664
    let blueSpaceCount = 0;
    let redSpaceCount = 0;
    // Count plain blue/red spaces to decide whether the low-space patch is needed.
    for (let i = 0; i < board.spaces.length; i++) {
      let space = board.spaces[i];
      if (space.type === Space.BLUE)
        blueSpaceCount++;
      if (space.type === Space.RED)
        redSpaceCount++;
    }
    if (blueSpaceCount < 14 || redSpaceCount < 1) {
      // Fix low spaces issues
      // gamemasterplc: patch ROM offset 0x001101C4 with 0x10000085 to fix low space hangs
      boardPlayScene.setUint32(0x25A34, 0x10000085); // Something like BEQ R0 R0 0x85, so it always branches
      $$log("Patching for low space count.");
    }
}
onOverwritePromises(board: IBoard, boardInfo: IBoardInfo, boardIndex: number) {
    const bgIndex = boardInfo.bgDir;
    // Kick off all image/background writes in parallel; resolve when all finish.
    return Promise.all([
      this._writeBackground(bgIndex, board.bg.src, board.bg.width, board.bg.height),
      this._writeBackground(bgIndex + 1, board.otherbg.largescene!, 320, 240), // Game start, end
      this._writeBackground(bgIndex + 2, board.bg.src, 320, 240), // Overview map
      this._writeAdditionalBackgrounds(board),
      this.onWriteBoardSelectImg(board, boardInfo),
      this.onWriteBoardLogoImg(board, boardInfo), // Various board logos
      this.onWriteBoardLogoTextImg(board, boardInfo),
      this._onWriteGateImg(board, boardInfo),
      this._brandBootSplashscreen(),
    ]);
}
// Post-save ROM patches that relax environment checks so the modded ROM boots
// on misconfigured emulators (wrong save type) and debugger-sensitive emulators.
onAfterSave(romView: DataView) {
    // This patch makes it so the game will boot if the emulator is misconfigured
    // with something other than 16K EEPROM save type. Obviously users should
    // set the correct save type, but this will let them play at least (with broken saving)
    // gamemasterplc: @PartyPlanner64 the jump you had to overwrite at 8000C2C0 is due
    // to the game needing 16k eeprom and emulators not setting it for modded roms
    romView.setUint32(0x0000CEC0, 0);
    // The release ROM has debugger checks in it, which can cause some
    // emulators (Nemu64) to be upset. This stops the debugger checks.
    romView.setUint32(0x0007FC58, 0); // Don't check if KMC worked...
    romView.setUint32(0x0007FC60, 0); // Don't do KMC success action...
    // The "return;" is just hit after this and the rest of the checks are skipped.
}
// Intentionally a no-op for MP3. NOTE(review): event code appears to be emitted
// elsewhere (presumably via the board overlay, see onCreateBoardOverlay) —
// confirm against AdapterBase before adding logic here.
onWriteEvents(board: IBoard) {
}
// Adds the default events MP3 expects for a given editor activation type.
protected onAddDefaultBoardEvents(editorActivationType: EditorEventActivationType, list: SpaceEventList): void {
    // Only the "Before Dice Roll" slot has defaults in MP3.
    if (editorActivationType !== EditorEventActivationType.BEFORE_DICE_ROLL) {
      return;
    }
    const activationType = getEventActivationTypeFromEditorType(editorActivationType);
    // MP3 has two "Before Dice Roll" default events.
    list.add(activationType, EventExecutionType.DIRECT, "__PP64_INTERNAL_CURSE_POISON_DICEROLL_EVENT");
    list.add(activationType, EventExecutionType.DIRECT, "__PP64_INTERNAL_REVERSAL_DICEROLL_EVENT");
}
// Attaches the implicit events a space type carries; in MP3 only bank spaces
// need hydration (they implicitly run the bank event).
hydrateSpace(space: ISpace, board: IBoard, eventLibrary: EventMap) {
    if (space.type !== Space.BANK) {
      return;
    }
    addEventToSpaceInternal(board, space, createEventInstance(BankEvent), false, eventLibrary);
}
// Translates MP3's numeric in-game space type values into the editor's Space
// enum, using a different mapping for normal vs. duel boards.
// NOTE(review): game types absent from the normal map (e.g. 1, 2, 4) pass
// through unchanged — presumably they already coincide with the Space enum
// values (see the reverse mapping below); confirm against the Space enum.
onChangeBoardSpaceTypesFromGameSpaceTypes(board: IBoard, chains: number[][]) {
    let typeMap: { [index: number]: Space };
    const isNormalBoard = !board.type || board.type === BoardType.NORMAL;
    if (isNormalBoard) {
      typeMap = {
        0: Space.OTHER, // Sometimes START
        3: Space.OTHER,
        5: Space.CHANCE,
        6: Space.ITEM,
        7: Space.BANK,
        8: Space.OTHER,
        9: Space.BATTLE,
        12: Space.BOWSER,
        14: Space.STAR,
        15: Space.GAMEGUY,
        16: Space.OTHER, // Toad
        17: Space.OTHER, // Baby Bowser the COHORT
      };
    }
    else if (board.type === BoardType.DUEL) {
      typeMap = {
        1: Space.OTHER,
        2: Space.HAPPENING,
        3: Space.GAMEGUY,
        4: Space.OTHER, // seen on spaces that have events
        5: Space.DUEL_REVERSE,
        6: Space.DUEL_BASIC,
        7: Space.DUEL_START_RED,
        8: Space.MINIGAME,
        9: Space.DUEL_START_BLUE,
        10: Space.DUEL_POWERUP,
      };
    }
    else {
      // Fixed typo in error message ("Unrecongized" -> "Unrecognized").
      throw new Error(`Unrecognized board type: ${board.type}`);
    }
    board.spaces.forEach((space) => {
      let newType = typeMap[space.type];
      if (newType !== undefined)
        space.type = newType;
    });
    if (isNormalBoard) {
      // The game marks the start space implicitly as the head of the first chain.
      if (chains.length) {
        let startSpaceIndex = chains[0][0];
        if (!isNaN(startSpaceIndex))
          board.spaces[startSpaceIndex].type = Space.START;
      }
    }
}
// Converts editor Space values back into raw game space type values.
// Entries marked N/A have no MP3 representation and fall back to 0.
onChangeGameSpaceTypesFromBoardSpaceTypes(board: IBoard) {
  const spaceToGameType: { [space in Space]: number } = {
    [Space.OTHER]: 0,
    [Space.BLUE]: 1,
    [Space.RED]: 2,
    [Space.MINIGAME]: 0, // N/A
    [Space.HAPPENING]: 4,
    [Space.STAR]: 14,
    [Space.CHANCE]: 5,
    [Space.START]: 0, // N/A
    [Space.SHROOM]: 0, // N/A
    [Space.BOWSER]: 12,
    [Space.ITEM]: 6,
    [Space.BATTLE]: 9,
    [Space.BANK]: 7,
    [Space.ARROW]: 13,
    [Space.GAMEGUY]: 15, // ?
    [Space.BLACKSTAR]: 0, // N/A
    [Space.DUEL_BASIC]: 0, // N/A
    [Space.DUEL_START_BLUE]: 0, // N/A
    [Space.DUEL_START_RED]: 0, // N/A
    [Space.DUEL_POWERUP]: 0,// N/A
    [Space.DUEL_REVERSE]: 0, // N/A
  };
  for (const space of board.spaces) {
    const gameType = spaceToGameType[space.type];
    if (gameType !== undefined) {
      space.type = gameType;
    }
  }
}
// Creates the chain-based event objects that we abstract out in the UI.
// Override from base to also add reverse shroom events and special chain split.
onCreateChainEvents(board: IBoard, chains: number[][]) {
  // There is either a merge or a split at the end of each chain.
  for (let i = 0; i < chains.length; i++) {
    let chain = chains[i];
    // if (chain.length < 2) {
    //   throw new Error("MP3 onCreateChainEvents assertion failed: chain.length < 2");
    // }
    let firstSpace = chain[0];
    let secondSpace = chain[1];
    let lastSpace = chain[chain.length - 1];
    let prevSpace = chain[chain.length - 2]; // For MP3
    let endLinks = getConnections(lastSpace, board)!;
    let event;
    if (endLinks.length > 1) {
      // A split, figure out the end points.
      if (endLinks.length > 2)
        throw new Error("MP3 cannot support more than 2 split directions");
      let chainIndices: number[] = [];
      endLinks.forEach(link => {
        chainIndices.push(_getChainWithSpace(link)!);
      });
      // Create the args, which are more sophisticated / declarative in MP3 (yay)
      // The args consist of the space indices and chain indices of the two directions,
      // as well as a couple variations of each when they are used with reverse shroom.
      let spaceIndexArgs = [];
      spaceIndexArgs.push(endLinks[0]); // First two space indices
      spaceIndexArgs.push(endLinks[1]);
      spaceIndexArgs.push(0xFFFF);
      spaceIndexArgs.push(prevSpace); // As if returning from first link direction
      spaceIndexArgs.push(endLinks[1]);
      spaceIndexArgs.push(0xFFFF);
      spaceIndexArgs.push(prevSpace); // As if returning from 2nd link direction
      spaceIndexArgs.push(endLinks[0]);
      spaceIndexArgs.push(0xFFFF);
      spaceIndexArgs.push(0x0000);
      let chainArgs = [];
      chainArgs.push(chainIndices[0]); // Now the two chain indices and the "indices into the chains"
      chainArgs.push(0x0000); // We know these two are always 0 because of how we generate chains.
      chainArgs.push(0x0000); // mystery
      chainArgs.push(chainIndices[1]);
      chainArgs.push(0x0000);
      chainArgs.push(0x0000); // mystery
      chainArgs.push(i);
      chainArgs.push(chain.length - 2); // Return to _near_ end of entering chain
      chainArgs.push(0x0001); // mystery
      chainArgs.push(chainIndices[1]); // ...yes they flip, for confusion
      chainArgs.push(0x0000);
      chainArgs.push(0x0000);
      chainArgs.push(i);
      chainArgs.push(chain.length - 2); // Return to _near_ end of entering chain
      chainArgs.push(0x0001);
      chainArgs.push(chainIndices[0]);
      chainArgs.push(0x0000);
      chainArgs.push(0x0000);
      let chainWithGate = _needsGateChainSplit(chainIndices);
      if (chainWithGate != null) {
        // A split where one branch leads through a skeleton-key gate.
        event = createEventInstance(ChainSplit3, {
          parameterValues: {
            spaceIndexArgs,
            chainArgs,
            hasgate: true,
            prevSpace: chains[chainWithGate][0],
            // NOTE(review): the arrow parameter "i" below shadows the outer
            // loop index "i"; it refers to a chain index here, not the loop.
            altChain: [
              chainIndices.find(i => i !== chainWithGate)!, // Chain index
              0, // Index in chain
            ],
          },
        });
      }
      else {
        event = createEventInstance(ChainSplit3, {
          parameterValues: {
            spaceIndexArgs,
            chainArgs,
          },
        });
      }
      addEventByIndex(board, lastSpace, event, true, getEventsInLibrary());
    }
    else if (endLinks.length > 0) {
      // Exactly one outgoing link: a plain chain merge.
      event = createEventInstance(ChainMerge3, {
        parameterValues: {
          chain: _getChainWithSpace(endLinks[0])!,
          prevSpace, // For MP3
        },
      });
      addEventByIndex(board, lastSpace, event, true, getEventsInLibrary());
    }
    // See if we need a reverse split event, reverse chain merge, or safety chain merge.
    let pointingSpaces = _getSpacesPointingToSpace(firstSpace);
    if (pointingSpaces.length) {
      let chainIndices: number[] = [];
      pointingSpaces.forEach(link => {
        chainIndices.push(_getChainWithSpace(link)!);
      });
      let pointingChains: number[][] = [];
      chainIndices.forEach(index => {
        pointingChains.push(chains[index]);
      });
      if (pointingSpaces.length >= 2) { // Build a reverse split.
        // FIXME: This obviously only deals with === 2, but rather than
        // restrict boards, we can just arbitrarily not allow going backwards
        // in some particular direction(s) for now.
        // The reverse args are basically the same, except the 1 bit is placed differently.
        let spaceIndexArgs = [];
        spaceIndexArgs.push(secondSpace);
        spaceIndexArgs.push(pointingSpaces[0]);
        spaceIndexArgs.push(0xFFFF);
        spaceIndexArgs.push(pointingSpaces[1]); // Probably the only indices that matter
        spaceIndexArgs.push(pointingSpaces[0]);
        spaceIndexArgs.push(0xFFFF);
        spaceIndexArgs.push(pointingSpaces[1]);
        spaceIndexArgs.push(secondSpace);
        spaceIndexArgs.push(0xFFFF);
        spaceIndexArgs.push(0x0000);
        // Now the chain indices and the "indices into the chains"
        let chainArgs = [];
        chainArgs.push(i);
        chainArgs.push(0x0001); // Second space index
        chainArgs.push(0x0000);
        chainArgs.push(chainIndices[0]);
        chainArgs.push(pointingChains[0].length - 1); // Return to end of entering chain
        chainArgs.push(0x0001);
        chainArgs.push(chainIndices[1]);
        chainArgs.push(pointingChains[1].length - 1);
        chainArgs.push(0x0001);
        chainArgs.push(chainIndices[0]);
        chainArgs.push(pointingChains[0].length - 1);
        chainArgs.push(0x0001);
        chainArgs.push(chainIndices[1]);
        chainArgs.push(pointingChains[1].length - 1);
        chainArgs.push(0x0001);
        chainArgs.push(i);
        chainArgs.push(0x0001); // Second space index
        chainArgs.push(0x0000);
        event = createEventInstance(ChainSplit3, {
          parameterValues: {
            spaceIndexArgs,
            chainArgs,
            reverse: true,
          },
          executionType: EventExecutionType.DIRECT, // Notable difference
        });
        addEventByIndex(board, firstSpace, event, true, getEventsInLibrary());
      }
      else if (pointingSpaces.length === 1) { // Build a reverse merge
        event = createEventInstance(ChainMerge3, {
          parameterValues: {
            chain: chainIndices[0], // Go to pointing chain
            spaceIndex: pointingChains[0].length - 1, // Go to last space of pointing chain
            prevSpace: secondSpace, // The 2nd space of this chain, which would have been previous when going reverse.
          },
        });
        addEventByIndex(board, firstSpace, event, true, getEventsInLibrary());
      }
    }
    else {
      // If nothing points to this chain, the player could still reverse their
      // way towards the beginning of the chain (start space for example).
      // At the start of these chains, we put a type 8 event to spin them around.
      // It is redundant when going forward on the chain but doesn't hurt.
      let firstLinks = getConnections(firstSpace, board)!;
      if (firstLinks.length > 1) {
        $$log("FIXME: branching isolated chain?");
      }
      else if (firstLinks.length > 0) {
        // This doesn't crash, but it creates a back forth loop at a dead end.
        // This probably will yield issues if the loop is over invisible spaces.
        // Only do this if `firstLinks.length > 0`; if this is false, this is a single decorative space.
        event = createEventInstance(ChainMerge, { // Not CHAINMERGE3
          parameterValues: {
            chain: i,
            spaceIndex: 1, // Because of chain padding, this should be safe
          },
        });
        event.activationType = EditorEventActivationType.BEGINORWALKOVER;
        addEventByIndex(board, firstSpace, event, true, getEventsInLibrary());
      }
    }
  }
  // Returns the index of the chain containing `space`, or undefined.
  function _getChainWithSpace(space: number) {
    for (let c = 0; c < chains.length; c++) {
      if (chains[c].indexOf(space) >= 0) // Should really be 0 always - game does support supplied index other than 0 though.
        return c;
    }
  }
  // Returns the indices of all spaces with a connection into `space`.
  function _getSpacesPointingToSpace(space: number) {
    let pointingSpaces = [];
    for (let s = 0; s < board.spaces.length; s++) {
      let spaceLinks = getConnections(s, board)!;
      if (spaceLinks.indexOf(space) >= 0)
        pointingSpaces.push(s);
    }
    return pointingSpaces;
  }
  // Returns space index with gate, or undefined
  function _chainHasGate(chain: number[]) {
    return chain.find(i => {
      return board.spaces[i].subtype === SpaceSubtype.GATE;
    });
  }
  // Returns index of chain with gate.
  // NOTE(review): if several chains contain gates, the last matching index wins.
  function _needsGateChainSplit(chainIndices: number[]) {
    let chainIndex = null;
    chainIndices.forEach(index => {
      let spaceIndexWithGate = _chainHasGate(chains[index]);
      if (typeof spaceIndexWithGate === "number") {
        chainIndex = index;
      }
    });
    return chainIndex;
  }
}
// Reads the board name, description, and difficulty out of the game's
// board-select string table.
onParseStrings(board: IBoard, boardInfo: IBoardInfo) {
  const strs = boardInfo.str || {};
  if (!strs.boardSelect) {
    return;
  }
  if (!Array.isArray(strs.boardSelect))
    throw new Error("Expected number[][]");
  const idx = strs.boardSelect[0] as number[];
  const str = strings3.read("en", idx[0], idx[1]) as string;
  const lines = str.split("\n");
  // The board name sits between ">" and "{" on the first line.
  const nameStart = lines[0].indexOf(">") + 2;
  const nameEnd = lines[0].indexOf("{", nameStart);
  board.name = lines[0].substring(nameStart, nameEnd);
  board.description = [lines[1], lines[2]].join("\n");
  // Difficulty equals the number of hollow-star characters (byte 0x3B).
  const starChar = this.getCharacterMap()[0x3B];
  let difficulty = 0;
  let searchIndex = str.indexOf(starChar, 0);
  while (searchIndex !== -1) {
    difficulty++;
    searchIndex = str.indexOf(starChar, searchIndex + 1);
  }
  board.difficulty = difficulty;
}
// Serializes the board name, description, difficulty, and greeting strings
// back into the game's string tables. The hex pushes are raw MP3 text
// control codes (see getCharacterMap); "?" comments mark codes whose exact
// meaning has not been confirmed.
onWriteStrings(board: IBoard, boardInfo: IBoardInfo) {
  let strs = boardInfo.str || {};
  const boardSelect = strs.boardSelect as number[][];
  if (boardSelect && boardSelect.length) {
    let bytes = [];
    bytes.push(0x0B); // Clear?
    bytes.push(0x05); // Start GREEN
    bytes.push(0x0F); // ?
    bytes = bytes.concat(strings._strToBytes(board.name || ""));
    bytes.push(0x16);
    bytes.push(0x19);
    bytes.push(0x0F);
    bytes = bytes.concat([0x20, 0x20, 0x20, 0x20]); // Spaces
    bytes.push(0x16);
    bytes.push(0x03);
    bytes.push(0x0F);
    bytes = bytes.concat(strings._strToBytes("Difficulty: "));
    let star = 0x3B;
    // Out-of-range difficulty can't be drawn as a row of stars; render
    // "star x N" instead.
    if (board.difficulty > 5 || board.difficulty < 1) { // Hackers!
      bytes.push(star);
      bytes = bytes.concat(strings._strToBytes(" "));
      bytes.push(0x3E); // Little x
      bytes = bytes.concat(strings._strToBytes(" " + board.difficulty.toString()));
    }
    else {
      for (let i = 0; i < board.difficulty; i++)
        bytes.push(star);
    }
    bytes.push(0x16);
    bytes.push(0x19);
    bytes.push(0x0A); // \n
    bytes = bytes.concat(strings._strToBytes(board.description || "")); // Assumes \n's are correct within.
    bytes.push(0x00); // Null byte
    let strBuffer = arrayToArrayBuffer(bytes);
    let idx = boardSelect[0];
    strings3.write("en", idx[0], idx[1], strBuffer);
    // The second copy is mostly the same, but add a couple more bytes at the end.
    bytes.pop(); // Null byte
    bytes.push(0x19);
    bytes.push(0xFF);
    bytes.push(0x00); // Null byte
    strBuffer = arrayToArrayBuffer(bytes);
    idx = boardSelect[1];
    strings3.write("en", idx[0], idx[1], strBuffer);
  }
  if (strs.boardGreeting) {
    let bytes = [];
    bytes.push(0x0B);
    bytes = bytes.concat(strings._strToBytes("You're all here!"));
    bytes.push(0x0A); // \n
    bytes = bytes.concat(this._createBoardGreetingBase(board.name));
    bytes.push(0x0B); // ?
    bytes = bytes.concat(strings._strToBytes("Now, before we begin, we need\nto determine the turn order."));
    bytes.push(0x19); // ?
    bytes.push(0xFF); // ?
    bytes.push(0x00); // Null byte
    let strBuffer = arrayToArrayBuffer(bytes);
    strings3.write("en", strs.boardGreeting[0], strs.boardGreeting[1], strBuffer);
  }
  if (strs.boardGreetingDuel) {
    let bytes = [];
    bytes.push(0x0B);
    bytes = bytes.concat(strings._strToBytes("I've been waiting for you, "));
    bytes.push(0x11); // ?
    bytes.push(0xC2); // ?
    bytes.push(0x0A); // \n
    bytes = bytes.concat(this._createBoardGreetingBase(board.name));
    bytes.push(0x0B); // ?
    bytes = bytes.concat(strings._strToBytes("And just as promised, if you win here..."));
    bytes.push(0x19); // ?
    bytes.push(0xFF); // ?
    bytes.push(0x00); // Null byte
    let strBuffer = arrayToArrayBuffer(bytes);
    strings3.write("en", strs.boardGreetingDuel[0], strs.boardGreetingDuel[1], strBuffer);
  }
  if (strs.boardNames && strs.boardNames.length) {
    // The same name string is written to every listed table slot.
    let bytes = [];
    bytes.push(0x0B);
    bytes = bytes.concat(strings._strToBytes(board.name));
    bytes.push(0x00); // Null byte
    let strBuffer = arrayToArrayBuffer(bytes);
    for (let i = 0; i < strs.boardNames.length; i++) {
      let idx = strs.boardNames[i] as number[];
      strings3.write("en", idx[0], idx[1], strBuffer);
    }
  }
}
// Builds the shared middle portion of the greeting dialogs:
// "Welcome to the legendary <name>" plus the Superstar line.
_createBoardGreetingBase(boardName: string) {
  return [
    ...strings._strToBytes("Welcome to the legendary "),
    0x05, // Start GREEN
    0x0F, // ?
    ...strings._strToBytes(boardName),
    0x16, // ?
    0x19, // ?
    0xC2, // ?
    0x19, // ?
    0xFF, // ?
    0x0B, // ?
    ...strings._strToBytes("Here, you'll battle to become\nthe Superstar."),
    0x19, // ?
    0xFF, // ?
  ];
}
// Extracts the board select thumbnail from mainfs dir 20.
onParseBoardSelectImg(board: IBoard, boardInfo: IBoardInfo) {
  const boardSelectImg = boardInfo.img && boardInfo.img.boardSelectImg;
  if (!boardSelectImg)
    return;
  board.otherbg.boardselect = this._readImgFromMainFS(20, boardSelectImg, 0);
}
// Writes the 64x64 board select thumbnail into mainfs dir 20.
// Resolves immediately when the board has no boardSelectImg slot.
onWriteBoardSelectImg(board: IBoard, boardInfo: IBoardInfo): Promise<void> {
  return new Promise((resolve, reject) => {
    const boardSelectImg = boardInfo.img && boardInfo.img.boardSelectImg;
    if (!boardSelectImg) {
      resolve();
      return;
    }
    const srcImage = new Image();
    const failTimer = setTimeout(() => reject(`Failed to write board select for ${boardInfo.name}`), 45000);
    // Fail fast on a bad/undecodable image source instead of silently
    // waiting out the 45s watchdog (previously there was no onerror).
    srcImage.onerror = () => {
      clearTimeout(failTimer);
      reject(`Failed to write board select for ${boardInfo.name}`);
    };
    srcImage.onload = () => {
      const imgBuffer = toArrayBuffer(srcImage, 64, 64);
      // First, read the old image pack.
      const oldPack = mainfs.get(20, boardSelectImg);
      // Then, pack the image and write it.
      const imgInfoArr = [
        {
          src: imgBuffer,
          width: 64,
          height: 64,
          bpp: 32,
        }
      ];
      const newPack = toPack(imgInfoArr, 16, 0, oldPack);
      // saveAs(new Blob([newPack]), "imgpack");
      mainfs.write(20, boardSelectImg, newPack);
      clearTimeout(failTimer);
      resolve();
    };
    srcImage.src = board.otherbg.boardselect!;
  });
}
// Extracts the splash logo and logo text images from mainfs dir 19.
onParseBoardLogoImg(board: IBoard, boardInfo: IBoardInfo) {
  const img = boardInfo.img;
  if (!img || !img.splashLogoImg)
    return;
  board.otherbg.boardlogo = this._readImgFromMainFS(19, img.splashLogoImg, 0);
  board.otherbg.boardlogotext =
    this._readImgFromMainFS(19, img.splashLogoTextImg!, 0);
}
// Writes the 226x120 intro splash logo into mainfs dir 19, and blanks the
// pause-screen logo. Resolves immediately when there is no splashLogoImg slot.
onWriteBoardLogoImg(board: IBoard, boardInfo: IBoardInfo): Promise<void> {
  return new Promise((resolve, reject) => {
    const splashLogoImg = boardInfo.img && boardInfo.img.splashLogoImg;
    if (!splashLogoImg) {
      resolve();
      return;
    }
    const srcImage = new Image();
    const failTimer = setTimeout(() => reject(`Failed to write logos for ${boardInfo.name}`), 45000);
    // Fail fast on a bad/undecodable image source instead of silently
    // waiting out the 45s watchdog (previously there was no onerror).
    srcImage.onerror = () => {
      clearTimeout(failTimer);
      reject(`Failed to write logos for ${boardInfo.name}`);
    };
    srcImage.onload = () => {
      // Write the intro logo images.
      const imgBuffer = toArrayBuffer(srcImage, 226, 120);
      // First, read the old image pack.
      const oldPack = mainfs.get(19, splashLogoImg);
      // Then, pack the image and write it.
      const imgInfoArr = [
        {
          src: imgBuffer,
          width: 226,
          height: 120,
          bpp: 32,
        }
      ];
      const newPack = toPack(imgInfoArr, 16, 0, oldPack);
      // saveAs(new Blob([newPack]), "imgpack");
      mainfs.write(19, splashLogoImg, newPack);
      clearTimeout(failTimer);
      resolve();
    };
    srcImage.src = board.otherbg.boardlogo!;
    // Just blank out the pause logo, it is not worth replacing.
    // (Done synchronously; it does not depend on the loaded image.)
    const pauseLogoImg = boardInfo.img.pauseLogoImg;
    if (pauseLogoImg) {
      const oldPack = mainfs.get(19, pauseLogoImg);
      const imgInfoArr = [{
        src: new ArrayBuffer(150 * 50 * 4),
        width: 150,
        height: 50,
        bpp: 32,
      }];
      const newPack = toPack(imgInfoArr, 16, 0, oldPack);
      mainfs.write(19, pauseLogoImg, newPack);
    }
  });
}
// Writes the 226x36 intro splash logo text image into mainfs dir 19.
// Resolves immediately when there is no splashLogoTextImg slot.
onWriteBoardLogoTextImg(board: IBoard, boardInfo: IBoardInfo): Promise<void> {
  return new Promise((resolve, reject) => {
    const splashLogoTextImg = boardInfo.img && boardInfo.img.splashLogoTextImg;
    if (!splashLogoTextImg) {
      resolve();
      return;
    }
    const srcImage = new Image();
    const failTimer = setTimeout(() => reject(`Failed to write logo text for ${boardInfo.name}`), 45000);
    // Fail fast on a bad/undecodable image source instead of silently
    // waiting out the 45s watchdog (previously there was no onerror).
    srcImage.onerror = () => {
      clearTimeout(failTimer);
      reject(`Failed to write logo text for ${boardInfo.name}`);
    };
    srcImage.onload = () => {
      // Write the intro logo text image.
      const imgBuffer = toArrayBuffer(srcImage, 226, 36);
      // First, read the old image pack.
      const oldPack = mainfs.get(19, splashLogoTextImg);
      // Then, pack the image and write it.
      const imgInfoArr = [
        {
          src: imgBuffer,
          width: 226,
          height: 36,
          bpp: 32,
        }
      ];
      const newPack = toPack(imgInfoArr, 16, 0, oldPack);
      // saveAs(new Blob([newPack]), "imgpack");
      mainfs.write(19, splashLogoTextImg, newPack);
      clearTimeout(failTimer);
      resolve();
    };
    srcImage.src = board.otherbg.boardlogotext!;
  });
}
// Create generic skeleton key gate.
// Writes the generic skeleton-key gate image into the gate FORM slot.
// No-op when the board has no gateImg slot.
async _onWriteGateImg(board: IBoard, boardInfo: IBoardInfo): Promise<void> {
  const gateIndex = boardInfo.img && boardInfo.img.gateImg;
  if (!gateIndex) {
    return;
  }
  // Watchdog for a hang. NOTE(review): throwing from a timer callback
  // cannot reject this async function; it only surfaces an uncaught error.
  const failTimer = setTimeout(() => {
    throw new Error(`Failed to write gate image for ${boardInfo.name}`);
  }, 45000);
  try {
    // We need to write the image onto a canvas to get the RGBA32 values.
    const [width, height] = [64, 64];
    const imgData = await getImageData(genericgateImage, width, height);
    // First create a BMP
    const gateBmp = BMPfromRGBA(imgData.data.buffer, 32, 8);
    // Now write the BMP back into the FORM.
    const gateFORM = mainfs.get(19, 366); // Always use gate 3 as a base.
    const gateUnpacked = FORM.unpack(gateFORM)!;
    FORM.replaceBMP(gateUnpacked, 0, gateBmp[0], gateBmp[1]);
    // Now write the FORM.
    const gatePacked = FORM.pack(gateUnpacked);
    //saveAs(new Blob([gatePacked]), "gatePacked");
    mainfs.write(19, gateIndex, gatePacked);
  }
  finally {
    // Previously the timer leaked (and later threw uncaught) if any awaited
    // call above threw; always clear it.
    clearTimeout(failTimer);
  }
}
// Writes to 0x800A1904, break 0x8004a520 (JAL 0C012948)
/**
 * Returns the music track names, indexed by audio table value.
 * Empty entries are unnamed/placeholder tracks ("Two Beeps").
 * NOTE(review): the tableIndex parameter is currently unused.
 */
getAudioMap(tableIndex: number): string[] {
  return [
    "Opening", // 0x00
    "Opening Demo",
    "Title Screen",
    "Select File",
    "Castle Grounds",
    "", // Two Beeps
    "Staff Roll",
    "Inside the Castle",
    "Free-Play Room",
    "Star Lift",
    "Preparations",
    "Rules Map",
    "", // Two Beeps
    "The Adventure Begins",
    "", // Two Beeps
    "The Adventure Ends",
    "Begin Mini-Game", // 0x10
    "", // Two Beeps
    "Here's the Star",
    "Still Going",
    "Mini-Game End 1",
    "Mini-Game End 2",
    "Mini-Game End 3",
    "Commence Attack!",
    "Chilly Waters",
    "Deep Bloober Sea",
    "Spiny Desert",
    "Woody Woods",
    "Creepy Cavern",
    "Waluigi",
    "Good Luck!",
    "The Winner is... Me!",
    "A Winner is ME!", // 0x20
    "Foolish Bowser",
    "", // Two Beeps
    "", // Two Beeps
    "Bowser Event",
    "", // Two Beeps
    "Bring it On!",
    "", // Two Beeps
    "Waluigi Appears!",
    "VS Millennium Star!",
    "Peaceful song, bells",
    "", // Two Beeps
    "Defeat...",
    "Aim",
    "Don't Hurry",
    "Panic!",
    "Fighting Spirit", // 0x30
    "Got It?",
    "Let's Get a Move On",
    "Looking Ahead",
    "Big Trouble!",
    "What To Do!?!",
    "Mustn't Panic",
    "Nice and Easy",
    "On Your Toes",
    "Prologue 1",
    "Prologue 2",
    "Prologue 3",
    "Genie's Theme",
    "Jeanie's Theme",
    "Start Battle",
    "Bang Out a Drum Intro",
    "Bang Out a Drum Fill", // 0x40
    "Stardust Battle",
    "Chance Time",
    "Game Guy Mini-Game",
    "Item Mini-Game",
    "Mushroom Power-Up!",
    "Game Guy Winner!",
    "Game Guy Loser!",
    "Game Guy Dance",
    "Drum Roll",
    "Sound Config",
    "", // Two Beeps
    "Battle Room",
    "Gamble Room",
    "Murmur",
    "Tension Drums",
    "", // Two Beeps // 0x50
    "", // "Chilly Waters splashscreen fanfare" // 0x6E
    "", // "Quick fanfare" // 0x70
    "", // "Sad fanfare" // 0x71
    "", // "Medium fanfare" // 0x72
    "", // "Fanfare" // 0x73
  ];
}
/** Delegates to the shared MP3 sound effect name table. */
getSoundEffectMap(table: number): string[] {
  return getSoundEffectMapMP3(table);
}
// Mostly a MP1 copy for now.
/**
 * Maps ROM text byte values to display strings for the string editor.
 * Bytes 0x01-0x09 are color/effect control codes, 0x11-0x16 are format
 * parameter placeholders, and several 0x21+ values map to button glyphs
 * in a private-use font range (\u3000+). Unmarked gaps pass through
 * as plain ASCII.
 */
getCharacterMap(): { [num: number]: string } {
  return {
    0x00: "", // NULL terminator
    0x01: "<BLACK>",
    0x02: "<DEFAULT>",
    0x03: "<RED>",
    0x04: "<PURPLE>",
    0x05: "<GREEN>",
    0x06: "<BLUE>",
    0x07: "<YELLOW>",
    0x08: "<WHITE>",
    0x09: "<SEIZURE>",
    0x0A: "\n",
    0x0B: "\u3014", // FEED Carriage return / start of bubble?
    0x0C: "○", // 2ND BYTE OF PLAYER CHOICE
    0x0D: "\t", // UNCONFIRMED / WRONG
    0x0E: "\t", // 1ST BYTE OF PLAYER CHOICE
    // 0x0F - nothing
    // 0x10: " ", works but not used?
    0x11: "{0}", // These are format params that get replaced with various things
    0x12: "{1}",
    0x13: "{2}",
    0x14: "{3}",
    0x15: "{4}",
    0x16: "{5}",
    // Theoretically there may be more up through 0x20?
    // 0x18 - nothing
    0x20: " ",
    0x21: "\u3000", // ! A button
    0x22: "\u3001", // " B button
    0x23: "\u3002", // C-up button
    0x24: "\u3003", // C-right button
    0x25: "\u3004", // C-left button
    0x26: "\u3005", // & C-down button
    0x27: "\u3006", // ' Z button
    0x28: "\u3007", // ( Analog stick
    0x29: "\u3008", // ) (coin)
    0x2A: "\u3009", // * Star
    0x2B: "\u3010", // , S button
    0x2C: "\u3011", // , R button
    // 0x2D - nothing
    // 0x2E - nothing
    // 0x2F - nothing
    // 0x30 - 0x39: 0-9 ascii
    0x3A: "\u3012", // Hollow coin
    0x3B: "\u3013", // Hollow star
    0x3C: "+", // <
    0x3D: "-", // =
    0x3E: "x", // > Little x
    0x3F: "->", // Little right ARROW
    // 0x40 - nothing
    // 0x41 - 0x5A: A-Z ascii
    0x5B: "\"", // [ End quotes
    0x5C: "'", // \ Single quote
    0x5D: "(", // ] Open parenthesis
    0x5E: ")",
    0x5F: "/", // _
    // 0x60 - nothing
    // 0x61 - 0x7A: a-z ascii
    0x7B: ":", // :
    0x7E: "&", // ~
    0x80: "\"", // Double quote no angle
    0x81: "°", // . Degree
    0x82: ",", // ,
    0x83: "°", // Low circle FIXME
    0x85: ".", // … Period
    0xC0: "“", // A`
    0xC1: "”", // A'
    0xC2: "!", // A^
    0xC3: "?", // A~
    0xFF: "\u3015", // PAUSE
  };
}
}(); | the_stack |
import * as coreHttp from "@azure/core-http";
/** Result of listing the filesystems under an account. */
export interface FileSystemList {
  filesystems?: FileSystem[];
}
/** A filesystem entry returned by the list filesystems operation. */
export interface FileSystem {
  name?: string;
  lastModified?: Date;
  etag?: string;
}
/** Top-level error returned by the service. */
export interface StorageError {
  /** The service error response object. */
  error?: StorageErrorError;
  /** The service error code. */
  code?: string;
}
/** The service error response object. */
export interface StorageErrorError {
  /** The service error code. */
  code?: string;
  /** The service error message. */
  message?: string;
}
/** Result of listing the paths within a filesystem. */
export interface PathList {
  paths?: Path[];
}
/** A file or directory entry returned by the list paths operation. */
export interface Path {
  name?: string;
  isDirectory?: boolean;
  lastModified?: Date;
  etag?: string;
  contentLength?: number;
  owner?: string;
  group?: string;
  permissions?: string;
}
/** An enumeration of blobs */
export interface ListBlobsHierarchySegmentResponse {
  serviceEndpoint: string;
  containerName: string;
  /** Filter: only blobs whose names begin with this prefix were listed. */
  prefix?: string;
  /** Continuation marker supplied with the request, if any. */
  marker?: string;
  maxResults?: number;
  /** Delimiter used to group results into BlobPrefix entries. */
  delimiter?: string;
  segment: BlobHierarchyListSegment;
  /** Marker to pass to the next list call; empty/absent when the listing is complete. */
  nextMarker?: string;
}
/** One segment of a hierarchical blob listing. */
export interface BlobHierarchyListSegment {
  /** Virtual-directory groupings produced by the delimiter. */
  blobPrefixes?: BlobPrefix[];
  blobItems: BlobItemModel[];
}
/** A virtual-directory prefix grouping in a hierarchical listing. */
export interface BlobPrefix {
  name: string;
}
/** An Azure Storage blob */
export interface BlobItemModel {
  name: string;
  /** True when this entry is a soft-deleted blob. */
  deleted: boolean;
  snapshot: string;
  versionId?: string;
  isCurrentVersion?: boolean;
  /** Properties of a blob */
  properties: BlobPropertiesModel;
  deletionId?: string;
}
/** Properties of a blob */
export interface BlobPropertiesModel {
  creationTime?: Date;
  lastModified: Date;
  etag: string;
  /** Size in bytes */
  contentLength?: number;
  contentType?: string;
  contentEncoding?: string;
  contentLanguage?: string;
  contentMD5?: Uint8Array;
  contentDisposition?: string;
  cacheControl?: string;
  blobSequenceNumber?: number;
  /** Identifier of the pending/most recent copy operation, if any. */
  copyId?: string;
  copySource?: string;
  /** Bytes copied / total bytes, as reported by the copy operation. */
  copyProgress?: string;
  copyCompletionTime?: Date;
  copyStatusDescription?: string;
  serverEncrypted?: boolean;
  incrementalCopy?: boolean;
  destinationSnapshot?: string;
  /** Set only for soft-deleted blobs. */
  deletedTime?: Date;
  remainingRetentionDays?: number;
  accessTierInferred?: boolean;
  customerProvidedKeySha256?: string;
  /** The name of the encryption scope under which the blob is encrypted. */
  encryptionScope?: string;
  accessTierChangeTime?: Date;
  tagCount?: number;
  expiresOn?: Date;
  sealed?: boolean;
  lastAccessedOn?: Date;
}
/** Summary of a recursive set-ACL operation, including per-entry failures. */
export interface SetAccessControlRecursiveResponse {
  directoriesSuccessful?: number;
  filesSuccessful?: number;
  failureCount?: number;
  failedEntries?: AclFailedEntry[];
}
/** A single path whose ACL could not be set during a recursive operation. */
export interface AclFailedEntry {
  name?: string;
  type?: string;
  errorMessage?: string;
}
/** Defines headers for Service_listFileSystems operation. */
export interface ServiceListFileSystemsHeaders {
/** A UTC date/time value generated by the service that indicates the time at which the response was initiated. */
date?: Date;
/** A server-generated UUID recorded in the analytics logs for troubleshooting and correlation. */
requestId?: string;
/** The version of the REST protocol used to process the request. */
version?: string;
/** If the number of filesystems to be listed exceeds the maxResults limit, a continuation token is returned in this response header. When a continuation token is returned in the response, it must be specified in a subsequent invocation of the list operation to continue listing the filesystems. */
continuation?: string;
/** The content type of list filesystem response. The default content type is application/json. */
contentType?: string;
}
/** Defines headers for Service_listFileSystems operation. */
export interface ServiceListFileSystemsExceptionHeaders {
errorCode?: string;
}
/** Defines headers for FileSystem_create operation. */
export interface FileSystemCreateHeaders {
/** A UTC date/time value generated by the service that indicates the time at which the response was initiated. */
date?: Date;
/** An HTTP entity tag associated with the FileSystem. */
etag?: string;
/** The date and time the filesystem was last modified. Operations on files and directories do not affect the last modified time. */
lastModified?: Date;
/** A server-generated UUID recorded in the analytics logs for troubleshooting and correlation. */
clientRequestId?: string;
/** The version of the REST protocol used to process the request. */
version?: string;
/** A bool string indicates whether the namespace feature is enabled. If "true", the namespace is enabled for the filesystem. */
namespaceEnabled?: string;
}
/** Defines headers for FileSystem_create operation. */
export interface FileSystemCreateExceptionHeaders {
errorCode?: string;
}
/** Defines headers for FileSystem_setProperties operation. */
export interface FileSystemSetPropertiesHeaders {
/** A UTC date/time value generated by the service that indicates the time at which the response was initiated. */
date?: Date;
/** An HTTP entity tag associated with the filesystem. Changes to filesystem properties affect the entity tag, but operations on files and directories do not. */
etag?: string;
/** The date and time the filesystem was last modified. Changes to filesystem properties update the last modified time, but operations on files and directories do not. */
lastModified?: Date;
/** A server-generated UUID recorded in the analytics logs for troubleshooting and correlation. */
requestId?: string;
/** The version of the REST protocol used to process the request. */
version?: string;
}
/** Defines headers for FileSystem_setProperties operation. */
export interface FileSystemSetPropertiesExceptionHeaders {
errorCode?: string;
}
/** Defines headers for FileSystem_getProperties operation. */
export interface FileSystemGetPropertiesHeaders {
/** A UTC date/time value generated by the service that indicates the time at which the response was initiated. */
date?: Date;
/** An HTTP entity tag associated with the filesystem. Changes to filesystem properties affect the entity tag, but operations on files and directories do not. */
etag?: string;
/** The date and time the filesystem was last modified. Changes to filesystem properties update the last modified time, but operations on files and directories do not. */
lastModified?: Date;
/** A server-generated UUID recorded in the analytics logs for troubleshooting and correlation. */
requestId?: string;
/** The version of the REST protocol used to process the request. */
version?: string;
/** The user-defined properties associated with the filesystem. A comma-separated list of name and value pairs in the format "n1=v1, n2=v2, ...", where each value is a base64 encoded string. Note that the string may only contain ASCII characters in the ISO-8859-1 character set. */
properties?: string;
/** A bool string indicates whether the namespace feature is enabled. If "true", the namespace is enabled for the filesystem. */
namespaceEnabled?: string;
}
/** Defines headers for FileSystem_getProperties operation. */
export interface FileSystemGetPropertiesExceptionHeaders {
errorCode?: string;
}
/** Defines headers for FileSystem_delete operation. */
export interface FileSystemDeleteHeaders {
/** A server-generated UUID recorded in the analytics logs for troubleshooting and correlation. */
requestId?: string;
/** The version of the REST protocol used to process the request. */
version?: string;
/** A UTC date/time value generated by the service that indicates the time at which the response was initiated. */
date?: Date;
}
/** Defines headers for FileSystem_delete operation. */
export interface FileSystemDeleteExceptionHeaders {
errorCode?: string;
}
/** Defines headers for FileSystem_listPaths operation. */
export interface FileSystemListPathsHeaders {
/** A UTC date/time value generated by the service that indicates the time at which the response was initiated. */
date?: Date;
/** An HTTP entity tag associated with the filesystem. Changes to filesystem properties affect the entity tag, but operations on files and directories do not. */
etag?: string;
/** The date and time the filesystem was last modified. Changes to filesystem properties update the last modified time, but operations on files and directories do not. */
lastModified?: Date;
/** A server-generated UUID recorded in the analytics logs for troubleshooting and correlation. */
requestId?: string;
/** The version of the REST protocol used to process the request. */
version?: string;
/** If the number of paths to be listed exceeds the maxResults limit, a continuation token is returned in this response header. When a continuation token is returned in the response, it must be specified in a subsequent invocation of the list operation to continue listing the paths. */
continuation?: string;
/** Error Code */
errorCode?: string;
}
/** Defines headers for FileSystem_listPaths operation. */
export interface FileSystemListPathsExceptionHeaders {
errorCode?: string;
}
/** Defines headers for FileSystem_listBlobHierarchySegment operation. */
export interface FileSystemListBlobHierarchySegmentHeaders {
/** The media type of the body of the response. For List Blobs this is 'application/xml' */
contentType?: string;
/** If a client request id header is sent in the request, this header will be present in the response with the same value. */
clientRequestId?: string;
/** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */
requestId?: string;
/** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */
version?: string;
/** UTC date/time value generated by the service that indicates the time at which the response was initiated */
date?: Date;
/** Error Code */
errorCode?: string;
}
/** Defines headers for FileSystem_listBlobHierarchySegment operation. */
export interface FileSystemListBlobHierarchySegmentExceptionHeaders {
errorCode?: string;
}
/** Defines headers for Path_create operation. */
export interface PathCreateHeaders {
/** A UTC date/time value generated by the service that indicates the time at which the response was initiated. */
date?: Date;
/** An HTTP entity tag associated with the file or directory. */
etag?: string;
/** The date and time the file or directory was last modified. Write operations on the file or directory update the last modified time. */
lastModified?: Date;
/** A server-generated UUID recorded in the analytics logs for troubleshooting and correlation. */
requestId?: string;
/** The version of the REST protocol used to process the request. */
version?: string;
/** When renaming a directory, the number of paths that are renamed with each invocation is limited. If the number of paths to be renamed exceeds this limit, a continuation token is returned in this response header. When a continuation token is returned in the response, it must be specified in a subsequent invocation of the rename operation to continue renaming the directory. */
continuation?: string;
/** The size of the resource in bytes. */
contentLength?: number;
/** Error Code */
errorCode?: string;
}
/** Defines headers for Path_create operation. */
export interface PathCreateExceptionHeaders {
errorCode?: string;
}
/** Defines headers for Path_update operation. */
export interface PathUpdateHeaders {
  /** A UTC date/time value generated by the service that indicates the time at which the response was initiated. */
  date?: Date;
  /** An HTTP entity tag associated with the file or directory. */
  etag?: string;
  /** The date and time the file or directory was last modified. Write operations on the file or directory update the last modified time. */
  lastModified?: Date;
  /** Indicates that the service supports requests for partial file content. */
  acceptRanges?: string;
  /** If the Cache-Control request header has previously been set for the resource, that value is returned in this header. */
  cacheControl?: string;
  /** If the Content-Disposition request header has previously been set for the resource, that value is returned in this header. */
  contentDisposition?: string;
  /** If the Content-Encoding request header has previously been set for the resource, that value is returned in this header. */
  contentEncoding?: string;
  /** If the Content-Language request header has previously been set for the resource, that value is returned in this header. */
  contentLanguage?: string;
  /** The size of the resource in bytes. */
  contentLength?: number;
  /** Indicates the range of bytes returned in the event that the client requested a subset of the file by setting the Range request header. */
  contentRange?: string;
  /** The content type specified for the resource. If no content type was specified, the default content type is application/octet-stream. */
  contentType?: string;
  /** An MD5 hash of the request content. This header is only returned for "Flush" operation. This header is returned so that the client can check for message content integrity. This header refers to the content of the request, not actual file content. */
  contentMD5?: string;
  /** User-defined properties associated with the file or directory, in the format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each value is a base64 encoded string. Note that the string may only contain ASCII characters in the ISO-8859-1 character set. */
  properties?: string;
  /** When performing setAccessControlRecursive on a directory, the number of paths that are processed with each invocation is limited. If the number of paths to be processed exceeds this limit, a continuation token is returned in this response header. When a continuation token is returned in the response, it must be specified in a subsequent invocation of the setAccessControlRecursive operation to continue the setAccessControlRecursive operation on the directory. */
  xMsContinuation?: string;
  /** A server-generated UUID recorded in the analytics logs for troubleshooting and correlation. */
  requestId?: string;
  /** The version of the REST protocol used to process the request. */
  version?: string;
  /** Error Code */
  errorCode?: string;
}
/** Defines headers returned in error responses for the Path_update operation. */
export interface PathUpdateExceptionHeaders {
  /** Error Code */
  errorCode?: string;
}
/** Defines headers for Path_lease operation. */
export interface PathLeaseHeaders {
  /** A UTC date/time value generated by the service that indicates the time at which the response was initiated. */
  date?: Date;
  /** An HTTP entity tag associated with the file. */
  etag?: string;
  /** The date and time the file was last modified. Write operations on the file update the last modified time. */
  lastModified?: Date;
  /** A server-generated UUID recorded in the analytics logs for troubleshooting and correlation. */
  requestId?: string;
  /** The version of the REST protocol used to process the request. */
  version?: string;
  /** A successful "renew" action returns the lease ID. */
  leaseId?: string;
}
/** Defines headers returned in error responses for the Path_lease operation. */
export interface PathLeaseExceptionHeaders {
  /** Error Code */
  errorCode?: string;
}
/** Defines headers for Path_read operation. */
export interface PathReadHeaders {
  /** Indicates that the service supports requests for partial file content. */
  acceptRanges?: string;
  /** If the Cache-Control request header has previously been set for the resource, that value is returned in this header. */
  cacheControl?: string;
  /** If the Content-Disposition request header has previously been set for the resource, that value is returned in this header. */
  contentDisposition?: string;
  /** If the Content-Encoding request header has previously been set for the resource, that value is returned in this header. */
  contentEncoding?: string;
  /** If the Content-Language request header has previously been set for the resource, that value is returned in this header. */
  contentLanguage?: string;
  /** The size of the resource in bytes. */
  contentLength?: number;
  /** Indicates the range of bytes returned in the event that the client requested a subset of the file by setting the Range request header. */
  contentRange?: string;
  /** The content type specified for the resource. If no content type was specified, the default content type is application/octet-stream. */
  contentType?: string;
  /** The MD5 hash of complete file. If the file has an MD5 hash and this read operation is to read the complete file, this response header is returned so that the client can check for message content integrity. */
  contentMD5?: string;
  /** A UTC date/time value generated by the service that indicates the time at which the response was initiated. */
  date?: Date;
  /** An HTTP entity tag associated with the file or directory. */
  etag?: string;
  /** The date and time the file or directory was last modified. Write operations on the file or directory update the last modified time. */
  lastModified?: Date;
  /** A server-generated UUID recorded in the analytics logs for troubleshooting and correlation. */
  requestId?: string;
  /** The version of the REST protocol used to process the request. */
  version?: string;
  /** The type of the resource. The value may be "file" or "directory". If not set, the value is "file". */
  resourceType?: string;
  /** The user-defined properties associated with the file or directory, in the format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each value is a base64 encoded string. Note that the string may only contain ASCII characters in the ISO-8859-1 character set. */
  properties?: string;
  /** When a resource is leased, specifies whether the lease is of infinite or fixed duration. */
  leaseDuration?: string;
  /** Lease state of the resource. */
  leaseState?: string;
  /** The lease status of the resource. */
  leaseStatus?: string;
}
/** Defines headers returned in error responses for the Path_read operation. */
export interface PathReadExceptionHeaders {
  /** Error Code */
  errorCode?: string;
}
/** Defines headers for Path_getProperties operation. */
export interface PathGetPropertiesHeaders {
  /** Indicates that the service supports requests for partial file content. */
  acceptRanges?: string;
  /** If the Cache-Control request header has previously been set for the resource, that value is returned in this header. */
  cacheControl?: string;
  /** If the Content-Disposition request header has previously been set for the resource, that value is returned in this header. */
  contentDisposition?: string;
  /** If the Content-Encoding request header has previously been set for the resource, that value is returned in this header. */
  contentEncoding?: string;
  /** If the Content-Language request header has previously been set for the resource, that value is returned in this header. */
  contentLanguage?: string;
  /** The size of the resource in bytes. */
  contentLength?: number;
  /** Indicates the range of bytes returned in the event that the client requested a subset of the file by setting the Range request header. */
  contentRange?: string;
  /** The content type specified for the resource. If no content type was specified, the default content type is application/octet-stream. */
  contentType?: string;
  /** The MD5 hash of complete file stored in storage. This header is returned only for "GetProperties" operation. If the Content-MD5 header has been set for the file, this response header is returned for GetProperties call so that the client can check for message content integrity. */
  contentMD5?: string;
  /** A UTC date/time value generated by the service that indicates the time at which the response was initiated. */
  date?: Date;
  /** An HTTP entity tag associated with the file or directory. */
  etag?: string;
  /** The date and time the file or directory was last modified. Write operations on the file or directory update the last modified time. */
  lastModified?: Date;
  /** A server-generated UUID recorded in the analytics logs for troubleshooting and correlation. */
  requestId?: string;
  /** The version of the REST protocol used to process the request. */
  version?: string;
  /** The type of the resource. The value may be "file" or "directory". If not set, the value is "file". */
  resourceType?: string;
  /** The user-defined properties associated with the file or directory, in the format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each value is a base64 encoded string. Note that the string may only contain ASCII characters in the ISO-8859-1 character set. */
  properties?: string;
  /** The owner of the file or directory. Included in the response if Hierarchical Namespace is enabled for the account. */
  owner?: string;
  /** The owning group of the file or directory. Included in the response if Hierarchical Namespace is enabled for the account. */
  group?: string;
  /** The POSIX access permissions for the file owner, the file owning group, and others. Included in the response if Hierarchical Namespace is enabled for the account. */
  permissions?: string;
  /** The POSIX access control list for the file or directory. Included in the response only if the action is "getAccessControl" and Hierarchical Namespace is enabled for the account. */
  acl?: string;
  /** When a resource is leased, specifies whether the lease is of infinite or fixed duration. */
  leaseDuration?: string;
  /** Lease state of the resource. */
  leaseState?: string;
  /** The lease status of the resource. */
  leaseStatus?: string;
  /** Error Code */
  errorCode?: string;
}
/** Defines headers returned in error responses for the Path_getProperties operation. */
export interface PathGetPropertiesExceptionHeaders {
  /** Error Code */
  errorCode?: string;
}
/** Defines headers for Path_delete operation. */
export interface PathDeleteHeaders {
  /** A UTC date/time value generated by the service that indicates the time at which the response was initiated. */
  date?: Date;
  /** A server-generated UUID recorded in the analytics logs for troubleshooting and correlation. */
  requestId?: string;
  /** The version of the REST protocol used to process the request. */
  version?: string;
  /** When deleting a directory, the number of paths that are deleted with each invocation is limited. If the number of paths to be deleted exceeds this limit, a continuation token is returned in this response header. When a continuation token is returned in the response, it must be specified in a subsequent invocation of the delete operation to continue deleting the directory. */
  continuation?: string;
  /** Returned only for hierarchical namespace enabled accounts when soft delete is enabled. A unique identifier for the entity that can be used to restore it. See the Undelete REST API for more information. */
  deletionId?: string;
  /** Error Code */
  errorCode?: string;
}
/** Defines headers returned in error responses for the Path_delete operation. */
export interface PathDeleteExceptionHeaders {
  /** Error Code */
  errorCode?: string;
}
/** Defines headers for Path_setAccessControl operation. */
export interface PathSetAccessControlHeaders {
  /** A UTC date/time value generated by the service that indicates the time at which the response was initiated. */
  date?: Date;
  /** An HTTP entity tag associated with the file or directory. */
  etag?: string;
  /** The date and time the file or directory was last modified. Write operations on the file or directory update the last modified time. */
  lastModified?: Date;
  /** If a client request id header is sent in the request, this header will be present in the response with the same value. */
  clientRequestId?: string;
  /** A server-generated UUID recorded in the analytics logs for troubleshooting and correlation. */
  requestId?: string;
  /** The version of the REST protocol used to process the request. */
  version?: string;
}
/** Defines headers returned in error responses for the Path_setAccessControl operation. */
export interface PathSetAccessControlExceptionHeaders {
  /** If a client request id header is sent in the request, this header will be present in the response with the same value. */
  clientRequestId?: string;
  /** A server-generated UUID recorded in the analytics logs for troubleshooting and correlation. */
  requestId?: string;
  /** The version of the REST protocol used to process the request. */
  version?: string;
}
/** Defines headers for Path_setAccessControlRecursive operation. */
export interface PathSetAccessControlRecursiveHeaders {
  /** A UTC date/time value generated by the service that indicates the time at which the response was initiated. */
  date?: Date;
  /** If a client request id header is sent in the request, this header will be present in the response with the same value. */
  clientRequestId?: string;
  /** When performing setAccessControlRecursive on a directory, the number of paths that are processed with each invocation is limited. If the number of paths to be processed exceeds this limit, a continuation token is returned in this response header. When a continuation token is returned in the response, it must be specified in a subsequent invocation of the setAccessControlRecursive operation to continue the setAccessControlRecursive operation on the directory. */
  continuation?: string;
  /** A server-generated UUID recorded in the analytics logs for troubleshooting and correlation. */
  requestId?: string;
  /** The version of the REST protocol used to process the request. */
  version?: string;
}
/** Defines headers returned in error responses for the Path_setAccessControlRecursive operation. */
export interface PathSetAccessControlRecursiveExceptionHeaders {
  /** If a client request id header is sent in the request, this header will be present in the response with the same value. */
  clientRequestId?: string;
  /** A server-generated UUID recorded in the analytics logs for troubleshooting and correlation. */
  requestId?: string;
  /** The version of the REST protocol used to process the request. */
  version?: string;
}
/** Defines headers for Path_flushData operation. */
export interface PathFlushDataHeaders {
  /** A UTC date/time value generated by the service that indicates the time at which the response was initiated. */
  date?: Date;
  /** An HTTP entity tag associated with the file or directory. */
  etag?: string;
  /** The date and time the file or directory was last modified. Write operations on the file or directory update the last modified time. */
  lastModified?: Date;
  /** The size of the resource in bytes. */
  contentLength?: number;
  /** If a client request id header is sent in the request, this header will be present in the response with the same value. */
  clientRequestId?: string;
  /** A server-generated UUID recorded in the analytics logs for troubleshooting and correlation. */
  requestId?: string;
  /** The version of the REST protocol used to process the request. */
  version?: string;
}
/** Defines headers returned in error responses for the Path_flushData operation. */
export interface PathFlushDataExceptionHeaders {
  /** If a client request id header is sent in the request, this header will be present in the response with the same value. */
  clientRequestId?: string;
  /** A server-generated UUID recorded in the analytics logs for troubleshooting and correlation. */
  requestId?: string;
  /** The version of the REST protocol used to process the request. */
  version?: string;
}
/** Defines headers for Path_appendData operation. */
export interface PathAppendDataHeaders {
  /** A UTC date/time value generated by the service that indicates the time at which the response was initiated. */
  date?: Date;
  /** A server-generated UUID recorded in the analytics logs for troubleshooting and correlation. */
  requestId?: string;
  /** If a client request id header is sent in the request, this header will be present in the response with the same value. */
  clientRequestId?: string;
  /** The version of the REST protocol used to process the request. */
  version?: string;
  /** An HTTP entity tag associated with the file or directory. */
  etag?: string;
  /** An MD5 hash returned so that the client can check for message content integrity. NOTE(review): the generated description referred to reading the full blob, which does not apply to an append operation; for appendData this presumably covers the appended request content — confirm against the Data Lake Path Update/Append REST documentation. */
  contentMD5?: Uint8Array;
  /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */
  xMsContentCrc64?: Uint8Array;
  /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */
  isServerEncrypted?: boolean;
}
/** Defines headers returned in error responses for the Path_appendData operation. */
export interface PathAppendDataExceptionHeaders {
  /** If a client request id header is sent in the request, this header will be present in the response with the same value. */
  clientRequestId?: string;
  /** A server-generated UUID recorded in the analytics logs for troubleshooting and correlation. */
  requestId?: string;
  /** The version of the REST protocol used to process the request. */
  version?: string;
}
/** Defines headers for Path_setExpiry operation. */
export interface PathSetExpiryHeaders {
  /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */
  etag?: string;
  /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */
  lastModified?: Date;
  /** If a client request id header is sent in the request, this header will be present in the response with the same value. */
  clientRequestId?: string;
  /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */
  requestId?: string;
  /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */
  version?: string;
  /** UTC date/time value generated by the service that indicates the time at which the response was initiated. */
  date?: Date;
  /** Error Code */
  errorCode?: string;
}
/** Defines headers returned in error responses for the Path_setExpiry operation. */
export interface PathSetExpiryExceptionHeaders {
  /** Error Code */
  errorCode?: string;
}
/** Defines headers for Path_undelete operation. */
export interface PathUndeleteHeaders {
  /** If a client request id header is sent in the request, this header will be present in the response with the same value. */
  clientRequestId?: string;
  /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */
  requestId?: string;
  /** The type of the resource. The value may be "file" or "directory". If not set, the value is "file". */
  resourceType?: string;
  /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */
  version?: string;
  /** UTC date/time value generated by the service that indicates the time at which the response was initiated. */
  date?: Date;
}
/** Defines headers returned in error responses for the Path_undelete operation. */
export interface PathUndeleteExceptionHeaders {
  /** Error Code */
  errorCode?: string;
}
/** Parameter group: conditional-request headers keyed on the target resource's modification time and ETag. */
export interface ModifiedAccessConditions {
  /** Specify this header value to operate only on a blob if it has been modified since the specified date/time. */
  ifModifiedSince?: Date;
  /** Specify this header value to operate only on a blob if it has not been modified since the specified date/time. */
  ifUnmodifiedSince?: Date;
  /** Specify an ETag value to operate only on blobs with a matching value. */
  ifMatch?: string;
  /** Specify an ETag value to operate only on blobs without a matching value. */
  ifNoneMatch?: string;
}
/** Parameter group: standard HTTP content headers stored with a path. */
export interface PathHttpHeaders {
  /** Optional. Sets the blob's cache control. If specified, this property is stored with the blob and returned with a read request. */
  cacheControl?: string;
  /** Optional. Sets the blob's content encoding. If specified, this property is stored with the blob and returned with a read request. */
  contentEncoding?: string;
  /** Optional. Set the blob's content language. If specified, this property is stored with the blob and returned with a read request. */
  contentLanguage?: string;
  /** Optional. Sets the blob's Content-Disposition header. */
  contentDisposition?: string;
  /** Optional. Sets the blob's content type. If specified, this property is stored with the blob and returned with a read request. */
  contentType?: string;
  /** Specify the transactional md5 for the body, to be validated by the service. */
  contentMD5?: Uint8Array;
  /** Specify the transactional hash for the body, to be validated by the service. NOTE(review): the generated doc duplicates contentMD5's description; confirm whether this carries a non-MD5 transactional hash before documenting further. */
  transactionalContentHash?: Uint8Array;
}
/** Parameter group: lease precondition for operations on leased resources. */
export interface LeaseAccessConditions {
  /** If specified, the operation only succeeds if the resource's lease is active and matches this ID. */
  leaseId?: string;
}
/** Parameter group: conditional-request headers evaluated against the COPY SOURCE resource. */
export interface SourceModifiedAccessConditions {
  /** Specify an ETag value to operate only on blobs with a matching value. */
  sourceIfMatch?: string;
  /** Specify an ETag value to operate only on blobs without a matching value. */
  sourceIfNoneMatch?: string;
  /** Specify this header value to operate only on a blob if it has been modified since the specified date/time. */
  sourceIfModifiedSince?: Date;
  /** Specify this header value to operate only on a blob if it has not been modified since the specified date/time. */
  sourceIfUnmodifiedSince?: Date;
}
/** Defines values for ListBlobsIncludeItem: extra datasets a blob listing may include. */
export type ListBlobsIncludeItem = "copy" | "deleted" | "metadata" | "snapshots" | "uncommittedblobs" | "versions" | "tags";
/** Defines values for PathResourceType: the kind of resource a path addresses. */
export type PathResourceType = "directory" | "file";
/** Defines values for PathRenameMode: rename semantics applied by the service. */
export type PathRenameMode = "legacy" | "posix";
/** Defines values for PathUpdateAction: the action performed by a Path_update call. */
export type PathUpdateAction = "append" | "flush" | "setProperties" | "setAccessControl" | "setAccessControlRecursive";
/** Defines values for PathSetAccessControlRecursiveMode: how recursive ACL changes are applied. */
export type PathSetAccessControlRecursiveMode = "set" | "modify" | "remove";
/** Defines values for PathLeaseAction: the lease operation to perform. */
export type PathLeaseAction = "acquire" | "break" | "change" | "renew" | "release";
/** Defines values for PathGetPropertiesAction: the optional action for a Path_getProperties call. */
export type PathGetPropertiesAction = "getAccessControl" | "getStatus";
/** Defines values for PathExpiryOptions: how a path's expiry time is interpreted. */
export type PathExpiryOptions = "NeverExpire" | "RelativeToCreation" | "RelativeToNow" | "Absolute";
/** Optional parameters. */
export interface ServiceListFileSystemsOptionalParams
  extends coreHttp.OperationOptions {
  /** Filters results to filesystems within the specified prefix. */
  prefix?: string;
  /** Optional. A continuation token returned by a previous listing operation; specify it to continue listing from where the previous invocation stopped. NOTE(review): the generated description referred to the delete operation — confirm against the List Filesystems REST documentation. */
  continuation?: string;
  /** An optional value that specifies the maximum number of items to return. If omitted or greater than 5,000, the response will include up to 5,000 items. */
  maxResults?: number;
  /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
  requestId?: string;
  /** The timeout parameter is expressed in seconds. For more information, see <a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a> */
  timeout?: number;
}
/** Contains response data for the listFileSystems operation. */
export type ServiceListFileSystemsResponse = ServiceListFileSystemsHeaders &
  FileSystemList & {
    /** The underlying HTTP response. */
    _response: coreHttp.HttpResponse & {
      /** The response body as text (string format) */
      bodyAsText: string;
      /** The response body as parsed JSON or XML */
      parsedBody: FileSystemList;
      /** The parsed HTTP response headers. */
      parsedHeaders: ServiceListFileSystemsHeaders;
    };
  };
/** Optional parameters for the FileSystem_create operation. */
export interface FileSystemCreateOptionalParams
  extends coreHttp.OperationOptions {
  /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
  requestId?: string;
  /** The timeout parameter is expressed in seconds. For more information, see <a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a> */
  timeout?: number;
  /** Optional. User-defined properties to be stored with the filesystem, in the format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each value is a base64 encoded string. Note that the string may only contain ASCII characters in the ISO-8859-1 character set. If the filesystem exists, any properties not included in the list will be removed. All properties are removed if the header is omitted. To merge new and existing properties, first get all existing properties and the current E-Tag, then make a conditional request with the E-Tag and include values for all properties. */
  properties?: string;
}
/** Contains response data for the create (FileSystem_create) operation. */
export type FileSystemCreateResponse = FileSystemCreateHeaders & {
  /** The underlying HTTP response. */
  _response: coreHttp.HttpResponse & {
    /** The parsed HTTP response headers. */
    parsedHeaders: FileSystemCreateHeaders;
  };
};
/** Optional parameters for the FileSystem_setProperties operation. */
export interface FileSystemSetPropertiesOptionalParams
  extends coreHttp.OperationOptions {
  /** Parameter group */
  modifiedAccessConditions?: ModifiedAccessConditions;
  /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
  requestId?: string;
  /** The timeout parameter is expressed in seconds. For more information, see <a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a> */
  timeout?: number;
  /** Optional. User-defined properties to be stored with the filesystem, in the format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each value is a base64 encoded string. Note that the string may only contain ASCII characters in the ISO-8859-1 character set. If the filesystem exists, any properties not included in the list will be removed. All properties are removed if the header is omitted. To merge new and existing properties, first get all existing properties and the current E-Tag, then make a conditional request with the E-Tag and include values for all properties. */
  properties?: string;
}
/** Contains response data for the setProperties (FileSystem_setProperties) operation. */
export type FileSystemSetPropertiesResponse = FileSystemSetPropertiesHeaders & {
  /** The underlying HTTP response. */
  _response: coreHttp.HttpResponse & {
    /** The parsed HTTP response headers. */
    parsedHeaders: FileSystemSetPropertiesHeaders;
  };
};
/** Optional parameters for the FileSystem_getProperties operation. */
export interface FileSystemGetPropertiesOptionalParams
  extends coreHttp.OperationOptions {
  /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
  requestId?: string;
  /** The timeout parameter is expressed in seconds. For more information, see <a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a> */
  timeout?: number;
}
/** Contains response data for the getProperties (FileSystem_getProperties) operation. */
export type FileSystemGetPropertiesResponse = FileSystemGetPropertiesHeaders & {
  /** The underlying HTTP response. */
  _response: coreHttp.HttpResponse & {
    /** The parsed HTTP response headers. */
    parsedHeaders: FileSystemGetPropertiesHeaders;
  };
};
/** Optional parameters for the FileSystem_delete operation. */
export interface FileSystemDeleteOptionalParams
  extends coreHttp.OperationOptions {
  /** Parameter group */
  modifiedAccessConditions?: ModifiedAccessConditions;
  /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
  requestId?: string;
  /** The timeout parameter is expressed in seconds. For more information, see <a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a> */
  timeout?: number;
}
/** Contains response data for the delete (FileSystem_delete) operation. */
export type FileSystemDeleteResponse = FileSystemDeleteHeaders & {
  /** The underlying HTTP response. */
  _response: coreHttp.HttpResponse & {
    /** The parsed HTTP response headers. */
    parsedHeaders: FileSystemDeleteHeaders;
  };
};
/** Optional parameters for the FileSystem_listPaths operation. */
export interface FileSystemListPathsOptionalParams
  extends coreHttp.OperationOptions {
  /** Optional. A continuation token returned by a previous listing operation; specify it to continue listing paths from where the previous invocation stopped. NOTE(review): the generated description referred to the delete operation — confirm against the List Paths REST documentation. */
  continuation?: string;
  /** An optional value that specifies the maximum number of items to return. If omitted or greater than 5,000, the response will include up to 5,000 items. */
  maxResults?: number;
  /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
  requestId?: string;
  /** The timeout parameter is expressed in seconds. For more information, see <a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a> */
  timeout?: number;
  /** Optional. Filters results to paths within the specified directory. An error occurs if the directory does not exist. */
  path?: string;
  /** Optional. Valid only when Hierarchical Namespace is enabled for the account. If "true", the user identity values returned in the x-ms-owner, x-ms-group, and x-ms-acl response headers will be transformed from Azure Active Directory Object IDs to User Principal Names. If "false", the values will be returned as Azure Active Directory Object IDs. The default value is false. Note that group and application Object IDs are not translated because they do not have unique friendly names. */
  upn?: boolean;
}
/** Contains response data for the listPaths operation. */
export type FileSystemListPathsResponse = FileSystemListPathsHeaders &
  PathList & {
    /** The underlying HTTP response. */
    _response: coreHttp.HttpResponse & {
      /** The response body as text (string format) */
      bodyAsText: string;
      /** The response body as parsed JSON or XML */
      parsedBody: PathList;
      /** The parsed HTTP response headers. */
      parsedHeaders: FileSystemListPathsHeaders;
    };
  };
/** Optional parameters for the FileSystem_listBlobHierarchySegment operation. */
export interface FileSystemListBlobHierarchySegmentOptionalParams
  extends coreHttp.OperationOptions {
  /** Filters results to blobs whose names begin with the specified prefix. NOTE(review): the generated description said "filesystems", but this operation lists blobs — confirm against the List Blobs REST documentation. */
  prefix?: string;
  /** An optional value that specifies the maximum number of items to return. If omitted or greater than 5,000, the response will include up to 5,000 items. */
  maxResults?: number;
  /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
  requestId?: string;
  /** The timeout parameter is expressed in seconds. For more information, see <a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a> */
  timeout?: number;
  /** When the request includes this parameter, the operation returns a BlobPrefix element in the response body that acts as a placeholder for all blobs whose names begin with the same substring up to the appearance of the delimiter character. The delimiter may be a single character or a string. */
  delimiter?: string;
  /** A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the NextMarker value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the client. */
  marker?: string;
  /** Include this parameter to specify one or more datasets to include in the response. */
  include?: ListBlobsIncludeItem[];
}
/** Contains response data for the listBlobHierarchySegment operation. */
export type FileSystemListBlobHierarchySegmentResponse = FileSystemListBlobHierarchySegmentHeaders &
ListBlobsHierarchySegmentResponse & {
/** The underlying HTTP response. */
_response: coreHttp.HttpResponse & {
/** The response body as text (string format) */
bodyAsText: string;
/** The response body as parsed JSON or XML */
parsedBody: ListBlobsHierarchySegmentResponse;
/** The parsed HTTP response headers. */
parsedHeaders: FileSystemListBlobHierarchySegmentHeaders;
};
};
/** Optional parameters. */
export interface PathCreateOptionalParams extends coreHttp.OperationOptions {
/** Parameter group */
modifiedAccessConditions?: ModifiedAccessConditions;
/** Parameter group */
pathHttpHeaders?: PathHttpHeaders;
/** Parameter group */
leaseAccessConditions?: LeaseAccessConditions;
/** Parameter group */
sourceModifiedAccessConditions?: SourceModifiedAccessConditions;
/** Optional. When renaming a directory, the number of paths that are renamed with each invocation is limited. If the number of paths to be renamed exceeds this limit, a continuation token is returned in the response. When a continuation token is returned, it must be specified in a subsequent invocation of the operation to continue the rename. NOTE(review): the generated text described the *delete* operation; for Create, continuation applies to directory renames (see renameSource/mode below) — confirm against the REST specification. */
continuation?: string;
/** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
requestId?: string;
/** The timeout parameter is expressed in seconds. For more information, see <a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a> */
timeout?: number;
/** Optional. User-defined properties to be stored with the filesystem, in the format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each value is a base64 encoded string. Note that the string may only contain ASCII characters in the ISO-8859-1 character set. If the filesystem exists, any properties not included in the list will be removed. All properties are removed if the header is omitted. To merge new and existing properties, first get all existing properties and the current E-Tag, then make a conditional request with the E-Tag and include values for all properties. */
properties?: string;
/** Required only for Create File and Create Directory. The value must be "file" or "directory". */
resource?: PathResourceType;
/** Optional. Valid only when namespace is enabled. This parameter determines the behavior of the rename operation. The value must be "legacy" or "posix", and the default value will be "posix". */
mode?: PathRenameMode;
/** An optional file or directory to be renamed. The value must have the following format: "/{filesystem}/{path}". If "x-ms-properties" is specified, the properties will overwrite the existing properties; otherwise, the existing properties will be preserved. This value must be a URL percent-encoded string. Note that the string may only contain ASCII characters in the ISO-8859-1 character set. */
renameSource?: string;
/** A lease ID for the source path. If specified, the source path must have an active lease and the lease ID must match. */
sourceLeaseId?: string;
/** Optional and only valid if Hierarchical Namespace is enabled for the account. Sets POSIX access permissions for the file owner, the file owning group, and others. Each class may be granted read, write, or execute permission. The sticky bit is also supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. */
permissions?: string;
/** Optional and only valid if Hierarchical Namespace is enabled for the account. When creating a file or directory and the parent folder does not have a default ACL, the umask restricts the permissions of the file or directory to be created. The resulting permission is given by p bitwise and not u, where p is the permission and u is the umask. For example, if p is 0777 and u is 0057, then the resulting permission is 0720. The default permission is 0777 for a directory and 0666 for a file. The default umask is 0027. The umask must be specified in 4-digit octal notation (e.g. 0766). */
umask?: string;
}
/** Contains response data for the create operation. */
export type PathCreateResponse = PathCreateHeaders & {
/** The underlying HTTP response. */
_response: coreHttp.HttpResponse & {
/** The parsed HTTP response headers. */
parsedHeaders: PathCreateHeaders;
};
};
/** Optional parameters. */
export interface PathUpdateOptionalParams extends coreHttp.OperationOptions {
/** Parameter group */
modifiedAccessConditions?: ModifiedAccessConditions;
/** Parameter group */
pathHttpHeaders?: PathHttpHeaders;
/** Parameter group */
leaseAccessConditions?: LeaseAccessConditions;
/** Optional. The number of paths processed with each invocation is limited. If the number of paths to be processed exceeds this limit, a continuation token is returned in the response header x-ms-continuation. When a continuation token is returned in the response, it must be percent-encoded and specified in a subsequent invocation of setAccessControlRecursive operation. */
continuation?: string;
/** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
requestId?: string;
/** The timeout parameter is expressed in seconds. For more information, see <a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a> */
timeout?: number;
/** Optional. User-defined properties to be stored with the filesystem, in the format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each value is a base64 encoded string. Note that the string may only contain ASCII characters in the ISO-8859-1 character set. If the filesystem exists, any properties not included in the list will be removed. All properties are removed if the header is omitted. To merge new and existing properties, first get all existing properties and the current E-Tag, then make a conditional request with the E-Tag and include values for all properties. */
properties?: string;
/** Optional and only valid if Hierarchical Namespace is enabled for the account. Sets POSIX access permissions for the file owner, the file owning group, and others. Each class may be granted read, write, or execute permission. The sticky bit is also supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. */
permissions?: string;
/** Optional. Valid for "SetAccessControlRecursive" operation. It specifies the maximum number of files or directories on which the acl change will be applied. If omitted or greater than 2,000, the request will process up to 2,000 items */
maxRecords?: number;
/** Optional. Valid for "SetAccessControlRecursive" operation. If set to false, the operation will terminate quickly on encountering user errors (4XX). If true, the operation will ignore user errors and proceed with the operation on other sub-entities of the directory. Continuation token will only be returned when forceFlag is true in case of user errors. If not set the default value is false for this. */
forceFlag?: boolean;
/** This parameter allows the caller to upload data in parallel and control the order in which it is appended to the file. It is required when uploading data to be appended to the file and when flushing previously uploaded data to the file. The value must be the position where the data is to be appended. Uploaded data is not immediately flushed, or written, to the file. To flush, the previously uploaded data must be contiguous, the position parameter must be specified and equal to the length of the file after all data has been written, and there must not be a request entity body included with the request. */
position?: number;
/** Valid only for flush operations. If "true", uncommitted data is retained after the flush operation completes; otherwise, the uncommitted data is deleted after the flush operation. The default is false. Data at offsets less than the specified position are written to the file when flush succeeds, but this optional parameter allows data after the flush position to be retained for a future flush operation. */
retainUncommittedData?: boolean;
/** Azure Storage Events allow applications to receive notifications when files change. When Azure Storage Events are enabled, a file changed event is raised. This event has a property indicating whether this is the final change to distinguish the difference between an intermediate flush to a file stream and the final close of a file stream. The close query parameter is valid only when the action is "flush" and change notifications are enabled. If the value of close is "true" and the flush operation completes successfully, the service raises a file change notification with a property indicating that this is the final update (the file stream has been closed). If "false" a change notification is raised indicating the file has changed. The default is false. This query parameter is set to true by the Hadoop ABFS driver to indicate that the file stream has been closed. */
close?: boolean;
/** Required for "Append Data" and "Flush Data". Must be 0 for "Flush Data". Must be the length of the request content in bytes for "Append Data". */
contentLength?: number;
/** Optional. The owner of the blob or directory. */
owner?: string;
/** Optional. The owning group of the blob or directory. */
group?: string;
/** Sets POSIX access control rights on files and directories. The value is a comma-separated list of access control entries. Each access control entry (ACE) consists of a scope, a type, a user or group identifier, and permissions in the format "[scope:][type]:[id]:[permissions]". */
acl?: string;
}
/** Contains response data for the update operation. */
export type PathUpdateResponse = PathUpdateHeaders &
SetAccessControlRecursiveResponse & {
/** The underlying HTTP response. */
_response: coreHttp.HttpResponse & {
/** The response body as text (string format) */
bodyAsText: string;
/** The response body as parsed JSON or XML */
parsedBody: SetAccessControlRecursiveResponse;
/** The parsed HTTP response headers. */
parsedHeaders: PathUpdateHeaders;
};
};
/** Optional parameters. */
export interface PathLeaseOptionalParams extends coreHttp.OperationOptions {
/** Parameter group */
modifiedAccessConditions?: ModifiedAccessConditions;
/** Parameter group */
leaseAccessConditions?: LeaseAccessConditions;
/** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
requestId?: string;
/** The timeout parameter is expressed in seconds. For more information, see <a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a> */
timeout?: number;
/** The lease duration is required to acquire a lease, and specifies the duration of the lease in seconds. The lease duration must be between 15 and 60 seconds or -1 for infinite lease. (Presumably maps to the x-ms-lease-duration request header — confirm against the REST specification.) */
xMsLeaseDuration?: number;
/** The lease break period duration is optional to break a lease, and specifies the break period of the lease in seconds. The lease break duration must be between 0 and 60 seconds. (Presumably maps to the x-ms-lease-break-period request header — confirm against the REST specification.) */
xMsLeaseBreakPeriod?: number;
/** Proposed lease ID, in a GUID string format. The Blob service returns 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor (String) for a list of valid GUID string formats. */
proposedLeaseId?: string;
}
/** Contains response data for the lease operation. */
export type PathLeaseResponse = PathLeaseHeaders & {
/** The underlying HTTP response. */
_response: coreHttp.HttpResponse & {
/** The parsed HTTP response headers. */
parsedHeaders: PathLeaseHeaders;
};
};
/** Optional parameters. */
export interface PathReadOptionalParams extends coreHttp.OperationOptions {
/** Parameter group */
modifiedAccessConditions?: ModifiedAccessConditions;
/** Parameter group */
leaseAccessConditions?: LeaseAccessConditions;
/** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
requestId?: string;
/** The timeout parameter is expressed in seconds. For more information, see <a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a> */
timeout?: number;
/** The HTTP Range request header specifies one or more byte ranges of the resource to be retrieved. */
range?: string;
/** Optional. When this header is set to "true" and specified together with the Range header, the service returns the MD5 hash for the range, as long as the range is less than or equal to 4MB in size. If this header is specified without the Range header, the service returns status code 400 (Bad Request). If this header is set to true when the range exceeds 4 MB in size, the service returns status code 400 (Bad Request). */
xMsRangeGetContentMd5?: boolean;
}
/** Contains response data for the read operation. */
export type PathReadResponse = PathReadHeaders & {
/**
 * BROWSER ONLY
 *
 * The response body as a browser Blob.
 * Always `undefined` in node.js.
 */
blobBody?: Promise<Blob>;
/**
 * NODEJS ONLY
 *
 * The response body as a node.js Readable stream.
 * Always `undefined` in the browser.
 */
readableStreamBody?: NodeJS.ReadableStream;
/** The underlying HTTP response. */
_response: coreHttp.HttpResponse & {
/** The parsed HTTP response headers. */
parsedHeaders: PathReadHeaders;
};
};
/** Optional parameters. */
export interface PathGetPropertiesOptionalParams
extends coreHttp.OperationOptions {
/** Parameter group */
modifiedAccessConditions?: ModifiedAccessConditions;
/** Parameter group */
leaseAccessConditions?: LeaseAccessConditions;
/** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
requestId?: string;
/** The timeout parameter is expressed in seconds. For more information, see <a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a> */
timeout?: number;
/** Optional. Valid only when Hierarchical Namespace is enabled for the account. If "true", the user identity values returned in the x-ms-owner, x-ms-group, and x-ms-acl response headers will be transformed from Azure Active Directory Object IDs to User Principal Names. If "false", the values will be returned as Azure Active Directory Object IDs. The default value is false. Note that group and application Object IDs are not translated because they do not have unique friendly names. */
upn?: boolean;
/** Optional. If the value is "getStatus" only the system defined properties for the path are returned. If the value is "getAccessControl" the access control list is returned in the response headers (Hierarchical Namespace must be enabled for the account), otherwise the properties are returned. */
action?: PathGetPropertiesAction;
}
/** Contains response data for the getProperties operation. */
export type PathGetPropertiesResponse = PathGetPropertiesHeaders & {
/** The underlying HTTP response. */
_response: coreHttp.HttpResponse & {
/** The parsed HTTP response headers. */
parsedHeaders: PathGetPropertiesHeaders;
};
};
/** Optional parameters. */
export interface PathDeleteOptionalParams extends coreHttp.OperationOptions {
/** Parameter group */
modifiedAccessConditions?: ModifiedAccessConditions;
/** Parameter group */
leaseAccessConditions?: LeaseAccessConditions;
/** Optional. When deleting a directory, the number of paths that are deleted with each invocation is limited. If the number of paths to be deleted exceeds this limit, a continuation token is returned in this response header. When a continuation token is returned in the response, it must be specified in a subsequent invocation of the delete operation to continue deleting the directory. */
continuation?: string;
/** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
requestId?: string;
/** The timeout parameter is expressed in seconds. For more information, see <a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a> */
timeout?: number;
/** Required when deleting a directory. If "true", all paths beneath the directory are deleted; if "false" and the directory is non-empty, an error occurs. NOTE(review): declared optional here although the generated text marked it "Required" — presumably the service enforces it per-resource-type; confirm against the REST specification. */
recursive?: boolean;
}
/** Contains response data for the delete operation. */
export type PathDeleteResponse = PathDeleteHeaders & {
/** The underlying HTTP response. */
_response: coreHttp.HttpResponse & {
/** The parsed HTTP response headers. */
parsedHeaders: PathDeleteHeaders;
};
};
/** Optional parameters. */
export interface PathSetAccessControlOptionalParams
extends coreHttp.OperationOptions {
/** Parameter group */
modifiedAccessConditions?: ModifiedAccessConditions;
/** Parameter group */
leaseAccessConditions?: LeaseAccessConditions;
/** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
requestId?: string;
/** The timeout parameter is expressed in seconds. For more information, see <a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a> */
timeout?: number;
/** Optional and only valid if Hierarchical Namespace is enabled for the account. Sets POSIX access permissions for the file owner, the file owning group, and others. Each class may be granted read, write, or execute permission. The sticky bit is also supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported. */
permissions?: string;
/** Optional. The owner of the blob or directory. */
owner?: string;
/** Optional. The owning group of the blob or directory. */
group?: string;
/** Sets POSIX access control rights on files and directories. The value is a comma-separated list of access control entries. Each access control entry (ACE) consists of a scope, a type, a user or group identifier, and permissions in the format "[scope:][type]:[id]:[permissions]". */
acl?: string;
}
/** Contains response data for the setAccessControl operation. */
export type PathSetAccessControlResponse = PathSetAccessControlHeaders & {
/** The underlying HTTP response. */
_response: coreHttp.HttpResponse & {
/** The parsed HTTP response headers. */
parsedHeaders: PathSetAccessControlHeaders;
};
};
/** Optional parameters. */
export interface PathSetAccessControlRecursiveOptionalParams
extends coreHttp.OperationOptions {
/** Optional. The number of paths processed with each invocation is limited. If the number of paths to be processed exceeds this limit, a continuation token is returned in the response header x-ms-continuation. When a continuation token is returned in the response, it must be percent-encoded and specified in a subsequent invocation of setAccessControlRecursive operation. NOTE(review): the generated text here described the *delete* operation; the wording above matches the same parameter documented for the update operation's SetAccessControlRecursive action — confirm against the REST specification. */
continuation?: string;
/** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
requestId?: string;
/** The timeout parameter is expressed in seconds. For more information, see <a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a> */
timeout?: number;
/** Optional. It specifies the maximum number of files or directories on which the acl change will be applied. If omitted or greater than 2,000, the request will process up to 2,000 items */
maxRecords?: number;
/** Optional. Valid for "SetAccessControlRecursive" operation. If set to false, the operation will terminate quickly on encountering user errors (4XX). If true, the operation will ignore user errors and proceed with the operation on other sub-entities of the directory. Continuation token will only be returned when forceFlag is true in case of user errors. If not set the default value is false for this. */
forceFlag?: boolean;
/** Sets POSIX access control rights on files and directories. The value is a comma-separated list of access control entries. Each access control entry (ACE) consists of a scope, a type, a user or group identifier, and permissions in the format "[scope:][type]:[id]:[permissions]". */
acl?: string;
}
/** Contains response data for the setAccessControlRecursive operation. */
export type PathSetAccessControlRecursiveResponse = PathSetAccessControlRecursiveHeaders &
SetAccessControlRecursiveResponse & {
/** The underlying HTTP response. */
_response: coreHttp.HttpResponse & {
/** The response body as text (string format) */
bodyAsText: string;
/** The response body as parsed JSON or XML */
parsedBody: SetAccessControlRecursiveResponse;
/** The parsed HTTP response headers. */
parsedHeaders: PathSetAccessControlRecursiveHeaders;
};
};
/** Optional parameters. */
export interface PathFlushDataOptionalParams extends coreHttp.OperationOptions {
/** Parameter group */
modifiedAccessConditions?: ModifiedAccessConditions;
/** Parameter group */
pathHttpHeaders?: PathHttpHeaders;
/** Parameter group */
leaseAccessConditions?: LeaseAccessConditions;
/** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
requestId?: string;
/** The timeout parameter is expressed in seconds. For more information, see <a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a> */
timeout?: number;
/** This parameter allows the caller to upload data in parallel and control the order in which it is appended to the file. It is required when uploading data to be appended to the file and when flushing previously uploaded data to the file. The value must be the position where the data is to be appended. Uploaded data is not immediately flushed, or written, to the file. To flush, the previously uploaded data must be contiguous, the position parameter must be specified and equal to the length of the file after all data has been written, and there must not be a request entity body included with the request. */
position?: number;
/** Valid only for flush operations. If "true", uncommitted data is retained after the flush operation completes; otherwise, the uncommitted data is deleted after the flush operation. The default is false. Data at offsets less than the specified position are written to the file when flush succeeds, but this optional parameter allows data after the flush position to be retained for a future flush operation. */
retainUncommittedData?: boolean;
/** Azure Storage Events allow applications to receive notifications when files change. When Azure Storage Events are enabled, a file changed event is raised. This event has a property indicating whether this is the final change to distinguish the difference between an intermediate flush to a file stream and the final close of a file stream. The close query parameter is valid only when the action is "flush" and change notifications are enabled. If the value of close is "true" and the flush operation completes successfully, the service raises a file change notification with a property indicating that this is the final update (the file stream has been closed). If "false" a change notification is raised indicating the file has changed. The default is false. This query parameter is set to true by the Hadoop ABFS driver to indicate that the file stream has been closed. */
close?: boolean;
/** Required for "Append Data" and "Flush Data". Must be 0 for "Flush Data". Must be the length of the request content in bytes for "Append Data". */
contentLength?: number;
}
/** Contains response data for the flushData operation. */
export type PathFlushDataResponse = PathFlushDataHeaders & {
/** The underlying HTTP response. */
_response: coreHttp.HttpResponse & {
/** The parsed HTTP response headers. */
parsedHeaders: PathFlushDataHeaders;
};
};
/** Optional parameters. */
export interface PathAppendDataOptionalParams
extends coreHttp.OperationOptions {
/** Parameter group */
leaseAccessConditions?: LeaseAccessConditions;
/** Parameter group */
pathHttpHeaders?: PathHttpHeaders;
/** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
requestId?: string;
/** The timeout parameter is expressed in seconds. For more information, see <a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a> */
timeout?: number;
/** This parameter allows the caller to upload data in parallel and control the order in which it is appended to the file. It is required when uploading data to be appended to the file and when flushing previously uploaded data to the file. The value must be the position where the data is to be appended. Uploaded data is not immediately flushed, or written, to the file. To flush, the previously uploaded data must be contiguous, the position parameter must be specified and equal to the length of the file after all data has been written, and there must not be a request entity body included with the request. */
position?: number;
/** Required for "Append Data" and "Flush Data". Must be 0 for "Flush Data". Must be the length of the request content in bytes for "Append Data". */
contentLength?: number;
/** Specify the transactional crc64 for the body, to be validated by the service. */
transactionalContentCrc64?: Uint8Array;
}
/** Contains response data for the appendData operation. */
export type PathAppendDataResponse = PathAppendDataHeaders & {
/** The underlying HTTP response. */
_response: coreHttp.HttpResponse & {
/** The parsed HTTP response headers. */
parsedHeaders: PathAppendDataHeaders;
};
};
/** Optional parameters. */
export interface PathSetExpiryOptionalParams extends coreHttp.OperationOptions {
/** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
requestId?: string;
/** The timeout parameter is expressed in seconds. For more information, see <a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a> */
timeout?: number;
/** The time at which the blob should expire. */
expiresOn?: string;
}
/** Contains response data for the setExpiry operation. */
export type PathSetExpiryResponse = PathSetExpiryHeaders & {
/** The underlying HTTP response. */
_response: coreHttp.HttpResponse & {
/** The parsed HTTP response headers. */
parsedHeaders: PathSetExpiryHeaders;
};
};
/** Optional parameters. */
export interface PathUndeleteOptionalParams extends coreHttp.OperationOptions {
/** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */
requestId?: string;
/** The timeout parameter is expressed in seconds. For more information, see <a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a> */
timeout?: number;
/** Only for hierarchical namespace enabled accounts. Optional. The path of the soft deleted blob to undelete. */
undeleteSource?: string;
}
/** Contains response data for the undelete operation. */
export type PathUndeleteResponse = PathUndeleteHeaders & {
/** The underlying HTTP response. */
_response: coreHttp.HttpResponse & {
/** The parsed HTTP response headers. */
parsedHeaders: PathUndeleteHeaders;
};
};
/** Optional parameters for the StorageClient service client itself (as opposed to per-operation options above). */
export interface StorageClientOptionalParams
extends coreHttp.ServiceClientOptions {
/** Specifies the version of the operation to use for this request. */
version?: string;
/** The value must be "filesystem" for all filesystem operations. */
resource?: string;
/** Overrides client endpoint. */
endpoint?: string;
}
import * as path from 'path';
import { Project, ts, PropertyDeclaration, SyntaxKind, VariableDeclaration } from 'ts-morph';
import FileEngine from '../app/engines/file.engine';
// Module-level ts-morph Project shared by every ImportsUtil lookup, so a
// source file parsed once is reused across calls (see the getSourceFile /
// addSourceFileAtPathIfExists lookup in findValueInImportOrLocalVariables).
const ast = new Project();
export class ImportsUtil {
private static instance: ImportsUtil;
// Private constructor: ImportsUtil is a singleton, obtained via getInstance().
private constructor() {}
/**
 * Returns the shared ImportsUtil singleton, creating it lazily on first access.
 */
public static getInstance() {
    ImportsUtil.instance = ImportsUtil.instance || new ImportsUtil();
    return ImportsUtil.instance;
}
/**
 * Find for a sourceFile a variable value in a local enum.
 *
 * Scans every enum declared in the file; when one is named `variableName`,
 * looks up its member named `variableValue` and returns that member's value.
 * Returns '' when nothing matches.
 *
 * @param srcFile ts-morph SourceFile to scan
 * @param variableName name of the enum (e.g. AVAR in AVAR.BVAR)
 * @param variableValue name of the enum member (e.g. BVAR in AVAR.BVAR)
 */
private findInEnums(srcFile, variableName: string, variableValue: string) {
    let res = '';
    // BUG FIX: the previous code passed these callbacks to srcFile.getEnum(...)
    // and e.getMember(...), which expect *find predicates* returning a boolean;
    // it only worked by accident because a predicate that never returns true
    // still visits every node. Iterate explicitly instead.
    srcFile.getEnums().forEach(e => {
        if (e.getName() === variableName) {
            e.getMembers().forEach(m => {
                if (m.getName() === variableValue) {
                    res = m.getValue();
                }
            });
        }
    });
    return res;
}
/**
 * Find for a sourceFile a variable value in a local static class.
 *
 * Scans every class declared in the file for a static property named
 * `variableValue` and returns the text of its initializer, or '' when none is
 * found. NOTE(review): `variableName` is accepted but never consulted (the
 * class name is not checked) — preserved as-is; confirm whether that is
 * intentional.
 *
 * @param srcFile ts-morph SourceFile to scan
 * @param variableName name of the class (currently unused)
 * @param variableValue name of the static property to look up
 */
private findInClasses(srcFile, variableName: string, variableValue: string) {
    let res = '';
    // BUG FIX: the previous code passed this callback to srcFile.getClass(...),
    // which expects a *find predicate* returning a boolean; it only worked by
    // accident because a predicate that never returns true still visits every
    // class. Iterate explicitly instead.
    srcFile.getClasses().forEach(c => {
        let staticProperty: PropertyDeclaration = c.getStaticProperty(variableValue);
        if (staticProperty && staticProperty.getInitializer()) {
            res = staticProperty.getInitializer().getText();
        }
    });
    return res;
}
/**
 * Find a value in a local variable declaration like an object.
 *
 * Walks a dotted path (e.g. AVAR.BVAR.thestring, supplied as
 * `variablesAttributes = ['AVAR', 'BVAR', 'thestring']`) through nested
 * object-literal properties of the declaration's initializer.
 *
 * @param variableDeclaration ts-morph node, expected to be a VariableDeclaration
 * @param variablesAttributes path segments; segment 0 is the variable itself
 * @returns the matched literal's text, '' when no segment matched, or
 *          undefined when the node is not an object-literal variable declaration
 */
private findInObjectVariableDeclaration(variableDeclaration, variablesAttributes) {
    let variableKind = variableDeclaration.getKind();
    if (variableKind && variableKind === SyntaxKind.VariableDeclaration) {
        let initializer = variableDeclaration.getInitializer();
        if (initializer) {
            let initializerKind = initializer.getKind();
            if (initializerKind && initializerKind === SyntaxKind.ObjectLiteralExpression) {
                let compilerNode = initializer.compilerNode as ts.ObjectLiteralExpression,
                    finalValue = '';
                // Find thestring from AVAR.BVAR.thestring inside properties
                let depth = 0;
                let loopProperties = properties => {
                    properties.forEach(prop => {
                        if (prop.name && variablesAttributes[depth + 1]) {
                            if (prop.name.getText() === variablesAttributes[depth + 1]) {
                                // BUG FIX: the previous code also assigned
                                // `prop.initializer.text` in an else-branch where
                                // prop.initializer was known to be missing
                                // (e.g. shorthand properties), throwing a
                                // TypeError. Only read it when present.
                                if (prop.initializer) {
                                    if (prop.initializer.properties) {
                                        // Matched an intermediate object: descend one level.
                                        depth += 1;
                                        loopProperties(prop.initializer.properties);
                                    } else {
                                        finalValue = prop.initializer.text;
                                    }
                                }
                            }
                        }
                    });
                };
                loopProperties(compilerNode.properties);
                return finalValue;
            }
        }
    }
}
/**
* Find in imports something like myvar
* @param {string} inputVariableName like myvar
* @return {[type]} myvar value
*/
public findValueInImportOrLocalVariables(
inputVariableName: string,
sourceFile: ts.SourceFile,
decoratorType?: string
) {
let metadataVariableName = inputVariableName,
searchedImport,
aliasOriginalName = '',
foundWithNamedImport = false,
foundWithDefaultImport = false,
foundWithAlias = false;
const file =
typeof ast.getSourceFile(sourceFile.fileName) !== 'undefined'
? ast.getSourceFile(sourceFile.fileName)
: ast.addSourceFileAtPathIfExists(sourceFile.fileName); // tslint:disable-line
const imports = file.getImportDeclarations();
/**
* Loop through all imports, and find one matching inputVariableName
*/
imports.forEach(i => {
let namedImports = i.getNamedImports(),
namedImportsLength = namedImports.length,
j = 0;
if (namedImportsLength > 0) {
for (j; j < namedImportsLength; j++) {
let importName = namedImports[j].getNameNode().getText() as string,
importAlias;
if (namedImports[j].getAliasNode()) {
importAlias = namedImports[j].getAliasNode().getText();
}
if (importName === metadataVariableName) {
foundWithNamedImport = true;
searchedImport = i;
break;
}
if (importAlias === metadataVariableName) {
foundWithNamedImport = true;
foundWithAlias = true;
aliasOriginalName = importName;
searchedImport = i;
break;
}
}
}
const namespaceImport = i.getNamespaceImport();
if (namespaceImport) {
const namespaceImportLocalName = namespaceImport.getText();
if (namespaceImportLocalName === metadataVariableName) {
searchedImport = i;
}
}
if (!foundWithNamedImport) {
const defaultImport = i.getDefaultImport();
if (defaultImport) {
const defaultImportText = defaultImport.getText();
if (defaultImportText === metadataVariableName) {
foundWithDefaultImport = true;
searchedImport = i;
}
}
}
});
function hasFoundValues(variableDeclaration) {
let variableKind = variableDeclaration.getKind();
if (variableKind && variableKind === SyntaxKind.VariableDeclaration) {
let initializer = variableDeclaration.getInitializer();
if (initializer) {
let initializerKind = initializer.getKind();
if (initializerKind && initializerKind === SyntaxKind.ObjectLiteralExpression) {
let compilerNode = initializer.compilerNode as ts.ObjectLiteralExpression;
return compilerNode.properties;
}
}
}
}
if (typeof searchedImport !== 'undefined') {
let importPathReference = searchedImport.getModuleSpecifierSourceFile();
let importPath;
if (typeof importPathReference !== 'undefined') {
importPath = importPathReference.compilerNode.fileName;
const sourceFileImport =
typeof ast.getSourceFile(importPath) !== 'undefined'
? ast.getSourceFile(importPath)
: ast.addSourceFileAtPathIfExists(importPath); // tslint:disable-line
if (sourceFileImport) {
let variableName = foundWithAlias ? aliasOriginalName : metadataVariableName;
let variableDeclaration = sourceFileImport.getVariableDeclaration(variableName);
if (variableDeclaration) {
return hasFoundValues(variableDeclaration);
} else {
// Try with exports
const exportDeclarations = sourceFileImport.getExportedDeclarations();
if (exportDeclarations && exportDeclarations.size > 0) {
for (const [
exportDeclarationKey,
exportDeclarationValues
] of exportDeclarations) {
exportDeclarationValues.forEach(exportDeclarationValue => {
if (
exportDeclarationValue instanceof VariableDeclaration &&
exportDeclarationValue.getName() === variableName
) {
return hasFoundValues(exportDeclarationValue);
}
});
}
}
}
}
}
if (
!importPathReference &&
decoratorType === 'template' &&
searchedImport.getModuleSpecifierValue().indexOf('.html') !== -1
) {
const originalSourceFilePath = sourceFile.path;
const originalSourceFilePathFolder = originalSourceFilePath.substring(
0,
originalSourceFilePath.lastIndexOf('/')
);
const finalImportedPath =
originalSourceFilePathFolder + '/' + searchedImport.getModuleSpecifierValue();
const finalImportedPathData = FileEngine.getSync(finalImportedPath);
return finalImportedPathData;
}
} else {
// Find in local variables of the file
const variableDeclaration = file.getVariableDeclaration(metadataVariableName);
if (variableDeclaration) {
let variableKind = variableDeclaration.getKind();
if (variableKind && variableKind === SyntaxKind.VariableDeclaration) {
let initializer = variableDeclaration.getInitializer();
if (initializer) {
let initializerKind = initializer.getKind();
if (
initializerKind &&
initializerKind === SyntaxKind.ObjectLiteralExpression
) {
let compilerNode =
initializer.compilerNode as ts.ObjectLiteralExpression;
return compilerNode.properties;
} else if (
initializerKind &&
(initializerKind === SyntaxKind.StringLiteral ||
initializerKind === SyntaxKind.NoSubstitutionTemplateLiteral)
) {
if (decoratorType === 'template') {
return initializer.getText();
} else {
return variableDeclaration.compilerNode;
}
} else if (initializerKind) {
return variableDeclaration.compilerNode;
}
}
}
}
}
return [];
}
public getFileNameOfImport(variableName: string, sourceFile: ts.SourceFile) {
const file =
typeof ast.getSourceFile(sourceFile.fileName) !== 'undefined'
? ast.getSourceFile(sourceFile.fileName)
: ast.addSourceFileAtPath(sourceFile.fileName); // tslint:disable-line
const imports = file.getImportDeclarations();
let searchedImport,
aliasOriginalName = '',
finalPath = '',
foundWithAlias = false;
imports.forEach(i => {
let namedImports = i.getNamedImports(),
namedImportsLength = namedImports.length,
j = 0;
if (namedImportsLength > 0) {
for (j; j < namedImportsLength; j++) {
let importName = namedImports[j].getNameNode().getText() as string,
importAlias;
if (namedImports[j].getAliasNode()) {
importAlias = namedImports[j].getAliasNode().getText();
}
if (importName === variableName) {
searchedImport = i;
break;
}
if (importAlias === variableName) {
foundWithAlias = true;
aliasOriginalName = importName;
searchedImport = i;
break;
}
}
}
});
if (typeof searchedImport !== 'undefined') {
let importPath = path.resolve(
path.dirname(sourceFile.fileName) +
'/' +
searchedImport.getModuleSpecifierValue() +
'.ts'
);
let cleaner = (process.cwd() + path.sep).replace(/\\/g, '/');
finalPath = importPath.replace(cleaner, '');
}
return finalPath;
}
/**
* Find the file path of imported variable
* @param {string} inputVariableName like thestring
* @return {[type]} thestring destination path
*/
public findFilePathOfImportedVariable(inputVariableName, sourceFilePath: string) {
let searchedImport,
finalPath = '',
aliasOriginalName = '',
foundWithAlias = false;
const file =
typeof ast.getSourceFile(sourceFilePath) !== 'undefined'
? ast.getSourceFile(sourceFilePath)
: ast.addSourceFileAtPath(sourceFilePath); // tslint:disable-line
const imports = file.getImportDeclarations();
/**
* Loop through all imports, and find one matching inputVariableName
*/
imports.forEach(i => {
let namedImports = i.getNamedImports(),
namedImportsLength = namedImports.length,
j = 0;
if (namedImportsLength > 0) {
for (j; j < namedImportsLength; j++) {
let importName = namedImports[j].getNameNode().getText() as string,
importAlias;
if (namedImports[j].getAliasNode()) {
importAlias = namedImports[j].getAliasNode().getText();
}
if (importName === inputVariableName) {
searchedImport = i;
break;
}
if (importAlias === inputVariableName) {
foundWithAlias = true;
aliasOriginalName = importName;
searchedImport = i;
break;
}
}
}
});
if (typeof searchedImport !== 'undefined') {
finalPath = path.resolve(
path.dirname(sourceFilePath) +
'/' +
searchedImport.getModuleSpecifierValue() +
'.ts'
);
}
return finalPath;
}
/**
* Find in imports something like VAR.AVAR.BVAR.thestring
* @param {string} inputVariableName like VAR.AVAR.BVAR.thestring
* @return {[type]} thestring value
*/
public findPropertyValueInImportOrLocalVariables(inputVariableName, sourceFile: ts.SourceFile) {
let variablesAttributes = inputVariableName.split('.'),
metadataVariableName = variablesAttributes[0],
searchedImport,
aliasOriginalName = '',
foundWithAlias = false;
const file =
typeof ast.getSourceFile(sourceFile.fileName) !== 'undefined'
? ast.getSourceFile(sourceFile.fileName)
: ast.addSourceFileAtPath(sourceFile.fileName); // tslint:disable-line
const imports = file.getImportDeclarations();
/**
* Loop through all imports, and find one matching inputVariableName
*/
imports.forEach(i => {
let namedImports = i.getNamedImports(),
namedImportsLength = namedImports.length,
j = 0;
if (namedImportsLength > 0) {
for (j; j < namedImportsLength; j++) {
let importName = namedImports[j].getNameNode().getText() as string,
importAlias;
if (namedImports[j].getAliasNode()) {
importAlias = namedImports[j].getAliasNode().getText();
}
if (importName === metadataVariableName) {
searchedImport = i;
break;
}
if (importAlias === metadataVariableName) {
foundWithAlias = true;
aliasOriginalName = importName;
searchedImport = i;
break;
}
}
}
});
let fileToSearchIn, variableDeclaration;
if (typeof searchedImport !== 'undefined') {
let importPath = path.resolve(
path.dirname(sourceFile.fileName) +
'/' +
searchedImport.getModuleSpecifierValue() +
'.ts'
);
const sourceFileImport =
typeof ast.getSourceFile(importPath) !== 'undefined'
? ast.getSourceFile(importPath)
: ast.addSourceFileAtPath(importPath); // tslint:disable-line
if (sourceFileImport) {
fileToSearchIn = sourceFileImport;
let variableName = foundWithAlias ? aliasOriginalName : metadataVariableName;
variableDeclaration = fileToSearchIn.getVariableDeclaration(variableName);
}
} else {
fileToSearchIn = file;
// Find in local variables of the file
variableDeclaration = fileToSearchIn.getVariableDeclaration(metadataVariableName);
}
if (variableDeclaration) {
return this.findInObjectVariableDeclaration(variableDeclaration, variablesAttributes);
}
// Try find it in enums
if (variablesAttributes.length > 0) {
if (typeof fileToSearchIn !== 'undefined') {
let val = this.findInEnums(
fileToSearchIn,
metadataVariableName,
variablesAttributes[1]
);
if (val !== '') {
return val;
}
val = this.findInClasses(
fileToSearchIn,
metadataVariableName,
variablesAttributes[1]
);
if (val !== '') {
return val;
}
}
}
}
}
export default ImportsUtil.getInstance(); | the_stack |
/** Minimal HTTP abstraction used by the acquisition SDK to issue requests. */
declare module Http {
    /** HTTP methods the requester may be asked to perform. */
    export const enum Verb {
        GET, HEAD, POST, PUT, DELETE, TRACE, OPTIONS, CONNECT, PATCH
    }
    /** Raw HTTP response: status code plus optional body text. */
    export interface Response {
        statusCode: number;
        body?: string;
    }
    /** Pluggable transport; the overload taking requestBody is for verbs that carry a payload. */
    export interface Requester {
        request(verb: Verb, url: string, callback: Callback<Response>): void;
        request(verb: Verb, url: string, requestBody: string, callback: Callback<Response>): void;
    }
}
/** Augments the global window object with the CodePush plugin entry point. */
interface Window {
    codePush: CodePushCordovaPlugin;
}
/**
 * Defines a package. All fields are non-nullable, except when retrieving the currently running package on the first run of the app,
 * in which case only the appVersion is compulsory.
 *
 * !! THIS TYPE IS READ FROM NATIVE CODE AS WELL. ANY CHANGES TO THIS INTERFACE NEEDS TO BE UPDATED IN NATIVE CODE !!
 */
interface IPackage {
    /** Deployment key this package was released under. */
    deploymentKey: string;
    /** Human-readable release description. */
    description: string;
    /** Release label identifying this package on the server. */
    label: string;
    /** Native application version this package targets. */
    appVersion: string;
    /** True when the release was marked mandatory. */
    isMandatory: boolean;
    /** Hash identifying the package contents. */
    packageHash: string;
    /** Size of the update package (presumably bytes — confirm against native code). */
    packageSize: number;
    /** True when a previous install of this package failed and was rolled back. */
    failedInstall: boolean;
}
/**
 * Defines a remote package, which represents an update package available for download from the CodePush server.
 */
interface IRemotePackage extends IPackage {
    /**
     * The URL at which the package is available for download.
     */
    downloadUrl: string;
    /**
     * Downloads the package update from the CodePush service.
     *
     * @param downloadSuccess Called with one parameter, the downloaded package information, once the download completed successfully.
     * @param downloadError Optional callback invoked in case of an error.
     * @param downloadProgress Optional callback invoked during the download process. It is called several times with one DownloadProgress parameter.
     */
    download(downloadSuccess: SuccessCallback<ILocalPackage>, downloadError?: ErrorCallback, downloadProgress?: SuccessCallback<DownloadProgress>): void;
    /**
     * Aborts the current download session, previously started with download().
     *
     * @param abortSuccess Optional callback invoked if the abort operation succeeded.
     * @param abortError Optional callback invoked in case of an error.
     */
    abortDownload(abortSuccess?: SuccessCallback<void>, abortError?: ErrorCallback): void;
}
/**
 * Defines a local package.
 *
 * !! THIS TYPE IS READ FROM NATIVE CODE AS WELL. ANY CHANGES TO THIS INTERFACE NEEDS TO BE UPDATED IN NATIVE CODE !!
 */
interface ILocalPackage extends IPackage {
    /**
     * The local storage path where this package is located.
     */
    localPath: string;
    /**
     * Indicates if the current application run is the first one after the package was applied.
     */
    isFirstRun: boolean;
    /**
     * Applies this package to the application. The application will be reloaded with this package and on every application launch this package will be loaded.
     * On the first run after the update, the application will wait for a codePush.notifyApplicationReady() call. Once this call is made, the install operation is considered a success.
     * Otherwise, the install operation will be marked as failed, and the application is reverted to its previous version on the next run.
     *
     * @param installSuccess Callback invoked if the install operation succeeded.
     * @param errorCallback Optional callback invoked in case of an error.
     * @param installOptions Optional parameter used for customizing the installation behavior.
     */
    install(installSuccess: SuccessCallback<void>, errorCallback?: ErrorCallback, installOptions?: InstallOptions): void;
}
/**
 * Decomposed static side of RemotePackage.
 * For Class Decomposition guidelines see http://www.typescriptlang.org/Handbook#writing-dts-files-guidelines-and-specifics
 */
interface RemotePackage_Static {
    new (): IRemotePackage;
}
/**
 * Decomposed static side of LocalPackage.
 * For Class Decomposition guidelines see http://www.typescriptlang.org/Handbook#writing-dts-files-guidelines-and-specifics
 */
interface LocalPackage_Static {
    new (): ILocalPackage;
}
// Runtime constructors corresponding to the decomposed static sides declared here.
declare var RemotePackage: RemotePackage_Static;
declare var LocalPackage: LocalPackage_Static;
/**
 * Defines the JSON format of the current package information file.
 * This file is stored in the local storage of the device and persists between store updates and code-push updates.
 *
 * !! THIS FILE IS READ FROM NATIVE CODE AS WELL. ANY CHANGES TO THIS INTERFACE NEEDS TO BE UPDATED IN NATIVE CODE !!
 */
interface IPackageInfoMetadata extends ILocalPackage {
    nativeBuildTime: string;
}
/** Server response indicating the update requires a newer binary (store) app version. */
interface NativeUpdateNotification {
    updateAppVersion: boolean; // Always true
    appVersion: string;
}
/** Node-style callback: receives either an error or a result. */
interface Callback<T> { (error: Error, parameter: T): void; }
/** Success continuation; the result may be omitted for void operations. */
interface SuccessCallback<T> { (result?: T): void; }
/** Error continuation. */
interface ErrorCallback { (error?: Error): void; }
/** Acquisition SDK configuration: identifies the app, client and deployment. */
interface Configuration {
    appVersion: string;
    /** Unique identifier for this client/device installation. */
    clientUniqueId: string;
    deploymentKey: string;
    /** CodePush server endpoint URL. */
    serverUrl: string;
    /** Presumably tells the server to skip native app version matching — confirm against server API. */
    ignoreAppVersion?: boolean
}
/** String constants reported to the server for deployment outcomes. */
declare class AcquisitionStatus {
    static DeploymentSucceeded: string;
    static DeploymentFailed: string;
}
/** Client for querying the CodePush server for updates and reporting statuses. */
declare class AcquisitionManager {
    constructor(httpRequester: Http.Requester, configuration: Configuration);
    public queryUpdateWithCurrentPackage(currentPackage: IPackage, callback?: Callback<IRemotePackage | NativeUpdateNotification>): void;
    public reportStatusDeploy(pkg?: IPackage, status?: string, callback?: Callback<void>): void;
    public reportStatusDownload(pkg: IPackage, callback?: Callback<void>): void;
}
/** Public surface of the CodePush Cordova plugin, exposed as window.codePush. */
interface CodePushCordovaPlugin {
    /**
     * Get the current package information.
     *
     * @param packageSuccess Callback invoked with the currently deployed package information.
     * @param packageError Optional callback invoked in case of an error.
     */
    getCurrentPackage(packageSuccess: SuccessCallback<ILocalPackage>, packageError?: ErrorCallback): void;
    /**
     * Gets the pending package information, if any. A pending package is one that has been installed but the application still runs the old code.
     * This happens only after a package has been installed using ON_NEXT_RESTART or ON_NEXT_RESUME mode, but the application was not restarted/resumed yet.
     */
    getPendingPackage(packageSuccess: SuccessCallback<ILocalPackage>, packageError?: ErrorCallback): void;
    /**
     * Checks with the CodePush server if an update package is available for download.
     *
     * @param querySuccess Callback invoked in case of a successful response from the server.
     *                     The callback takes one RemotePackage parameter. A non-null package is a valid update.
     *                     A null package means the application is up to date for the current native application version.
     * @param queryError Optional callback invoked in case of an error.
     * @param deploymentKey Optional deployment key that overrides the config.xml setting.
     */
    checkForUpdate(querySuccess: SuccessCallback<IRemotePackage>, queryError?: ErrorCallback, deploymentKey?: string): void;
    /**
     * Notifies the plugin that the update operation succeeded and that the application is ready.
     * Calling this function is required on the first run after an update. On every subsequent application run, calling this function is a noop.
     * If using sync API, calling this function is not required since sync calls it internally.
     *
     * @param notifySucceeded Optional callback invoked if the plugin was successfully notified.
     * @param notifyFailed Optional callback invoked in case of an error during notifying the plugin.
     */
    notifyApplicationReady(notifySucceeded?: SuccessCallback<void>, notifyFailed?: ErrorCallback): void;
    /**
     * Reloads the application. If there is a pending update package installed using ON_NEXT_RESTART or ON_NEXT_RESUME modes, the update
     * will be immediately visible to the user. Otherwise, calling this function will simply reload the current version of the application.
     */
    restartApplication(installSuccess: SuccessCallback<void>, errorCallback?: ErrorCallback): void;
    /**
     * Convenience method for installing updates in one method call.
     * This method is provided for simplicity, and its behavior can be replicated by using window.codePush.checkForUpdate(), RemotePackage's download() and LocalPackage's install() methods.
     *
     * The algorithm of this method is the following:
     * - Checks for an update on the CodePush server.
     * - If an update is available
     *         - If the update is mandatory and the alertMessage is set in options, the user will be informed that the application will be updated to the latest version.
     *           The update package will then be downloaded and applied.
     *         - If the update is not mandatory and the confirmMessage is set in options, the user will be asked if they want to update to the latest version.
     *           If they decline, the syncCallback will be invoked with SyncStatus.UPDATE_IGNORED.
     *         - Otherwise, the update package will be downloaded and applied with no user interaction.
     * - If no update is available on the server, or if a previously rolled back update is available and the ignoreFailedUpdates is set to true, the syncCallback will be invoked with the SyncStatus.UP_TO_DATE.
     * - If an error occurs during checking for update, downloading or installing it, the syncCallback will be invoked with the SyncStatus.ERROR.
     *
     * @param syncCallback Optional callback to be called with the status of the sync operation.
     *                     The callback will be called only once, and the possible statuses are defined by the SyncStatus enum.
     * @param syncOptions Optional SyncOptions parameter configuring the behavior of the sync operation.
     * @param downloadProgress Optional callback invoked during the download process. It is called several times with one DownloadProgress parameter.
     *
     */
    sync(syncCallback?: SuccessCallback<SyncStatus>, syncOptions?: SyncOptions, downloadProgress?: SuccessCallback<DownloadProgress>): void;
}
/**
 * Defines the possible result statuses of the window.codePush.sync operation.
 */
declare enum SyncStatus {
    /**
     * The application is up to date.
     */
    UP_TO_DATE,
    /**
     * An update is available, it has been downloaded, unzipped and copied to the deployment folder.
     * After the completion of the callback invoked with SyncStatus.UPDATE_INSTALLED, the application will be reloaded with the updated code and resources.
     */
    UPDATE_INSTALLED,
    /**
     * An optional update is available, but the user declined to install it. The update was not downloaded.
     */
    UPDATE_IGNORED,
    /**
     * An error happened during the sync operation. This might be an error while communicating with the server, downloading or unzipping the update.
     * The console logs should contain more information about what happened. No update has been applied in this case.
     */
    ERROR,
    /**
     * Intermediate status - the plugin is about to check for updates.
     */
    CHECKING_FOR_UPDATE,
    /**
     * Intermediate status - a user dialog is about to be displayed. This status will be reported only if user interaction is enabled.
     */
    AWAITING_USER_ACTION,
    /**
     * Intermediate status - the update package is about to be downloaded.
     */
    DOWNLOADING_PACKAGE,
    /**
     * Intermediate status - the update package is about to be installed.
     */
    INSTALLING_UPDATE
}
/**
 * Defines the available install modes for updates.
 */
declare enum InstallMode {
    /**
     * The update will be applied to the running application immediately. The application will be reloaded with the new content immediately.
     */
    IMMEDIATE,
    /**
     * The update is downloaded but not installed immediately. The new content will be available the next time the application is started.
     */
    ON_NEXT_RESTART,
    /**
     * The update is downloaded but not installed immediately. The new content will be available the next time the application is resumed or restarted, whichever event happens first.
     */
    ON_NEXT_RESUME
}
/**
 * Defines the install operation options.
 */
interface InstallOptions {
    /**
     * Used to specify the InstallMode used for the install operation. This is optional and defaults to InstallMode.ON_NEXT_RESTART.
     */
    installMode?: InstallMode;
}
/**
 * Defines the sync operation options.
 */
interface SyncOptions extends InstallOptions {
    /**
     * Optional boolean flag. If set, previous updates which were rolled back will be ignored. Defaults to true.
     */
    ignoreFailedUpdates?: boolean;
    /**
     * Used to enable, disable or customize the user interaction during sync.
     * If set to false, user interaction will be disabled. If set to true, the user will be alerted or asked to confirm new updates, based on whether the update is mandatory.
     * To customize the user dialog, this option can be set to a custom UpdateDialogOptions instance.
     */
    updateDialog?: boolean | UpdateDialogOptions;
    /**
     * Overrides the config.xml deployment key when checking for updates.
     */
    deploymentKey?: string;
}
/**
 * Defines the configuration options for the alert or confirmation dialog
 */
interface UpdateDialogOptions {
    /**
     * If a mandatory update is available and this option is set, the message will be displayed to the user in an alert dialog before downloading and installing the update.
     * The user will not be able to cancel the operation, since the update is mandatory.
     */
    mandatoryUpdateMessage?: string;
    /**
     * If an optional update is available and this option is set, the message will be displayed to the user in a confirmation dialog.
     * If the user confirms the update, it will be downloaded and installed. Otherwise, the update is not downloaded.
     */
    optionalUpdateMessage?: string;
    /**
     * The title of the dialog box used for interacting with the user in case of a mandatory or optional update.
     * This title will only be used if at least one of mandatoryUpdateMessage or optionalUpdateMessage options are set.
     */
    updateTitle?: string;
    /**
     * The label of the confirmation button in case of an optional update.
     */
    optionalInstallButtonLabel?: string;
    /**
     * The label of the cancel button in case of an optional update.
     */
    optionalIgnoreButtonLabel?: string;
    /**
     * The label of the continue button in case of a mandatory update.
     */
    mandatoryContinueButtonLabel?: string;
    /**
     * Flag indicating if the update description provided by the CodePush server should be displayed in the dialog box appended to the update message.
     */
    appendReleaseDescription?: boolean;
    /**
     * Optional prefix to add to the release description.
     */
    descriptionPrefix?: string;
}
/**
 * Defines the JSON format of the package diff manifest file.
 */
interface IDiffManifest {
    /** Files removed by this update relative to the previous package. */
    deletedFiles: string[];
}
/**
 * Defines the format of the DownloadProgress object, used to send periodical update notifications on the progress of the update download.
 */
interface DownloadProgress {
    /** Total size of the download. */
    totalBytes: number;
    /** Bytes received so far. */
    receivedBytes: number;
}
import xs, {Stream, Listener} from 'xstream';
import {DevToolEnabledSource} from '@cycle/run';
import debounce from 'xstream/extra/debounce';
import * as dagre from 'dagre';
import * as CircularJSON from 'circular-json';
import {ZapSpeed} from './panel/model';
import timeSpread from './utils/timeSpread';
import {SessionSettings} from './launcher';
/** Minimal view of an xstream internal producer; `type` is the operator name. */
interface InternalProducer {
  type?: string;
}
/** A node in the visualized dataflow graph, sized for the dagre layout. */
export interface StreamGraphNode {
  id: string;
  /** Role of the node in the dataflow graph. */
  type: 'source' | 'stream' | 'sink' | 'operator';
  /** Display name: driver key for sources/sinks, operator name for operators. */
  label?: string;
  /** The live stream this node represents (stripped before serialization). */
  stream: Stream<any>;
  width: number;
  height: number;
  /** Position assigned by the dagre layout. */
  x?: number;
  /** Position assigned by the dagre layout. */
  y?: number;
}
/** An edge between laid-out nodes; `points` is filled in by dagre. */
export interface StreamGraphEdge {
  label?: string;
  points?: Array<{x: number; y: number}>;
}
/** One observed event ("zap") on a node's stream. */
export interface Zap {
  id: string;
  type: 'next' | 'error' | 'complete';
  value?: any;
}
/** [width, height] pair used for node sizing. */
type Size = [number, number];
const OPERATOR_NODE_SIZE: Size = [23, 10];
const SOURCE_NODE_SIZE: Size = [23, 23];
const COMMON_NODE_SIZE: Size = [23, 23];
const SINK_NODE_SIZE: Size = [40, 30];
/**
 * Maps a zap speed setting to the millisecond spacing used when pacing
 * zap emissions for the visualization.
 */
function zapSpeedToMilliseconds(zapSpeed: ZapSpeed): number {
  const millisecondsPerSpeed = {
    slow: 1100,
    normal: 80,
    fast: 16,
  };
  return millisecondsPerSpeed[zapSpeed];
}
/**
 * Assigns a stable, incrementing id (returned as a string) to every object
 * it is asked about; the same object always maps back to the same id.
 */
class IdTable {
  private mutableIncrementingId: number;
  public map: Map<Object, number>;

  constructor() {
    this.mutableIncrementingId = 0;
    this.map = new Map<Object, number>();
  }

  public getId(thing: Object): string {
    const known = this.map.get(thing);
    if (known !== undefined) {
      return String(known);
    }
    const assigned = this.mutableIncrementingId;
    this.map.set(thing, assigned);
    this.mutableIncrementingId += 1;
    return String(assigned);
  }
}
/**
 * Registers `stream` as a graph node if it is not registered yet. Streams
 * tagged by Cycle.js as driver sources become 'source' nodes labelled with
 * the driver name; all other streams become plain 'stream' nodes.
 */
function makeSureNodeIsRegistered(
  graph: dagre.graphlib.Graph,
  idTable: IdTable,
  stream: Stream<any>,
): void {
  const id = idTable.getId(stream);
  if (graph.node(id)) {
    return;
  }
  const node: StreamGraphNode = stream['_isCycleSource']
    ? {
        id,
        type: 'source',
        label: (stream as Stream<any> & DevToolEnabledSource)._isCycleSource,
        stream: stream,
        width: SOURCE_NODE_SIZE[0],
        height: SOURCE_NODE_SIZE[1],
      }
    : {
        id,
        type: 'stream',
        stream: stream,
        width: COMMON_NODE_SIZE[0],
        height: COMMON_NODE_SIZE[1],
      };
  graph.setNode(id, node);
}
/** Adds an 'operator' node for the producer to the graph unless already present. */
function visitOperator(
  graph: dagre.graphlib.Graph,
  idTable: IdTable,
  operator: InternalProducer,
): void {
  const operatorId = idTable.getId(operator);
  if (graph.node(operatorId)) {
    return;
  }
  graph.setNode(operatorId, {
    id: operatorId,
    type: 'operator',
    label: operator.type,
    width: OPERATOR_NODE_SIZE[0],
    height: OPERATOR_NODE_SIZE[1],
  });
}
/**
 * Connects inStream -> operator -> outStream in the graph (registering the
 * stream nodes if needed), then continues walking upwards from inStream —
 * unless inStream is a Cycle.js source, which ends the traversal.
 */
function visitEdge(
  graph: dagre.graphlib.Graph,
  idTable: IdTable,
  inStream: Stream<any>,
  operator: InternalProducer,
  outStream: Stream<any>,
) {
  makeSureNodeIsRegistered(graph, idTable, inStream);
  makeSureNodeIsRegistered(graph, idTable, outStream);
  const inId = idTable.getId(inStream);
  const operatorId = idTable.getId(operator);
  const outId = idTable.getId(outStream);
  graph.setEdge(inId, operatorId, {});
  graph.setEdge(operatorId, outId, {});
  if (!inStream['_isCycleSource']) {
    traverse(graph, idTable, inStream);
  }
}
/**
 * Walks the dataflow graph backwards from `outStream` through its producer,
 * handling single-input ('ins'), multi-input ('insArr') and input-less
 * producers.
 */
function traverse(
  graph: dagre.graphlib.Graph,
  idTable: IdTable,
  outStream: Stream<any>,
): void {
  const producer = outStream._prod;
  if (!producer) {
    return;
  }
  visitOperator(graph, idTable, producer);
  if (producer['ins']) {
    visitEdge(graph, idTable, producer['ins'], producer, outStream);
  } else if (producer['insArr']) {
    const inputs: Array<Stream<any>> = producer['insArr'];
    inputs.forEach(inStream => {
      visitEdge(graph, idTable, inStream, producer, outStream);
    });
  } else {
    // Producer with no inputs: link the producer node straight to the stream.
    makeSureNodeIsRegistered(graph, idTable, outStream);
    graph.setEdge(idTable.getId(producer), idTable.getId(outStream), {});
  }
}
/**
 * Builds a dagre graph of the whole dataflow network reachable from the
 * given sinks object, then runs the dagre layout so every node receives
 * x/y coordinates.
 */
function buildGraph(sinks: Object): dagre.graphlib.Graph {
  const idTable = new IdTable();
  const graph = new dagre.graphlib.Graph();
  graph.setGraph({nodesep: 60, ranksep: 20});
  Object.keys(sinks).forEach(key => {
    const sinkStream = sinks[key];
    const sinkNode: StreamGraphNode = {
      id: idTable.getId(sinkStream),
      label: key,
      type: 'sink',
      stream: sinkStream,
      width: SINK_NODE_SIZE[0],
      height: SINK_NODE_SIZE[1],
    };
    graph.setNode(idTable.getId(sinkStream), sinkNode);
    traverse(graph, idTable, sinkStream);
  });
  dagre.layout(graph);
  return graph;
}
/** A laid-out graph together with the stream of zap batches to animate on it. */
interface Diagram {
  graph: dagre.graphlib.Graph;
  zaps$: Stream<Array<Zap>>;
}
/** Registration entry for a zappable node: its id, its stream, and its depth in the graph. */
interface ZapRecord {
  id: string;
  stream: Stream<any>;
  depth: number;
}
/**
 * Tracks, in insertion order, the graph nodes whose streams should be
 * observed for zaps, remembering each node's stream and graph depth.
 */
class ZapRegistry {
  private seenIds: Set<string>;
  private entries: Array<ZapRecord>;

  constructor() {
    this.seenIds = new Set<string>();
    this.entries = [];
  }

  /** Whether a node with this id has already been registered. */
  public has(id: string): boolean {
    return this.seenIds.has(id);
  }

  /** Records a zappable node; callers check has() first to avoid duplicates. */
  public register(id: string, stream: Stream<any>, depth: number): void {
    this.seenIds.add(id);
    this.entries.push({id, stream, depth});
  }

  get records() {
    return this.entries;
  }
}
/**
 * Prepares zapping for a laid-out graph: registers every non-operator node
 * reachable from the graph's sources, attaches debug listeners to their
 * streams, and builds a zaps$ stream that paces the raw zap events by the
 * chosen speed, interleaved with a debounced "clear" emission.
 */
function setupZapping(
  [graph, zapSpeed]: [dagre.graphlib.Graph, ZapSpeed],
): Diagram {
  const registry = new ZapRegistry();
  const sourceNodes: Array<string> = graph['sources']();
  for (const sourceId of sourceNodes) {
    zapVisit(sourceId, 0, graph, registry);
  }

  // Emits one Zap per event observed on any registered stream.
  const rawZap$ = xs.create<Zap>({
    start(listener: Listener<Zap>) {
      for (const record of registry.records) {
        const id = record.id;
        record.stream.setDebugListener({
          next: value => listener.next({id, type: 'next', value} as Zap),
          error: err => listener.next({id, type: 'error', value: err} as Zap),
          complete: () => listener.next({id, type: 'complete'} as Zap),
        });
      }
    },
    stop() {},
  });

  const actualZaps$ = rawZap$.compose(
    timeSpread(zapSpeedToMilliseconds(zapSpeed)),
  );
  // After 200ms of silence, emit an empty batch so the panel clears zaps.
  const stopZaps$: Stream<Array<any>> = actualZaps$
    .mapTo([])
    .compose(debounce(200))
    .startWith([]);
  return {graph, zaps$: xs.merge(actualZaps$, stopZaps$)};
}
/**
 * Depth-first walk from `nodeId`, registering every non-operator node with
 * its depth; a node that is already registered stops the recursion.
 */
function zapVisit(
  nodeId: string,
  depth: number,
  graph: dagre.graphlib.Graph,
  registry: ZapRegistry,
) {
  if (registry.has(nodeId)) {
    return;
  }
  const node: StreamGraphNode = graph.node(nodeId);
  if (node.type !== 'operator') {
    registry.register(nodeId, node.stream, depth);
  }
  const successors: Array<string> = graph['successors'](nodeId);
  successors.forEach(successorId => {
    zapVisit(successorId, depth + 1, graph, registry);
  });
}
/**
 * Returns an xstream operator that serializes each Diagram into a plain
 * object: the dagre graph in JSON form with the live streams stripped from
 * its nodes, tagged with the serializer id and the latest batch of zaps.
 */
function makeObjectifyGraph(id$: Stream<string>) {
  return function objectifyGraph(diagram$: Stream<Diagram>): Stream<Object> {
    return xs
      .combine(diagram$, id$)
      .map(([{graph, zaps$}, id]) => {
        const object = dagre.graphlib['json'].write(graph);
        // Streams are live objects and must not be serialized.
        object.nodes.forEach(node => {
          delete node.stream;
        });
        return zaps$.map(zaps => {
          object.zaps = zaps;
          object.id = id;
          return object;
        });
      })
      .flatten();
  };
}
/**
 * True when the given sinks object looks like a collection of xstream
 * streams: every own, truthy property must expose `setDebugListener`.
 * Falsy property values are tolerated; a null sinks object is rejected.
 */
function sinksAreXStream(sinks: Object | null): boolean {
  if (sinks === null) {
    return false;
  }
  for (const key of Object.keys(sinks)) {
    const candidate = sinks[key];
    if (candidate && typeof candidate.setDebugListener !== 'function') {
      return false;
    }
  }
  return true;
}
/** Streams consumed by the GraphSerializer component. */
interface GraphSerializerSources {
  id: Stream<string>; // unique identifier for this serializer instance
  DebugSinks: Stream<Object | null>; // the app's sinks object, or null when unavailable
  FromPanel: Stream<string>; // messages pushed from the devtool panel
  Settings: Stream<SessionSettings>; // devtool settings (carries the zap speed)
}
/** Streams produced by the GraphSerializer component. */
interface GraphSerializerSinks {
  graph: Stream<string>; // serialized graph (CircularJSON), or '' when sinks are invalid
}
/**
 * Cycle.js component that turns the app's sinks into a stream of serialized
 * graph snapshots. Invalid (non-xstream) sinks produce an empty string so
 * the panel can distinguish "no graph" from a stale one.
 */
function GraphSerializer(
  sources: GraphSerializerSources,
): GraphSerializerSinks {
  // Start with the persisted zap speed, then follow live panel updates.
  const panelZapSpeed$ = sources.FromPanel as Stream<ZapSpeed>;
  const zapSpeed$ = sources.Settings
    .map(settings => panelZapSpeed$.startWith(settings.zapSpeed))
    .flatten();
  const validSinks$ = sources.DebugSinks.filter(sinksAreXStream);
  const graph$ = validSinks$.map(buildGraph);
  const serializedGraph$ = xs
    .combine(graph$, zapSpeed$)
    .map(setupZapping)
    .compose(makeObjectifyGraph(sources.id))
    .map(object => CircularJSON.stringify(object));
  const invalid$ = sources.DebugSinks
    .filter(sinks => !sinksAreXStream(sinks))
    .mapTo('');
  return {
    graph: xs.merge(serializedGraph$, invalid$),
  };
}
// Messages pushed from the devtool panel. The panel invokes the global
// `receivePanelMessage` hook installed here, which feeds this stream.
const panelMessage$ = xs.create<string>({
  start(listener: Listener<string>) {
    window['receivePanelMessage'] = function receivePanelMessage(msg: string) {
      listener.next(msg);
    };
  },
  stop() {},
});
// Guard so the graph serializer is only wired up once per page.
let started = false;
/**
 * Starts the GraphSerializer for the given app sinks (idempotent: only the
 * first call has any effect). Each serialized graph is relayed to the
 * content script through a 'CyclejsDevToolEvent' DOM event.
 */
function startGraphSerializer(appSinks: Object | null) {
  if (started) {
    return;
  }
  const serializerSinks = GraphSerializer({
    id: xs.of(`graph-${Math.round(Math.random() * 1000000000)}`),
    DebugSinks: xs.of(appSinks),
    FromPanel: panelMessage$,
    Settings: xs.of<SessionSettings>(window['CyclejsDevToolSettings']),
  });
  serializerSinks.graph.addListener({
    next: graph => {
      // Relay the serialized graph to the content script via a DOM event.
      const event = new CustomEvent('CyclejsDevToolEvent', {detail: graph});
      document.dispatchEvent(event);
    },
    error: (err: any) => {
      console.error('Cycle.js DevTool (graph serializer):\n' + err);
      console.error(err.stack);
    },
    complete: () => {},
  });
  started = true;
}
// Expose the entry point globally so the devtool can start serialization explicitly.
window['CyclejsDevTool_startGraphSerializer'] = startGraphSerializer;
// After the first 50ms tick, either hook the serializer up to the app's
// sinks (if Cycle.js has registered them by then) or start it with null.
// NOTE(review): both branches clear the interval, so this checks exactly
// once rather than polling until window.Cyclejs appears — confirm intent.
const intervalID = setInterval(function() {
  if (window['Cyclejs'] && window['Cyclejs'].sinks) {
    clearInterval(intervalID);
    startGraphSerializer(window['Cyclejs'].sinks);
  } else {
    clearInterval(intervalID);
    startGraphSerializer(null);
  }
}, 50);
/* tslint:enable:max-file-line-count */
import * as express from 'express';
import fetch, {Response as FetchResponse, RequestInit} from 'node-fetch';
import {ApiError} from 'app/common/ApiError';
import {getSlugIfNeeded, parseSubdomainStrictly} from 'app/common/gristUrls';
import {removeTrailingSlash} from 'app/common/gutil';
import {LocalPlugin} from "app/common/plugin";
import {Document as APIDocument} from 'app/common/UserAPI';
import {Document} from "app/gen-server/entity/Document";
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {assertAccess, getTransitiveHeaders, getUserId, isAnonymousUser,
RequestWithLogin} from 'app/server/lib/Authorizer';
import {DocStatus, IDocWorkerMap} from 'app/server/lib/DocWorkerMap';
import {expressWrap} from 'app/server/lib/expressWrap';
import {getAssignmentId} from 'app/server/lib/idUtils';
import * as log from 'app/server/lib/log';
import {adaptServerUrl, addOrgToPathIfNeeded, pruneAPIResult, trustOrigin} from 'app/server/lib/requestUtils';
import {ISendAppPageOptions} from 'app/server/lib/sendAppPage';
export interface AttachOptions {
  app: express.Application;              // Express app to which to add endpoints
  middleware: express.RequestHandler[];  // Middleware to apply for all endpoints except docs
  docMiddleware: express.RequestHandler[]; // Middleware to apply for doc landing pages
  forceLogin: express.RequestHandler|null; // Method to force user to login (if logins are possible)
  docWorkerMap: IDocWorkerMap|null;      // Doc-worker assignment map; null when doc pages are served directly
  sendAppPage: (req: express.Request, resp: express.Response, options: ISendAppPageOptions) => Promise<void>; // Renders the single-page app shell
  dbManager: HomeDBManager;              // Home DB access for doc/org metadata and access checks
  plugins: LocalPlugin[];                // Plugins to expose to the client app
}
/**
* This method transforms a doc worker's public url as needed based on the request.
*
* For historic reasons, doc workers are assigned a public url at the time
* of creation. In production/staging, this is of the form:
* https://doc-worker-NNN-NNN-NNN-NNN.getgrist.com/v/VVVV/
* and in dev:
* http://localhost:NNNN/v/VVVV/
*
* Prior to support for different base domains, this was fine. Now that different
* base domains are supported, a wrinkle arises. When a web client communicates
* with a doc worker, it is important that it accesses the doc worker via a url
* containing the same base domain as the web page the client is on (for cookie
* purposes). Hence this method.
*
* If both the request and docWorkerUrl contain identifiable base domains (not localhost),
* then the base domain of docWorkerUrl is replaced with that of the request.
*
* But wait, there's another wrinkle: custom domains. In this case, we have a single
* domain available to serve a particular org from. This method will use the origin of req
* and include a /dw/doc-worker-NNN-NNN-NNN-NNN/
* (or /dw/local-NNNN/) prefix in all doc worker paths. Once this is in place, it
* will allow doc worker routing to be changed so it can be overlaid on a custom
* domain.
*
* TODO: doc worker registration could be redesigned to remove the assumption
* of a fixed base domain.
*/
/**
 * Adjusts a doc worker's public url for the given request: swaps in the
 * request's base domain (see comment above) and ensures the path carries a
 * /dw/<worker-ident>/ prefix so path-based routing is possible.
 */
function customizeDocWorkerUrl(docWorkerUrlSeed: string, req: express.Request) {
  const url = new URL(docWorkerUrlSeed);
  const workerSubdomain = parseSubdomainStrictly(url.hostname).org;
  adaptServerUrl(url, req);
  // We wish to migrate to routing doc workers by path, so insert a doc
  // worker identifier in the path (if not already present).
  if (!url.pathname.startsWith('/dw/')) {
    // When the doc worker is localhost, the port number is necessary and
    // sufficient for routing; add a /dw/local-<port> prefix for consistency.
    const workerIdent = workerSubdomain || `local-${url.port}`;
    url.pathname = `/dw/${workerIdent}${url.pathname}`;
  }
  return url.href;
}
/**
*
* Gets the worker responsible for a given assignment, and fetches a url
* from the worker.
*
* If the fetch fails, we throw an exception, unless we see enough evidence
* to unassign the worker and try again.
*
* - If GRIST_MANAGED_WORKERS is set, we assume that we've arranged
* for unhealthy workers to be removed automatically, and that if a
* fetch returns a 404 with specific content, it is proof that the
* worker is no longer in existence. So if we see a 404 with that
* specific content, we can safely de-list the worker from redis,
* and repeat.
* - If GRIST_MANAGED_WORKERS is not set, we accept a broader set
* of failures as evidence of a missing worker.
*
* The specific content of a 404 that will be treated as evidence of
* a doc worker not being present is:
* - A json format body
* - With a key called "message"
* - With the value of "message" being "document worker not present"
* In production, this is provided by a special doc-worker-* load balancer
* rule.
*
*/
// Assigns (or re-assigns) a doc worker for `assignmentId` and fetches
// `urlPath` from it, retrying with a fresh worker when the current one
// looks convincingly dead (see the contract described in the comment above).
async function getWorker(docWorkerMap: IDocWorkerMap, assignmentId: string,
                         urlPath: string, config: RequestInit = {}) {
  let docStatus: DocStatus|undefined;
  const workersAreManaged = Boolean(process.env.GRIST_MANAGED_WORKERS);
  // Retry loop: each iteration assigns a worker, tries the fetch, and either
  // returns, throws, or falls through to de-list the worker and repeat.
  for (;;) {
    docStatus = await docWorkerMap.assignDocWorker(assignmentId);
    const configWithTimeout = {timeout: 10000, ...config};
    const fullUrl = removeTrailingSlash(docStatus.docWorker.internalUrl) + urlPath;
    try {
      const resp: FetchResponse = await fetch(fullUrl, configWithTimeout);
      if (resp.ok) {
        return {
          resp,
          docStatus,
        };
      }
      if (resp.status === 403) {
        throw new ApiError("You do not have access to this document.", resp.status);
      }
      if (resp.status !== 404) {
        throw new ApiError(resp.statusText, resp.status);
      }
      // A 404 only signals a missing worker when the body is JSON with the
      // exact message below; anything else is treated as a genuine error.
      let body: any;
      try {
        body = await resp.json();
      } catch (e) {
        throw new ApiError(resp.statusText, resp.status);
      }
      if (!(body && body.message && body.message === 'document worker not present')) {
        throw new ApiError(resp.statusText, resp.status);
      }
      // This is a 404 with the expected content for a missing worker.
    } catch (e) {
      // If workers are managed, no errors merit continuing except a 404.
      // Otherwise, we continue if we see a system error (e.g. ECONNREFUSED).
      // We don't accept timeouts since there is too much potential to
      // bring down a single-worker deployment that has a hiccup.
      if (workersAreManaged || !(e.type === 'system')) {
        throw e;
      }
    }
    // Reaching here means the worker looks dead: de-list it and try again.
    log.warn(`fetch from ${fullUrl} failed convincingly, removing that worker`);
    await docWorkerMap.removeWorker(docStatus.docWorker.id);
    docStatus = undefined;
  }
}
// Registers the app-serving endpoints: home/workspace pages, the doc-worker
// discovery API, and the document landing pages (with canonical-url
// redirects and access checks).
export function attachAppEndpoint(options: AttachOptions): void {
  const {app, middleware, docMiddleware, docWorkerMap, forceLogin, sendAppPage, dbManager, plugins} = options;
  // Per-workspace URLs open the same old Home page, and it's up to the client to notice and
  // render the right workspace.
  app.get(['/', '/ws/:wsId', '/p/:page'], ...middleware, expressWrap(async (req, res) =>
    sendAppPage(req, res, {path: 'app.html', status: 200, config: {plugins}, googleTagManager: 'anon'})));
  // Tells the client which public doc-worker URL serves a given assignment.
  app.get('/api/worker/:assignmentId([^/]+)/?*', expressWrap(async (req, res) => {
    if (!trustOrigin(req, res)) { throw new Error('Unrecognized origin'); }
    res.header("Access-Control-Allow-Credentials", "true");
    if (!docWorkerMap) {
      return res.status(500).json({error: 'no worker map'});
    }
    const assignmentId = getAssignmentId(docWorkerMap, req.params.assignmentId);
    // Hitting /status both assigns a worker and verifies it is reachable.
    const {docStatus} = await getWorker(docWorkerMap, assignmentId, '/status');
    if (!docStatus) {
      return res.status(500).json({error: 'no worker'});
    }
    res.json({docWorkerUrl: customizeDocWorkerUrl(docStatus.docWorker.publicUrl, req)});
  }));
  // Handler for serving the document landing pages. Expects the following parameters:
  //   urlId, slug (optional), remainder
  // This handler is used for both "doc/urlId" and "urlId/slug" style endpoints.
  const docHandler = expressWrap(async (req, res, next) => {
    if (req.params.slug && req.params.slug === 'app.html') {
      // This can happen on a single-port configuration, since "docId/app.html" matches
      // the "urlId/slug" pattern. Luckily the "." character is not allowed in slugs.
      return next();
    }
    if (!docWorkerMap) {
      // No doc workers: serve the app shell directly from this server.
      return await sendAppPage(req, res, {path: 'app.html', status: 200, config: {plugins},
                                          googleTagManager: 'anon'});
    }
    const mreq = req as RequestWithLogin;
    const urlId = req.params.urlId;
    let doc: Document|null = null;
    try {
      const userId = getUserId(mreq);
      // Query DB for the doc metadata, to include in the page (as a pre-fetch of getDoc() call),
      // and to get fresh (uncached) access info.
      doc = await dbManager.getDoc({userId, org: mreq.org, urlId});
      const slug = getSlugIfNeeded(doc);
      const slugMismatch = (req.params.slug || null) !== (slug || null);
      const preferredUrlId = doc.urlId || doc.id;
      if (urlId !== preferredUrlId || slugMismatch) {
        // Prepare to redirect to canonical url for document.
        // Preserve any query parameters or fragments.
        const queryOrFragmentCheck = req.originalUrl.match(/([#?].*)/);
        const queryOrFragment = (queryOrFragmentCheck && queryOrFragmentCheck[1]) || '';
        const target = slug ?
          `/${preferredUrlId}/${slug}${req.params.remainder}${queryOrFragment}` :
          `/doc/${preferredUrlId}${req.params.remainder}${queryOrFragment}`;
        res.redirect(addOrgToPathIfNeeded(req, target));
        return;
      }
      // The docAuth value will be cached from the getDoc() above (or could be derived from doc).
      const docAuth = await dbManager.getDocAuthCached({userId, org: mreq.org, urlId});
      assertAccess('viewers', docAuth);
    } catch (err) {
      if (err.status === 404) {
        log.info("/:urlId/app.html did not find doc", mreq.userId, urlId, doc && doc.access, mreq.org);
        throw new ApiError('Document not found.', 404);
      } else if (err.status === 403) {
        log.info("/:urlId/app.html denied access", mreq.userId, urlId, doc && doc.access, mreq.org);
        // If the user does not have access to the document, and is anonymous, and we
        // have a login system, we may wish to redirect them to login process.
        if (isAnonymousUser(mreq) && forceLogin) {
          // First check if anonymous user has access to this org. If so, we don't propose
          // that they log in. This is the same check made in redirectToLogin() middleware.
          const result = await dbManager.getOrg({userId: getUserId(mreq)}, mreq.org || null);
          if (result.status !== 200) {
            // Anonymous user does not have any access to this org, or to this doc.
            // Redirect to log in.
            return forceLogin(req, res, next);
          }
        }
        throw new ApiError('You do not have access to this document.', 403);
      }
      throw err;
    }
    // The reason to pass through app.html fetched from docWorker is in case it is a different
    // version of Grist (could be newer or older).
    // TODO: More must be done for correct version tagging of URLs: <base href> assumes all
    // links and static resources come from the same host, but we'll have Home API, DocWorker,
    // and static resources all at hostnames different from where this page is served.
    // TODO docWorkerMain needs to serve app.html, perhaps with correct base-href already set.
    const docId = doc.id;
    const headers = {
      Accept: 'application/json',
      ...getTransitiveHeaders(req),
    };
    const {docStatus, resp} = await getWorker(docWorkerMap, docId,
                                              `/${docId}/app.html`, {headers});
    const body = await resp.json();
    // Serve the worker-provided page, pre-seeded with assignment/doc info.
    await sendAppPage(req, res, {path: "", content: body.page, tag: body.tag, status: 200,
                                 googleTagManager: 'anon', config: {
      assignmentId: docId,
      getWorker: {[docId]: customizeDocWorkerUrl(docStatus.docWorker.publicUrl, req)},
      getDoc: {[docId]: pruneAPIResult(doc as unknown as APIDocument)},
      plugins
    }});
  });
  // The * is a wildcard in express 4, rather than a regex symbol.
  // See https://expressjs.com/en/guide/routing.html
  app.get('/doc/:urlId([^/]+):remainder(*)', ...docMiddleware, docHandler);
  app.get('/:urlId([^/]{12,})/:slug([^/]+):remainder(*)',
          ...docMiddleware, docHandler);
} | the_stack |
import { DatabaseAdapter, DatabaseSession, DeleteResult, Formatter, GenericQueryResolver, OrmEntity, PatchResult } from '@deepkit/orm';
import { Changes, getPartialSerializeFunction, ReflectionClass, ReflectionKind, ReflectionVisibility, resolveForeignReflectionClass, serializer, typeOf } from '@deepkit/type';
import { MongoClient } from './client/client';
import { AggregateCommand } from './client/command/aggregate';
import { CountCommand } from './client/command/count';
import { DeleteCommand } from './client/command/delete';
import { FindCommand } from './client/command/find';
import { FindAndModifyCommand } from './client/command/findAndModify';
import { UpdateCommand } from './client/command/update';
import { convertClassQueryToMongo } from './mapping';
import { DEEP_SORT, FilterQuery, MongoQueryModel } from './query.model';
import { MongoConnection } from './client/connection';
import { MongoDatabaseAdapter } from './adapter';
import { empty } from '@deepkit/core';
import { mongoSerializer } from './mongo-serializer';
/**
 * Converts the query model's filter into a MongoDB filter document,
 * resolving `$parameter` placeholders from the model's parameters.
 * Throws when a referenced parameter has not been provided.
 */
export function getMongoFilter<T>(classSchema: ReflectionClass<T>, model: MongoQueryModel<T>): any {
    const filter = (model.filter || {}) as FilterQuery<T>;
    const resolveParameter = (name: string, value: any) => {
        const resolved = model.parameters[value];
        if (undefined === resolved) {
            throw new Error(`Parameter ${value} not defined in ${classSchema.getClassName()} query.`);
        }
        return resolved;
    };
    return convertClassQueryToMongo(classSchema, filter, {}, { $parameter: resolveParameter });
}
/** Result row shape of the `$count` aggregation stage used by count(). */
interface CountSchema {
    count: number;
}
export class MongoQueryResolver<T extends OrmEntity> extends GenericQueryResolver<T, DatabaseAdapter, MongoQueryModel<T>> {
protected countSchema = ReflectionClass.from(typeOf<CountSchema>());
constructor(
classSchema: ReflectionClass<T>,
protected session: DatabaseSession<MongoDatabaseAdapter>,
protected client: MongoClient,
) {
super(classSchema, session);
}
async has(model: MongoQueryModel<T>): Promise<boolean> {
return await this.count(model) > 0;
}
protected getPrimaryKeysProjection(classSchema: ReflectionClass<any>) {
const pk: { [name: string]: 1 | 0 } = { _id: 0 };
for (const property of classSchema.getPrimaries()) {
pk[property.name] = 1;
}
return pk;
}
protected async fetchIds(queryModel: MongoQueryModel<T>, limit: number = 0, connection: MongoConnection): Promise<any[]> {
const primaryKeyName = this.classSchema.getPrimary().name;
const projection = { [primaryKeyName]: 1 as const };
if (queryModel.hasJoins()) {
const pipeline = this.buildAggregationPipeline(queryModel);
if (limit) pipeline.push({ $limit: limit });
pipeline.push({ $project: projection });
const command = new AggregateCommand(this.classSchema, pipeline);
command.partial = true;
const items = await connection.execute(command);
return items.map(v => v[primaryKeyName]);
} else {
const mongoFilter = getMongoFilter(this.classSchema, queryModel);
const items = await connection.execute(new FindCommand(this.classSchema, mongoFilter, projection, undefined, limit || queryModel.limit, queryModel.skip));
return items.map(v => v[primaryKeyName]);
}
}
public async delete(queryModel: MongoQueryModel<T>, deleteResult: DeleteResult<T>): Promise<void> {
const connection = await this.client.getConnection(undefined, this.session.assignedTransaction);
try {
const primaryKeys = await this.fetchIds(queryModel, queryModel.limit, connection);
if (primaryKeys.length === 0) return;
deleteResult.modified = primaryKeys.length;
deleteResult.primaryKeys = primaryKeys;
const primaryKeyName = this.classSchema.getPrimary().name;
const query = convertClassQueryToMongo(this.classSchema, { [primaryKeyName]: { $in: primaryKeys } } as FilterQuery<T>);
await connection.execute(new DeleteCommand(this.classSchema, query, queryModel.limit));
} finally {
connection.release();
}
}
public async patch(model: MongoQueryModel<T>, changes: Changes<T>, patchResult: PatchResult<T>): Promise<void> {
if (model.hasJoins()) {
throw new Error('Not implemented: Use aggregate to retrieve ids, then do the query');
}
const filter = getMongoFilter(this.classSchema, model) || {};
const partialSerialize = getPartialSerializeFunction(this.classSchema.type, mongoSerializer.serializeRegistry);
const partialDeserialize = getPartialSerializeFunction(this.classSchema.type, serializer.deserializeRegistry);
const u: any = {};
if (changes.$set) u.$set = changes.$set;
if (changes.$unset) u.$set = changes.$unset;
if (changes.$inc) u.$inc = changes.$inc;
if (u.$set) {
u.$set = partialSerialize(u.$set);
}
const primaryKeyName = this.classSchema.getPrimary().name;
const returning = new Set([...model.returning, ...changes.getReturning()]);
const connection = await this.client.getConnection(undefined, this.session.assignedTransaction);
try {
if (model.limit === 1) {
const command = new FindAndModifyCommand(
this.classSchema,
filter,
u
);
command.returnNew = true;
command.fields = [primaryKeyName, ...returning];
const res = await connection.execute(command);
patchResult.modified = res.value ? 1 : 0;
if (res.value) {
const converted = partialDeserialize(res.value) as any;
patchResult.primaryKeys = [converted[primaryKeyName]];
for (const name of returning) {
patchResult.returning[name] = [converted[name]];
}
}
return;
}
patchResult.modified = await connection.execute(new UpdateCommand(this.classSchema, [{
q: filter,
u: u,
multi: !model.limit
}]));
if (!returning.size) return;
const projection: { [name: string]: 1 | 0 } = {};
projection[primaryKeyName] = 1;
for (const name of returning) {
projection[name] = 1;
patchResult.returning[name] = [];
}
const items = await connection.execute(new FindCommand(this.classSchema, filter, projection, {}, model.limit, model.skip));
for (const item of items) {
const converted = partialDeserialize(item);
patchResult.primaryKeys.push(converted[primaryKeyName]);
for (const name of returning) {
patchResult.returning[name].push(converted[name]);
}
}
} finally {
connection.release();
}
}
public async count(queryModel: MongoQueryModel<T>) {
const connection = await this.client.getConnection(undefined, this.session.assignedTransaction);
try {
//count command is not supported for transactions
if (queryModel.hasJoins() || this.session.assignedTransaction) {
const pipeline = this.buildAggregationPipeline(queryModel);
pipeline.push({ $count: 'count' });
const command = new AggregateCommand<any, CountSchema>(this.classSchema, pipeline, this.countSchema);
const items = await connection.execute(command);
return items.length ? items[0].count : 0;
} else {
const query = getMongoFilter(this.classSchema, queryModel);
if (empty(query)) {
//when a query is empty, mongo returns an estimated count from meta-data.
//we don't want estimates, we want deterministic results, so we add a query
const primaryKey = this.classSchema.getPrimary().name;
query[primaryKey] = { $nin: [] };
}
return await connection.execute(new CountCommand(
this.classSchema,
query,
queryModel.limit,
queryModel.skip,
));
}
} finally {
connection.release();
}
}
protected getSchemaWithJoins(): ReflectionClass<any> {
const jit = this.classSchema.getJitContainer();
if (jit.ormMongoSchemaWithJoins) return jit.ormMongoSchemaWithJoins;
const schema = this.classSchema.clone();
for (const property of schema.getProperties().slice()) {
if (property.isReference() || property.isBackReference()) {
const name = '__ref_' + property.name;
schema.addProperty({
name,
type: { kind: ReflectionKind.any },
visibility: ReflectionVisibility.public
});
}
}
return jit.ormMongoSchemaWithJoins = schema;
}
public async findOneOrUndefined(model: MongoQueryModel<T>): Promise<T | undefined> {
const connection = await this.client.getConnection(undefined, this.session.assignedTransaction);
try {
if (model.hasJoins() || model.isAggregate()) {
const pipeline = this.buildAggregationPipeline(model);
pipeline.push({ $limit: 1 });
const resultsSchema = model.isAggregate() ? this.getCachedAggregationSchema(model) : this.getSchemaWithJoins();
const command = new AggregateCommand(this.classSchema, pipeline, resultsSchema);
command.partial = model.isPartial();
const items = await connection.execute(command);
if (items.length) {
const formatter = this.createFormatter(model.withIdentityMap);
return formatter.hydrate(model, items[0]);
}
} else {
const items = await connection.execute(new FindCommand(
this.classSchema,
getMongoFilter(this.classSchema, model),
this.getProjection(this.classSchema, model.select),
this.getSortFromModel(model.sort),
1,
model.skip,
));
if (items.length) {
const formatter = this.createFormatter(model.withIdentityMap);
return formatter.hydrate(model, items[0]);
}
}
return;
} finally {
connection.release();
}
}
protected getCachedAggregationSchema(model: MongoQueryModel<T>): ReflectionClass<any> {
const jit = this.classSchema.getJitContainer();
const keys: string[] = [...model.groupBy.values()];
for (const [g, a] of model.aggregate.entries()) {
keys.push(g + ':' + a.func);
}
const cacheKey = 'ormMongoAggregation' + keys.join('/');
if (jit[cacheKey]) return jit[cacheKey];
const schema = this.getSchemaWithJoins().clone();
for (const g of model.groupBy.values()) {
schema.addProperty({
name: g,
type: { kind: ReflectionKind.any },
visibility: ReflectionVisibility.public
});
}
for (const g of model.aggregate.keys()) {
schema.addProperty({
name: g,
type: { kind: ReflectionKind.any },
visibility: ReflectionVisibility.public
});
}
return jit[cacheKey] = schema;
}
public async find(model: MongoQueryModel<T>): Promise<T[]> {
const formatter = this.createFormatter(model.withIdentityMap);
const connection = await this.client.getConnection(undefined, this.session.assignedTransaction);
try {
if (model.hasJoins() || model.isAggregate()) {
const pipeline = this.buildAggregationPipeline(model);
const resultsSchema = model.isAggregate() ? this.getCachedAggregationSchema(model) : this.getSchemaWithJoins();
const command = new AggregateCommand(this.classSchema, pipeline, resultsSchema);
command.partial = model.isPartial();
const items = await connection.execute(command);
if (model.isAggregate()) {
return items;
}
return items.map(v => formatter.hydrate(model, v));
} else {
const items = await connection.execute(new FindCommand(
this.classSchema,
getMongoFilter(this.classSchema, model),
this.getProjection(this.classSchema, model.select),
this.getSortFromModel(model.sort),
model.limit,
model.skip,
));
return items.map(v => formatter.hydrate(model, v));
}
} finally {
connection.release();
}
}
protected buildAggregationPipeline(model: MongoQueryModel<T>) {
const joinRefs: string[] = [];
const handleJoins = <T>(pipeline: any[], query: MongoQueryModel<T>, schema: ReflectionClass<any>) => {
for (const join of query.joins) {
//refs are deserialized as `any` and then further deserialized using the default serializer
join.as = '__ref_' + join.propertySchema.name;
joinRefs.push(join.as);
const foreignSchema = resolveForeignReflectionClass(join.propertySchema);
const joinPipeline: any[] = [];
if (join.propertySchema.isBackReference()) {
if (join.propertySchema.getBackReference().via) {
} else {
const backReference = foreignSchema.findReverseReference(
join.classSchema.getClassType(),
join.propertySchema,
);
joinPipeline.push({
$match: { $expr: { $eq: ['$' + backReference.getForeignKeyName(), '$$foreign_id'] } }
});
}
} else {
joinPipeline.push({
$match: { $expr: { $eq: ['$' + join.foreignPrimaryKey.name, '$$foreign_id'] } }
});
}
if (join.query.model.hasJoins()) {
handleJoins(joinPipeline, join.query.model, foreignSchema);
}
if (join.query.model.filter) joinPipeline.push({ $match: getMongoFilter(join.query.classSchema, join.query.model) });
if (join.query.model.sort) joinPipeline.push({ $sort: this.getSortFromModel(join.query.model.sort) });
if (join.query.model.skip) joinPipeline.push({ $skip: join.query.model.skip });
if (join.query.model.limit) joinPipeline.push({ $limit: join.query.model.limit });
if (join.populate) {
const projection = this.getProjection(join.query.classSchema, join.query.model.select);
// if (!join.classSchema.hasProperty('_id') || (join.query.model.isPartial() && !join.query.model.isSelected('_id'))) {
// project['_id'] = 0;
// }
if (projection) joinPipeline.push({ $project: projection });
} else {
//not populated, so only fetch primary key.
const projection = this.getPrimaryKeysProjection(foreignSchema);
joinPipeline.push({ $project: projection });
}
if (join.propertySchema.isBackReference()) {
if (join.propertySchema.getBackReference().via) {
//many-to-many
const viaClassSchema = ReflectionClass.from(join.propertySchema.getBackReference().via);
const subAs = join.propertySchema.name;
const backReference = viaClassSchema.findReverseReference(
join.classSchema.getClassType(),
join.propertySchema,
//mappedBy is not for pivot tables. We would need 2 different mappedBy
// join.propertySchema.backReference.mappedBy as string
);
pipeline.push({
$lookup: {
from: this.client.resolveCollectionName(viaClassSchema),
let: { localField: '$' + join.classSchema.getPrimary().name },
pipeline: [
{ $match: { $expr: { $eq: ['$' + backReference.getForeignKeyName(), '$$localField'] } } }
],
as: subAs,
},
});
const foreignSchema = resolveForeignReflectionClass(join.propertySchema);
const backReferenceForward = viaClassSchema.findReverseReference(
foreignSchema.getClassType(),
join.propertySchema,
//mappedBy is not for pivot tables. We would need 2 different mappedBy
// join.propertySchema.backReference.mappedBy as string
);
pipeline.push({
$addFields: { [subAs]: '$' + subAs + '.' + backReferenceForward.getForeignKeyName() },
});
pipeline.push({
$lookup: {
from: this.client.resolveCollectionName(foreignSchema),
let: { localField: '$' + subAs },
pipeline: [
{ $match: { $expr: { $in: ['$' + foreignSchema.getPrimary().name, '$$localField'] } } }
].concat(joinPipeline),
as: join.as,
},
});
//important to unset the actual property in the database since its type is incompatible with the declared type in TS. (foreign key vs objects)
pipeline.push({
$unset: [join.propertySchema.name],
});
} else {
//one-to-many
pipeline.push({
$lookup: {
from: this.client.resolveCollectionName(foreignSchema),
let: { foreign_id: '$' + join.classSchema.getPrimary().name },
pipeline: joinPipeline,
as: join.as,
},
});
}
} else {
pipeline.push({
$lookup: {
from: this.client.resolveCollectionName(foreignSchema),
let: { foreign_id: '$' + join.propertySchema.getForeignKeyName() },
pipeline: joinPipeline,
as: join.as,
},
});
}
if (join.propertySchema.isArray()) {
if (join.type === 'inner') {
pipeline.push({
$match: { [join.as]: { $ne: [] } }
});
}
} else {
//for *toOne relations, since mongodb joins always as array
pipeline.push({
$unwind: {
path: '$' + join.as,
preserveNullAndEmptyArrays: join.type === 'left'
}
});
}
}
};
const pipeline: any[] = [];
handleJoins(pipeline, model, this.classSchema);
if (model.filter) pipeline.push({ $match: getMongoFilter(this.classSchema, model) });
if (model.isAggregate()) {
const group: any = { _id: {} };
const project: any = {};
for (const g of model.groupBy.values()) {
group._id[g] = '$' + g;
project[g] = '$_id.' + g;
}
for (const [as, a] of model.aggregate.entries()) {
if (a.func === 'sum') {
group[as] = { $sum: '$' + a.property.name };
} else if (a.func === 'min') {
group[as] = { $min: '$' + a.property.name };
} else if (a.func === 'max') {
group[as] = { $max: '$' + a.property.name };
} else if (a.func === 'avg') {
group[as] = { $avg: '$' + a.property.name };
} else if (a.func === 'count') {
group[as] = { $sum: 1 };
} else if (a.func === 'group_concat') {
group[as] = { $push: '$' + a.property.name };
}
project[as] = 1;
}
pipeline.push({ $group: group });
pipeline.push({ $project: project });
}
if (model.sort) pipeline.push({ $sort: this.getSortFromModel(model.sort) });
if (model.skip) pipeline.push({ $skip: model.skip });
if (model.limit) pipeline.push({ $limit: model.limit });
if (!model.isAggregate()) {
const projection = this.getProjection(this.classSchema, model.select);
if (projection) {
for (const name of joinRefs) {
(projection as any)[name] = 1;
}
}
if (projection) pipeline.push({ $project: projection });
}
return pipeline;
}
/**
* Returns undefined when no selection limitation has happened. When non-undefined
* the mongo driver returns a t.partial.
*/
protected getProjection<T>(classSchema: ReflectionClass<any>, select: Set<string>): { [name: string]: 0 | 1 } | undefined {
const res: { [name: string]: 0 | 1 } = {};
//as soon as we provide a {} to find/aggregate command, it triggers t.partial()
if (select.size) {
res['_id'] = 0;
for (const v of select.values()) {
(res as any)[v] = 1;
}
// for (const property of this.classSchema.getPrimaries()) {
// (res as any)[property.name] = 1;
// }
return res;
} else {
// for (const v of classSchema.getPropertiesMap().keys()) {
// (res as any)[v] = 1;
// }
return undefined;
}
}
protected createFormatter(withIdentityMap: boolean = false) {
return new Formatter(
this.classSchema,
serializer,
this.session.getHydrator(),
withIdentityMap ? this.session.identityMap : undefined
);
}
protected getSortFromModel<T>(modelSort?: DEEP_SORT<T>) {
const sort: { [name: string]: -1 | 1 | { $meta: 'textScore' } } = {};
if (modelSort) {
for (const [i, v] of Object.entries(modelSort)) {
sort[i] = v === 'asc' ? 1 : (v === 'desc' ? -1 : v);
}
}
return sort;
}
} | the_stack |
import type { LensRendererExtension } from "../../../extensions/lens-renderer-extension";
import rendererExtensionsInjectable from "../../../extensions/renderer-extensions.injectable";
import currentlyInClusterFrameInjectable from "../../routes/currently-in-cluster-frame.injectable";
import { extensionRegistratorInjectionToken } from "../../../extensions/extension-loader/extension-registrator-injection-token";
import type { IObservableArray } from "mobx";
import { computed, observable, runInAction } from "mobx";
import { renderFor } from "./renderFor";
import React from "react";
import { Router } from "react-router";
import { Observer } from "mobx-react";
import subscribeStoresInjectable from "../../kube-watch-api/subscribe-stores.injectable";
import allowedResourcesInjectable from "../../../common/cluster-store/allowed-resources.injectable";
import type { RenderResult } from "@testing-library/react";
import { fireEvent } from "@testing-library/react";
import type { KubeResource } from "../../../common/rbac";
import { Sidebar } from "../layout/sidebar";
import { getDisForUnitTesting } from "../../../test-utils/get-dis-for-unit-testing";
import type { DiContainer } from "@ogre-tools/injectable";
import clusterStoreInjectable from "../../../common/cluster-store/cluster-store.injectable";
import type { ClusterStore } from "../../../common/cluster-store/cluster-store";
import mainExtensionsInjectable from "../../../extensions/main-extensions.injectable";
import currentRouteComponentInjectable from "../../routes/current-route-component.injectable";
import { pipeline } from "@ogre-tools/fp";
import { flatMap, compact, join, get, filter } from "lodash/fp";
import preferenceNavigationItemsInjectable from "../+preferences/preferences-navigation/preference-navigation-items.injectable";
import navigateToPreferencesInjectable from "../../../common/front-end-routing/routes/preferences/navigate-to-preferences.injectable";
import type { MenuItemOpts } from "../../../main/menu/application-menu-items.injectable";
import applicationMenuItemsInjectable from "../../../main/menu/application-menu-items.injectable";
import type { MenuItem, MenuItemConstructorOptions } from "electron";
import storesAndApisCanBeCreatedInjectable from "../../stores-apis-can-be-created.injectable";
import navigateToHelmChartsInjectable from "../../../common/front-end-routing/routes/cluster/helm/charts/navigate-to-helm-charts.injectable";
import hostedClusterInjectable from "../../../common/cluster-store/hosted-cluster.injectable";
import { ClusterFrameContext } from "../../cluster-frame-context/cluster-frame-context";
import type { Cluster } from "../../../common/cluster/cluster";
import { KubeObjectStore } from "../../../common/k8s-api/kube-object.store";
import clusterFrameContextInjectable from "../../cluster-frame-context/cluster-frame-context.injectable";
import startMainApplicationInjectable from "../../../main/start-main-application/start-main-application.injectable";
import startFrameInjectable from "../../start-frame/start-frame.injectable";
import { flushPromises } from "../../../common/test-utils/flush-promises";
import type { NamespaceStore } from "../+namespaces/store";
import namespaceStoreInjectable from "../+namespaces/store.injectable";
import historyInjectable from "../../navigation/history.injectable";
/** Hook invoked with both DI containers; may be synchronous or async. */
type Callback = (dis: DiContainers) => void | Promise<void>;

/**
 * Fluent test-harness API returned by getApplicationBuilder().
 * Configuration methods return the builder for chaining; render() boots
 * the application and returns the testing-library render result.
 */
export interface ApplicationBuilder {
  /** The underlying renderer/main DI containers, for direct overrides in tests. */
  dis: DiContainers;
  /** Switches the harness to simulate running inside a cluster frame. */
  setEnvironmentToClusterFrame: () => ApplicationBuilder;
  /** Registers and enables renderer extensions (works before or after render). */
  addExtensions: (...extensions: LensRendererExtension[]) => Promise<ApplicationBuilder>;
  /** Marks a kube resource as allowed; only valid in the cluster-frame environment. */
  allowKubeResource: (resourceName: KubeResource) => ApplicationBuilder;
  /** Queues a callback to run before the main application starts. */
  beforeApplicationStart: (callback: Callback) => ApplicationBuilder;
  /** Queues a callback to run after startup but before React rendering. */
  beforeRender: (callback: Callback) => ApplicationBuilder;
  /** Starts main + renderer and renders the frame. */
  render: () => Promise<RenderResult>;
  applicationMenu: {
    /** Clicks an application menu item by dot-separated id path (e.g. "file.quit"). */
    click: (path: string) => Promise<void>;
  };
  preferences: {
    close: () => void;
    navigate: () => void;
    navigation: {
      /** Clicks a preferences navigation tab by its id. */
      click: (id: string) => void;
    };
  };
  helmCharts: {
    navigate: () => void;
  };
}

/** The pair of DI containers driving the simulated application. */
interface DiContainers {
  rendererDi: DiContainer;
  mainDi: DiContainer;
}

/** Behavior that differs between the root application frame and a cluster frame. */
interface Environment {
  renderSidebar: () => React.ReactNode;
  onAllowKubeResource: () => void;
}
/**
 * Builds a unit-test harness that boots both the "main" and "renderer" DI
 * containers, lets a test configure extensions, kube permissions and
 * lifecycle callbacks, and finally renders the application frame with
 * react-testing-library. See ApplicationBuilder for the public surface.
 */
export const getApplicationBuilder = () => {
  // Fresh DI containers per builder so tests stay isolated from each other.
  const { rendererDi, mainDi } = getDisForUnitTesting({
    doGeneralOverrides: true,
  });
  const dis = { rendererDi, mainDi };

  // Minimal ClusterStore stub: no clusters are known in unit tests.
  const clusterStoreStub = {
    provideInitialFromMain: () => {},
    getById: (): null => null,
  } as unknown as ClusterStore;

  rendererDi.override(clusterStoreInjectable, () => clusterStoreStub);
  rendererDi.override(storesAndApisCanBeCreatedInjectable, () => true);
  mainDi.override(clusterStoreInjectable, () => clusterStoreStub);

  // Callbacks queued via beforeApplicationStart()/beforeRender(); they are
  // awaited in registration order inside render().
  const beforeApplicationStartCallbacks: Callback[] = [];
  const beforeRenderCallbacks: Callback[] = [];

  // Extensions enabled through addExtensions(); observable so UI reacts.
  const extensionsState = observable.array<LensRendererExtension>();

  // Kube store subscriptions become a no-op (returns a no-op unsubscriber).
  rendererDi.override(subscribeStoresInjectable, () => () => () => {});

  // Two simulated environments: the root application frame, and a cluster
  // frame (renders the sidebar and permits allowKubeResource()).
  const environments = {
    application: {
      renderSidebar: () => null,
      onAllowKubeResource: () => {
        throw new Error(
          "Tried to allow kube resource when environment is not cluster frame.",
        );
      },
    } as Environment,
    clusterFrame: {
      renderSidebar: () => <Sidebar />,
      onAllowKubeResource: () => {},
    } as Environment,
  };

  let environment = environments.application;

  // Read lazily so setEnvironmentToClusterFrame() takes effect even though
  // it is called after these overrides are registered.
  rendererDi.override(
    currentlyInClusterFrameInjectable,
    () => environment === environments.clusterFrame,
  );
  rendererDi.override(rendererExtensionsInjectable, () =>
    computed(() => extensionsState),
  );
  mainDi.override(mainExtensionsInjectable, () =>
    computed(() => []),
  );

  // Populated by setEnvironmentToClusterFrame() / render() respectively.
  let allowedResourcesState: IObservableArray<KubeResource>;
  let rendered: RenderResult;

  const builder: ApplicationBuilder = {
    dis,

    applicationMenu: {
      // Finds a clickable menu item by its dot-separated id path, fires its
      // click handler, then flushes pending promises so effects settle.
      click: async (path: string) => {
        const applicationMenuItems = mainDi.inject(
          applicationMenuItemsInjectable,
        );

        // Flatten the menu tree and keep only items with a click handler.
        const menuItems = pipeline(
          applicationMenuItems.get(),
          flatMap(toFlatChildren(null)),
          filter((menuItem) => !!menuItem.click),
        );

        const menuItem = menuItems.find((menuItem) => menuItem.path === path);

        if (!menuItem) {
          const availableIds = menuItems.map(get("path")).join('", "');

          throw new Error(
            `Tried to click application menu item with ID "${path}" which does not exist. Available IDs are: "${availableIds}"`,
          );
        }

        // Electron click handlers receive (menuItem, window, event); fake
        // the MenuItem argument well enough for the handler under test.
        menuItem.click?.(
          {
            menu: null as never,
            commandId: 0,
            ...menuItem,
          } as MenuItem,
          undefined,
          {},
        );

        await flushPromises();
      },
    },

    preferences: {
      close: () => {
        const link = rendered.getByTestId("close-preferences");

        fireEvent.click(link);
      },

      navigate: () => {
        const navigateToPreferences = rendererDi.inject(navigateToPreferencesInjectable);

        navigateToPreferences();
      },

      navigation: {
        click: (id: string) => {
          const link = rendered.queryByTestId(`tab-link-for-${id}`);

          if (!link) {
            // Build a helpful error listing every registered tab id.
            const preferencesNavigationItems = rendererDi.inject(
              preferenceNavigationItemsInjectable,
            );

            const availableIds = preferencesNavigationItems
              .get()
              .map(get("id"));

            throw new Error(
              `Tried to click navigation item "${id}" which does not exist in preferences. Available IDs are "${availableIds.join('", "')}"`,
            );
          }

          fireEvent.click(link);
        },
      },
    },

    helmCharts: {
      navigate: () => {
        const navigateToHelmCharts = rendererDi.inject(navigateToHelmChartsInjectable);

        navigateToHelmCharts();
      },
    },

    setEnvironmentToClusterFrame: () => {
      environment = environments.clusterFrame;

      // Allowed kube resources start empty; allowKubeResource() adds to it.
      allowedResourcesState = observable.array();

      rendererDi.override(allowedResourcesInjectable, () =>
        computed(() => new Set([...allowedResourcesState])),
      );

      // Stub out the cluster + namespace context a cluster frame expects.
      const clusterStub = {
        accessibleNamespaces: [],
      } as unknown as Cluster;

      const namespaceStoreStub = {
        contextNamespaces: [],
      } as unknown as NamespaceStore;

      const clusterFrameContextFake = new ClusterFrameContext(
        clusterStub,
        {
          namespaceStore: namespaceStoreStub,
        },
      );

      rendererDi.override(namespaceStoreInjectable, () => namespaceStoreStub);
      rendererDi.override(hostedClusterInjectable, () => clusterStub);
      rendererDi.override(clusterFrameContextInjectable, () => clusterFrameContextFake);

      // Todo: get rid of global state.
      KubeObjectStore.defaultContext.set(clusterFrameContextFake);

      return builder;
    },

    addExtensions: async (...extensions) => {
      const extensionRegistrators = rendererDi.injectMany(
        extensionRegistratorInjectionToken,
      );

      const addAndEnableExtensions = async () => {
        // Run every registrator for every extension before enabling them.
        const registratorPromises = extensions.flatMap((extension) =>
          extensionRegistrators.map((registrator) => registrator(extension, 1)),
        );

        await Promise.all(registratorPromises);

        runInAction(() => {
          extensions.forEach((extension) => {
            extensionsState.push(extension);
          });
        });
      };

      // If already rendered, enable immediately; otherwise defer until
      // just before render so startup ordering stays realistic.
      if (rendered) {
        await addAndEnableExtensions();
      } else {
        builder.beforeRender(addAndEnableExtensions);
      }

      return builder;
    },

    allowKubeResource: (resourceName) => {
      // Throws unless setEnvironmentToClusterFrame() was called first.
      environment.onAllowKubeResource();

      runInAction(() => {
        allowedResourcesState.push(resourceName);
      });

      return builder;
    },

    beforeApplicationStart(callback: (dis: DiContainers) => void) {
      beforeApplicationStartCallbacks.push(callback);

      return builder;
    },

    beforeRender(callback: (dis: DiContainers) => void) {
      beforeRenderCallbacks.push(callback);

      return builder;
    },

    async render() {
      // Lifecycle: pre-start callbacks -> main process -> renderer frame
      // -> pre-render callbacks -> React render.
      for (const callback of beforeApplicationStartCallbacks) {
        await callback(dis);
      }

      const startMainApplication = mainDi.inject(startMainApplicationInjectable);

      await startMainApplication();

      const startFrame = rendererDi.inject(startFrameInjectable);

      await startFrame();

      const render = renderFor(rendererDi);

      const history = rendererDi.inject(historyInjectable);
      const currentRouteComponent = rendererDi.inject(currentRouteComponentInjectable);

      for (const callback of beforeRenderCallbacks) {
        await callback(dis);
      }

      rendered = render(
        <Router history={history}>
          {environment.renderSidebar()}

          <Observer>
            {() => {
              const Component = currentRouteComponent.get();

              if (!Component) {
                return null;
              }

              return <Component />;
            }}
          </Observer>
        </Router>,
      );

      return rendered;
    },
  };

  return builder;
};
export type ToFlatChildren = (opts: MenuItemConstructorOptions) => (MenuItemOpts & { path: string })[];
function toFlatChildren(parentId: string | null | undefined): ToFlatChildren {
return ({ submenu = [], ...menuItem }) => [
{
...menuItem,
path: pipeline([parentId, menuItem.id], compact, join(".")),
},
...(
Array.isArray(submenu)
? submenu.flatMap(toFlatChildren(menuItem.id))
: [{
...submenu,
path: pipeline([parentId, menuItem.id], compact, join(".")),
}]
),
];
} | the_stack |
import * as cdk from '@aws-cdk/core';
import { CustomResource, Duration } from '@aws-cdk/core';
import * as cr from '@aws-cdk/custom-resources';
import { CloudFrontWebDistribution, OriginAccessIdentity, PriceClass, HttpVersion } from '@aws-cdk/aws-cloudfront';
import {
CfnUserPoolUser,
UserPoolClient,
UserPool,
CfnIdentityPool,
CfnIdentityPoolRoleAttachment,
} from '@aws-cdk/aws-cognito';
import { CanonicalUserPrincipal } from '@aws-cdk/aws-iam';
import { BucketEncryption } from '@aws-cdk/aws-s3';
import { requireProperty } from '@aws-cdk/core';
import ddb = require('@aws-cdk/aws-dynamodb');
import iam = require('@aws-cdk/aws-iam');
import cloudfront = require('@aws-cdk/aws-cloudfront');
import s3deploy = require('@aws-cdk/aws-s3-deployment');
import apigateway = require('@aws-cdk/aws-apigateway');
import lambda = require('@aws-cdk/aws-lambda');
import s3 = require('@aws-cdk/aws-s3');
import uuid = require('short-uuid');
require('dotenv').config();
export interface MTAStackProps {
email: string;
description: string;
}
/**
 * CDK stack for the Medical Transcription Analysis demo: S3 storage and
 * web-app buckets behind CloudFront, Cognito user/identity pools, DynamoDB
 * tables, Lambda processors (API + Athena custom resource), and an API
 * Gateway REST API secured by the Cognito user pool.
 */
export class MedicalTranscriptionAnalysisStack extends cdk.Stack {
  // Random suffix available for globally-unique resource names.
  uuid: string;
  // Prefixes a resource name with the stack id, lower-cased.
  resourceName: (name: any) => string;

  constructor(scope: cdk.Construct, id: string, props: MTAStackProps) {
    super(scope, id, props);

    this.resourceName = (name: any) => `${id}-${name}`.toLowerCase();

    this.uuid = uuid.generate();

    // Permissive CORS so the browser app can upload/download directly.
    const corsRule = {
      allowedOrigins: ['*'],
      allowedMethods: [
        s3.HttpMethods.HEAD,
        s3.HttpMethods.GET,
        s3.HttpMethods.PUT,
        s3.HttpMethods.POST,
        s3.HttpMethods.DELETE,
      ],
      maxAge: 3000,
      exposedHeaders: ['ETag'],
      allowedHeaders: ['*'],
    };

    //S3 Bucket for Transcribe, Comprehend, and Audio
    const storageS3Bucket = new s3.Bucket(this, this.resourceName('storageS3Bucket'), {
      websiteIndexDocument: 'index.html',
      cors: [corsRule],
      // blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, change back
      encryption: BucketEncryption.S3_MANAGED,
    });

    // ### Client ###
    // Hosts the compiled web app; only reachable through CloudFront (OAI).
    const webAppS3Bucket = new s3.Bucket(this, this.resourceName('webAppS3Bucket'), {
      websiteIndexDocument: 'index.html',
      cors: [corsRule],
      blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL,
      encryption: BucketEncryption.S3_MANAGED,
    });

    const oai = new OriginAccessIdentity(this, 'mta-oai', {
      comment: 'Origin Access Identity for Medical Transcription Analysis web stack bucket cloudfront distribution',
    });

    // SPA-style distribution: 404s fall back to index.html with HTTP 200.
    const distribution = new CloudFrontWebDistribution(this, 'mta-cfront', {
      originConfigs: [
        {
          behaviors: [{ isDefaultBehavior: true }],
          s3OriginSource: {
            s3BucketSource: webAppS3Bucket,
            originAccessIdentity: oai,
          },
        },
      ],
      errorConfigurations: [
        {
          errorCode: 404,
          responseCode: 200,
          errorCachingMinTtl: 5,
          responsePagePath: '/index.html',
        },
      ],
      priceClass: PriceClass.PRICE_CLASS_100,
      httpVersion: HttpVersion.HTTP2,
      enableIpV6: true,
      defaultRootObject: 'index.html',
    });

    // Let the OAI read the web-app bucket.
    const cloudfrontPolicyStatement = new iam.PolicyStatement({
      actions: ['s3:GetBucket*', 's3:GetObject*', 's3:List*'],
      resources: [webAppS3Bucket.bucketArn, `${webAppS3Bucket.bucketArn}/*`],
      principals: [new CanonicalUserPrincipal(oai.cloudFrontOriginAccessIdentityS3CanonicalUserId)],
    });

    webAppS3Bucket.addToResourcePolicy(cloudfrontPolicyStatement);

    // Let the OAI read/write the storage bucket.
    const cloudfrontStorageBucketPolicyStatement = new iam.PolicyStatement({
      actions: ['s3:GetBucket*', 's3:GetObject*', 's3:List*', 's3:PutObject'],
      resources: [storageS3Bucket.bucketArn, `${storageS3Bucket.bucketArn}/*`],
      principals: [new CanonicalUserPrincipal(oai.cloudFrontOriginAccessIdentityS3CanonicalUserId)],
    });

    // NOTE(review): this repeats the addToResourcePolicy call just above
    // with the same statement — looks like an accidental duplicate.
    webAppS3Bucket.addToResourcePolicy(cloudfrontPolicyStatement);
    storageS3Bucket.addToResourcePolicy(cloudfrontStorageBucketPolicyStatement);

    // ####### Cognito User Authentication #######
    const mtaUserPool = new UserPool(this, 'mta-user-pool', {
      userPoolName: 'mta-user-pool',
      autoVerify: { email: true },
      passwordPolicy: {
        minLength: 8,
        requireUppercase: true,
        requireDigits: true,
        requireSymbols: true,
      },
      // Invitation email carries temp credentials and the CloudFront URL.
      userInvitation: {
        emailSubject: 'Your MTA login',
        emailBody: `<p>You are invited to try the Medical Transcription Analysis Solution. Your credentials are:</p> \
        <p> \
        Username: <strong>{username}</strong><br /> \
        Password: <strong>{####}</strong> \
        </p> \
        <p> \
        Please sign in with the user name and your temporary password provided above at: <br /> \
        https://${distribution.domainName} \
        </p>`,
      },
    });

    new cdk.CfnOutput(this, 'MTAUserPoolId', { value: mtaUserPool.userPoolId });

    // Depends upon all other parts of the stack having been created.
    // Initial user, invited by email; '@' replaced since it is used as the
    // username (presumably to keep username distinct from the email alias).
    const mtaUserPoolUser = new CfnUserPoolUser(this, 'mta-user-pool-user', {
      desiredDeliveryMediums: ['EMAIL'],
      forceAliasCreation: false,
      userPoolId: mtaUserPool.userPoolId,
      userAttributes: [
        {
          name: 'email',
          value: props.email,
        },
      ],
      username: props.email.replace(/@/, '.'),
    });

    const mtaUserPoolClient = new UserPoolClient(this, 'mta-user-pool-client', {
      userPoolClientName: 'mta_app',
      userPool: mtaUserPool,
    });

    // NOTE(review): unauthenticated identities are allowed — confirm this
    // is intended for the demo.
    const mtaIdentityPool = new CfnIdentityPool(this, 'mta-identity-pool', {
      identityPoolName: 'mtaUserIdentityPool',
      allowUnauthenticatedIdentities: true,
      cognitoIdentityProviders: [
        {
          clientId: mtaUserPoolClient.userPoolClientId,
          providerName: mtaUserPool.userPoolProviderName,
          serverSideTokenCheck: false,
        },
      ],
    });

    // Permissions granted to authenticated app users (Transcribe,
    // Comprehend Medical, and the storage bucket).
    const cognitoPolicy = new iam.Policy(this, 'mta-cognito-policy', {
      statements: [
        new iam.PolicyStatement({
          actions: ['cognito-identity:GetId'],
          resources: ['*'],
          effect: iam.Effect.ALLOW,
        }),
        new iam.PolicyStatement({
          actions: ['transcribe:*', 'comprehendmedical:*'],
          resources: ['*'],
          effect: iam.Effect.ALLOW,
        }),
        new iam.PolicyStatement({
          actions: ['s3:GetObject*', 's3:List*', 's3:PutObject'],
          resources: [storageS3Bucket.bucketArn, `${storageS3Bucket.bucketArn}/*`],
          effect: iam.Effect.ALLOW,
        }),
      ],
    });

    // Suppress cfn_nag W11 (wildcard resources) with a justification.
    const cognitoPolicyResource = cognitoPolicy.node.findChild('Resource') as iam.CfnPolicy;

    cognitoPolicyResource.cfnOptions.metadata = {
      cfn_nag: {
        rules_to_suppress: [
          {
            id: 'W11',
            reason: 'The resources in the policy are created/managed by this solution.',
          },
        ],
      },
    };

    // Role assumed by authenticated identities from the identity pool.
    const mtaCognitoAuthenticatedRole = new iam.Role(this, 'mta-cognito-authenticated-role', {
      assumedBy: new iam.FederatedPrincipal(
        'cognito-identity.amazonaws.com',
        {
          StringEquals: {
            'cognito-identity.amazonaws.com:aud': mtaIdentityPool.ref,
          },
          'ForAnyValue:StringLike': {
            'cognito-identity.amazonaws.com:amr': 'authenticated',
          },
        },
        'sts:AssumeRoleWithWebIdentity',
      ),
      path: '/',
    });

    cognitoPolicy.attachToRole(mtaCognitoAuthenticatedRole);

    const mtaIdentityPoolRoleAttachment = new CfnIdentityPoolRoleAttachment(this, 'mta-identity-role-pool-attachment', {
      identityPoolId: mtaIdentityPool.ref,
      roles: {
        authenticated: mtaCognitoAuthenticatedRole.roleArn,
      },
    });

    // Lambda layer carrying an up-to-date boto3 for the Python functions.
    const yarnBotoLoc = lambda.Code.fromAsset('lambda/boto3');

    const boto3Layer = new lambda.LayerVersion(this, this.resourceName('Boto3'), {
      code: yarnBotoLoc,
      compatibleRuntimes: [lambda.Runtime.PYTHON_3_8],
      license: 'Apache-2.0',
    });

    // Role assumed (via STS) to call Transcribe/Comprehend Medical.
    // NOTE(review): 'iam.amazonaws.com' is an unusual service principal;
    // the trust that actually matters is the AccountRootPrincipal statement
    // added below — confirm the service principal is intended.
    const transcriberRole = new iam.Role(this, this.resourceName('TranscriberRole'), {
      assumedBy: new iam.ServicePrincipal('iam.amazonaws.com'),
    });

    transcriberRole.assumeRolePolicy?.addStatements(
      new iam.PolicyStatement({
        effect: iam.Effect.ALLOW,
        actions: ['sts:AssumeRole'],
        principals: [new iam.AccountRootPrincipal()],
      }),
    );

    transcriberRole.addToPolicy(
      new iam.PolicyStatement({
        effect: iam.Effect.ALLOW,
        resources: ['*'],
        actions: [
          'transcribe:StartStreamTranscriptionWebSocket',
          'transcribe:StartMedicalStreamTranscription',
          'comprehendmedical:InferICD10CM',
          'comprehendmedical:InferRxNorm',
          'comprehendmedical:DetectEntitiesV2',
        ],
      }),
    );

    // Dynamodb
    // Sessions keyed by patient; GSI lets lookups go by professional too.
    const TableSessions = new ddb.Table(this, 'TableSessions', {
      tableName: 'Sessions',
      partitionKey: { name: 'PatientId', type: ddb.AttributeType.STRING },
      sortKey: { name: 'SessionId', type: ddb.AttributeType.STRING },
      serverSideEncryption: true,
    });

    TableSessions.addGlobalSecondaryIndex({
      indexName: 'hcpIndex',
      partitionKey: { name: 'HealthCareProfessionalId', type: ddb.AttributeType.STRING },
      sortKey: { name: 'SessionId', type: ddb.AttributeType.STRING },
    });

    const TablePatients = new ddb.Table(this, 'TablePatients', {
      tableName: 'Patients',
      partitionKey: { name: 'PatientId', type: ddb.AttributeType.STRING },
      serverSideEncryption: true,
    });

    const TableHealthCareProfessionals = new ddb.Table(this, 'TableHealthCareProfessionals', {
      tableName: 'HealthCareProfessionals',
      partitionKey: { name: 'HealthCareProfessionalId', type: ddb.AttributeType.STRING },
      serverSideEncryption: true,
    });

    // Lambda
    /* MTAApiProcessor */
    // Custom-resource handler that provisions Athena assets (see provider
    // wiring at the bottom of the constructor).
    const onEventAthenaLambda = new lambda.Function(this, this.resourceName('MTAOnEventAthenaLambda'), {
      runtime: lambda.Runtime.PYTHON_3_8,
      code: lambda.Code.asset('lambda/custom_resource_athena/'),
      handler: 'lambda_function.lambda_handler',
      timeout: cdk.Duration.seconds(60),
      environment: {
        BUCKET_NAME: storageS3Bucket.bucketName,
      },
    });

    onEventAthenaLambda.addLayers(boto3Layer);

    onEventAthenaLambda.addToRolePolicy(
      new iam.PolicyStatement({
        effect: iam.Effect.ALLOW,
        actions: [
          'athena:StartQueryExecution',
          'athena:CreateNamedQuery',
          'athena:DeleteNamedQuery',
          'athena:GetQueryResults',
          'athena:CreateWorkGroup',
          'athena:DeleteWorkGroup',
        ],
        resources: ['*'],
      }),
    );

    // NOTE(review): S3 and Glue permissions below use resource '*'.
    onEventAthenaLambda.addToRolePolicy(
      new iam.PolicyStatement({
        effect: iam.Effect.ALLOW,
        actions: ['s3:PutObject', 's3:GetObject', 's3:AbortMultipartUpload'],
        resources: ['*'],
      }),
    );

    onEventAthenaLambda.addToRolePolicy(
      new iam.PolicyStatement({
        effect: iam.Effect.ALLOW,
        actions: ['glue:*'],
        resources: ['*'],
      }),
    );

    // Main API handler behind API Gateway.
    const apiProcessor = new lambda.Function(this, this.resourceName('MTAApiProcessor'), {
      runtime: lambda.Runtime.PYTHON_3_8,
      code: lambda.Code.asset('lambda'),
      handler: 'lambda_function.lambda_handler',
      timeout: cdk.Duration.seconds(60),
      environment: {
        TRANSCRIBE_ACCESS_ROLEARN: transcriberRole.roleArn,
        BUCKET_NAME: storageS3Bucket.bucketName,
      },
    });

    TableHealthCareProfessionals.grantReadWriteData(apiProcessor);
    TablePatients.grantReadWriteData(apiProcessor);
    TableSessions.grantReadWriteData(apiProcessor);
    storageS3Bucket.grantReadWrite(apiProcessor);
    storageS3Bucket.grantReadWrite(onEventAthenaLambda);

    // The API handler hands out temporary Transcribe credentials by
    // assuming the transcriber role.
    apiProcessor.addToRolePolicy(
      new iam.PolicyStatement({
        actions: ['sts:AssumeRole'],
        effect: iam.Effect.ALLOW,
        resources: [transcriberRole.roleArn],
      }),
    );

    apiProcessor.addToRolePolicy(
      new iam.PolicyStatement({
        actions: ['translate:TranslateText'],
        effect: iam.Effect.ALLOW,
        resources: ['*'], // * permsissions needs to be provided for Translate APIs : https://docs.aws.amazon.com/translate/latest/dg/translate-api-permissions-ref.html
      }),
    );

    apiProcessor.addLayers(boto3Layer);

    // REST API; proxy disabled so each resource/method is declared below.
    const api = new apigateway.LambdaRestApi(this, this.resourceName('MTADemoAPI'), {
      handler: apiProcessor,
      proxy: false,
      deployOptions: {
        loggingLevel: apigateway.MethodLoggingLevel.INFO,
        dataTraceEnabled: false,
      },
    });

    const reqValidator = new apigateway.RequestValidator(this, this.resourceName('apigwResourceValidator'), {
      restApi: api,
      validateRequestBody: true,
      validateRequestParameters: true,
    });

    //one authorizer
    // Cognito user-pool authorizer shared by all non-OPTIONS methods.
    const authorizer = new apigateway.CfnAuthorizer(this, 'Authorizer', {
      identitySource: 'method.request.header.Authorization',
      name: 'Authorization',
      type: 'COGNITO_USER_POOLS',
      providerArns: [mtaUserPool.userPoolArn],
      restApiId: api.restApiId,
    });

    // Adds a CORS preflight OPTIONS method (mock integration) plus the
    // given methods, each guarded by the Cognito authorizer.
    function addCorsOptionsAndMethods(apiResource: apigateway.IResource | apigateway.Resource, methods: string[] | []) {
      const options = apiResource.addMethod(
        'OPTIONS',
        new apigateway.MockIntegration({
          integrationResponses: [
            {
              statusCode: '200',
              responseParameters: {
                'method.response.header.Access-Control-Allow-Headers':
                  "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token,X-Amz-User-Agent'",
                'method.response.header.Access-Control-Allow-Origin': "'*'",
                'method.response.header.Access-Control-Allow-Credentials': "'false'",
                'method.response.header.Access-Control-Allow-Methods': "'OPTIONS,GET,PUT,POST,DELETE'",
              },
            },
          ],
          passthroughBehavior: apigateway.PassthroughBehavior.NEVER,
          requestTemplates: {
            'application/json': '{"statusCode": 200}',
          },
        }),
        {
          methodResponses: [
            {
              statusCode: '200',
              responseParameters: {
                'method.response.header.Access-Control-Allow-Headers': true,
                'method.response.header.Access-Control-Allow-Methods': true,
                'method.response.header.Access-Control-Allow-Credentials': true,
                'method.response.header.Access-Control-Allow-Origin': true,
              },
            },
          ],
          requestValidator: reqValidator,
        },
      );

      methods.forEach((method) => {
        apiResource.addMethod(method, undefined, {
          authorizationType: apigateway.AuthorizationType.COGNITO,
          authorizer: {
            authorizerId: `${authorizer.ref}`,
          },
        });
      });
    }

    addCorsOptionsAndMethods(api.root, []);

    const getCredentials = api.root.addResource('getCredentials');

    addCorsOptionsAndMethods(getCredentials, ['GET', 'POST']);

    const createSessionResource = api.root.addResource('createSession');

    addCorsOptionsAndMethods(createSessionResource, ['POST']);

    const listSessionsResource = api.root.addResource('listSessions');

    addCorsOptionsAndMethods(listSessionsResource, ['GET']);

    const listPatientsResource = api.root.addResource('listPatients');

    addCorsOptionsAndMethods(listPatientsResource, ['GET']);

    const createPatientResource = api.root.addResource('createPatient');

    addCorsOptionsAndMethods(createPatientResource, ['POST']);

    const listHealthCareProfessionalsResource = api.root.addResource('listHealthCareProfessionals');

    addCorsOptionsAndMethods(listHealthCareProfessionalsResource, ['GET']);

    const createHealthCareProfessionalResource = api.root.addResource('createHealthCareProfessional');

    addCorsOptionsAndMethods(createHealthCareProfessionalResource, ['POST']);

    const getTranscriptionComprehendResource = api.root.addResource('getTranscriptionComprehend');

    addCorsOptionsAndMethods(getTranscriptionComprehendResource, ['GET']);

    const getTranscriptionTranslationResource = api.root.addResource('getTranscriptionTranslation');

    addCorsOptionsAndMethods(getTranscriptionTranslationResource, ['GET']);

    // Let authenticated Cognito users invoke the API directly.
    cognitoPolicy.addStatements(
      new iam.PolicyStatement({
        actions: ['execute-api:Invoke'],
        resources: [api.arnForExecuteApi()],
        effect: iam.Effect.ALLOW,
      }),
    );

    // Custom Resource
    // Runs onEventAthenaLambda during deploy to set up Athena assets.
    const athenaProvider = new cr.Provider(this, this.resourceName('athenaProvider'), {
      onEventHandler: onEventAthenaLambda,
    });

    const athenaCustomResource = new CustomResource(this, this.resourceName('athenaCustomResource'), {
      serviceToken: athenaProvider.serviceToken,
    });
  }
}
import {
BooleanFilterConditions,
DateFilterConditions,
FilterVerbs,
IntFilterConditions,
SelectFilterConditions,
StringFilterConditions,
getDefaultFilterCondition,
} from "..";
import { Button, FormControl, Input, Select, Tooltip } from "@chakra-ui/react";
import { CalendarIcon } from "@heroicons/react/outline";
import { Column } from "@/features/fields/types";
import { FilterConditions, IFilter, IFilterGroup } from "../types";
import { GenericEvent } from "@/types";
import { isArray, isDate, isUndefined } from "lodash";
import { useFilters } from "@/features/records/hooks";
import ConditionComponent from "@/features/tables/components/ConditionComponent";
import DatePicker from "react-datepicker";
import FilterTrashIcon from "./FilterTrashIcon"
import React, { forwardRef, memo, useMemo, useState } from "react";
import VerbComponent from "./VerbComponent";
// Relative-date options for point-in-time date conditions ("is",
// "is before", ...). Values are sent verbatim; labels shown to the user
// are derived by replacing underscores with spaces.
const IS_VALUES = {
  today: "today",
  tomorrow: "tomorrow",
  yesterday: "yesterday",
  one_week_ago: "one_week_ago",
  one_week_from_now: "one_week_from_now",
  one_month_ago: "one_month_ago",
  one_month_from_now: "one_month_from_now",
  exact_date: "exact_date",
};

// Range options used only by the "is within" date condition.
const WITHIN_VALUES = {
  past_week: "past_week",
  next_week: "next_week",
  past_month: "past_month",
  next_month: "next_month",
  past_year: "past_year",
  next_year: "next_year",
};

// Conditions that require no value input (null/empty/boolean checks);
// the value editor is hidden for these.
const CONDITIONS_WITHOUT_VALUE = [
  IntFilterConditions.is_null,
  IntFilterConditions.is_not_null,
  StringFilterConditions.is_empty,
  StringFilterConditions.is_not_empty,
  StringFilterConditions.is_null,
  StringFilterConditions.is_not_null,
  BooleanFilterConditions.is_true,
  BooleanFilterConditions.is_false,
  BooleanFilterConditions.is_null,
  BooleanFilterConditions.is_not_null,
  DateFilterConditions.is_null,
  DateFilterConditions.is_not_null,
  SelectFilterConditions.is_empty,
  SelectFilterConditions.is_not_empty,
  SelectFilterConditions.is_null,
  SelectFilterConditions.is_not_null,
];
// This input is used for selecting exact date for date filter.
const CustomDateInput = forwardRef(
(
{
onClick,
}: {
onClick?: (e: GenericEvent) => void;
},
// eslint-disable-next-line @typescript-eslint/no-explicit-any
ref: any
) => {
return (
<Button
size="xs"
onClick={onClick}
ref={ref}
className="p-0 flex h-full w-full justify-center items-center"
>
<CalendarIcon className="h-3" />
</Button>
);
}
);
CustomDateInput.displayName = "CustomDateInput";
const TextInputSelector = ({
value,
onChange,
onBlur,
}: {
value: string | undefined;
onChange: (e: GenericEvent) => void;
onBlur: (e: GenericEvent) => void;
}) => (
<FormControl>
<Input size="xs" value={value} onBlur={onBlur} onChange={onChange} />
</FormControl>
);
const SelectFieldTypeSelector = ({
options,
defaultValue,
onChange,
}: {
options: string | undefined;
defaultValue: string | undefined;
onChange: (e: GenericEvent) => void;
}) => (
<FormControl>
<Select size="xs" defaultValue={defaultValue} onChange={onChange}>
{options &&
options.split(",").map((option: string, index: number) => (
<option key={index} value={option.trim()}>
{option.trim()}
</option>
))}
</Select>
</FormControl>
);
// Editor for date conditions: a relative-option dropdown plus, when the
// "exact_date" option is chosen, a date-time picker. Option values are
// interpreted on the server in UTC (hence the tooltip).
const DateSelector = ({
  filter,
  onChange,
  onChangeDate,
}: {
  filter: IFilter;
  onChange: (e: GenericEvent) => void;
  onChangeDate: (date: Date | [Date | null, Date | null] | null) => void;
}) => {
  // "is within" gets range options; every other date condition gets
  // point-in-time options.
  const options = useMemo(() => {
    if (filter.condition === DateFilterConditions.is_within) {
      return WITHIN_VALUES;
    }

    return IS_VALUES;
  }, [filter.condition]);

  return (
    <FormControl>
      <div className="flex space-x-1">
        <Tooltip label="Dates are in server timezone (UTC)." fontSize="xs">
          <Select size="xs" value={filter.option} onChange={onChange}>
            {Object.entries(options).map(([id, label]) => (
              <option key={id} value={id}>
                {label.replaceAll("_", " ")}
              </option>
            ))}
          </Select>
        </Tooltip>
        {filter.option === "exact_date" && (
          <div className="flex-1">
            <DatePicker
              showTimeSelect={true}
              selected={
                filter.value !== ""
                  ? new Date(filter.value as string)
                  : new Date()
              }
              onChange={onChangeDate}
              customInput={<CustomDateInput />}
            />
          </div>
        )}
      </div>
    </FormControl>
  );
};
const Filter = ({
columns,
filter,
idx,
parentIdx,
}: {
columns: Column[];
filter: IFilter;
idx: number;
parentIdx?: number;
}) => {
const { filters, removeFilter, updateFilter } = useFilters();
const [localInputValue, setLocalInputValue] = useState(filter.value);
const isDateFilter = useMemo(
() => filter.column.fieldType === "DateTime",
[filter.column.fieldType]
);
const isSelectFilter = useMemo(
() => filter.column.fieldType === "Select",
[filter.column.fieldType]
);
const isSelectWithValueInput = useMemo(
() =>
filter.condition === SelectFilterConditions.contains ||
filter.condition === SelectFilterConditions.not_contains,
[filter.condition]
);
const changeFilterColumn = (columnName: string) => {
const column = columns.find((c) => c.name === columnName) as Column;
// Get the default condition and option/value for the new filter.
const condition = getDefaultFilterCondition(column.fieldType);
let option;
if (column.fieldType === "DateTime") {
option = "today";
}
let value;
if (column.fieldType === "Select") {
value =
(column?.fieldOptions?.options as string).split(",")[0].trim() || "";
}
// If the filter is in a group (!isUndefined(parentIdx)), we need to update the filters array of that group.
if (!isUndefined(parentIdx)) {
const groupFilter = filters[parentIdx] as IFilterGroup;
const newFilters = [...groupFilter.filters];
newFilters[idx] = {
...groupFilter.filters[idx],
column,
columnName,
condition,
option,
value,
};
updateFilter(parentIdx, {
...groupFilter,
filters: newFilters,
});
} else {
updateFilter(idx, {
...filter,
column,
columnName,
condition,
option,
value,
});
}
};
const changeFilterCondition = (condition: FilterConditions) => {
let option;
if (isDateFilter) {
if (condition === DateFilterConditions.is_within) {
option = "past_week";
} else if (
condition === DateFilterConditions.is ||
condition === DateFilterConditions.is_not ||
condition === DateFilterConditions.is_before ||
condition === DateFilterConditions.is_after ||
condition === DateFilterConditions.is_on_or_before ||
condition === DateFilterConditions.is_on_or_after
) {
option = "today";
}
}
const isSelectFilterWithoutValue =
isSelectFilter &&
(condition === SelectFilterConditions.is_empty ||
condition === SelectFilterConditions.is_not_empty ||
condition === SelectFilterConditions.is_null ||
condition === SelectFilterConditions.is_not_null);
if (!isUndefined(parentIdx)) {
const groupFilter = filters[parentIdx] as IFilterGroup;
const newFilters = [...groupFilter.filters];
newFilters[idx] = {
...groupFilter.filters[idx],
condition,
option,
value: isSelectFilterWithoutValue ? "" : groupFilter.filters[idx].value,
};
updateFilter(parentIdx, {
...groupFilter,
filters: newFilters,
});
} else {
updateFilter(idx, {
...filter,
condition,
option,
value: isSelectFilterWithoutValue ? "" : filter.value,
});
}
};
const changeFilterOption = (option: string) => {
if (!isUndefined(parentIdx)) {
const groupFilter = filters[parentIdx] as IFilterGroup;
const newFilters = [...groupFilter.filters];
newFilters[idx] = {
...groupFilter.filters[idx],
option,
value: "",
};
updateFilter(parentIdx, {
...groupFilter,
filters: newFilters,
});
} else {
updateFilter(idx, {
...filter,
option,
value: "",
});
}
};
const changeFilterValue = (value: string) => {
if (!isUndefined(parentIdx)) {
const groupFilter = filters[parentIdx] as IFilterGroup;
const newFilters = [...groupFilter.filters];
newFilters[idx] = {
...groupFilter.filters[idx],
value,
};
updateFilter(parentIdx, {
...groupFilter,
filters: newFilters,
});
} else {
updateFilter(idx, {
...filter,
value,
});
}
};
const changeFilterVerb = (verb: FilterVerbs) => {
if (!isUndefined(parentIdx)) {
const groupFilter = filters[parentIdx] as IFilterGroup;
const newFilters = [...groupFilter.filters];
newFilters.forEach(
(filter, i) =>
(newFilters[i] = {
...groupFilter.filters[i],
verb,
})
);
updateFilter(parentIdx, {
...groupFilter,
filters: newFilters,
});
} else {
updateFilter(idx, {
...filter,
verb,
});
}
};
const handleRemoveFilter = () => {
if (!isUndefined(parentIdx)) {
const groupFilter = filters[parentIdx] as IFilterGroup;
const newFilters = [...groupFilter.filters];
if (newFilters.length > 1) {
newFilters.splice(idx, 1);
updateFilter(parentIdx, {
...groupFilter,
filters: newFilters,
});
} else {
removeFilter(parentIdx);
}
} else {
removeFilter(idx);
}
};
  // Handle a value coming from the date picker. Single-date mode hands us a
  // Date (or null when cleared); range mode hands us a [start, end] tuple.
  const handleChangeDate = (date: Date | [Date | null, Date | null] | null) => {
    const value = isArray(date) ? [date[0], date[1]] : date;
    // Only single Date values are persisted: the tuple branch above never
    // passes isDate, so ranges fall through without updating the filter —
    // NOTE(review): presumably intentional; confirm against DateSelector.
    if (isDate(value)) {
      // Truncate to midnight UTC so the stored value is date-only.
      value.setUTCHours(0, 0, 0, 0);
      changeFilterValue(value.toUTCString());
    }
  };
  // True when the selected condition needs no right-hand value
  // (e.g. the null-check conditions listed in CONDITIONS_WITHOUT_VALUE).
  const isWithoutValue = useMemo(
    () => CONDITIONS_WITHOUT_VALUE.includes(filter.condition),
    [filter.condition]
  );
  // A plain text input applies to non-select/non-date columns, and to select
  // columns whose condition expects a free-form value.
  const hasTextInputSelector = useMemo(
    () =>
      (!isSelectFilter && !isDateFilter) ||
      (isSelectFilter && isSelectWithValueInput),
    [isSelectFilter, isDateFilter, isSelectWithValueInput]
  );
  // Select columns with a fixed option list render a dropdown instead.
  const isSelectFieldType = useMemo(
    () => isSelectFilter && !isSelectWithValueInput,
    [isSelectFilter, isSelectWithValueInput]
  );
const ColumnsSelector = () => (
<FormControl className="flex-1">
<Select
size="xs"
value={filter.columnName}
onChange={(e) => changeFilterColumn(e.currentTarget.value)}
>
{columns &&
columns
.filter((column) => !column.baseOptions.computed)
.map((column, idx) => (
<option key={idx} value={column.name}>
{column.label}
</option>
))}
</Select>
</FormControl>
);
const ValueSelector = useMemo(() => {
if (isWithoutValue) return <div></div>;
if (isSelectFieldType)
return (
<SelectFieldTypeSelector
defaultValue={filter.value}
options={filter.column?.fieldOptions?.options as string}
onChange={(e: GenericEvent) =>
changeFilterValue(e?.currentTarget?.value)
}
/>
);
if (isDateFilter)
return (
<DateSelector
filter={filter}
onChange={(e: GenericEvent) =>
changeFilterOption(e.currentTarget.value)
}
onChangeDate={handleChangeDate}
/>
);
if (hasTextInputSelector)
return (
<TextInputSelector
value={localInputValue}
onChange={(e: GenericEvent) =>
setLocalInputValue(e.currentTarget.value)
}
onBlur={() => changeFilterValue(localInputValue || "")}
/>
);
return <div></div>;
}, [localInputValue, isSelectFieldType, isDateFilter, hasTextInputSelector]);
  // Layout: verb selector | column + condition + value grid | remove icon.
  return (
    <>
      <div className="flex-1 flex flex-col sm:flex-row w-full sm:w-auto sm:items-center sm:space-x-2">
        <div className="flex w-16">
          <VerbComponent
            idx={idx}
            verb={filter.verb}
            onChange={(value: FilterVerbs) => changeFilterVerb(value)}
          />
        </div>
        <div className="flex-1 grid grid-cols-3 gap-2 font-mono">
          <ColumnsSelector />
          <ConditionComponent
            filter={filter}
            onChange={(value: FilterConditions) => changeFilterCondition(value)}
          />
          {ValueSelector}
        </div>
        <FilterTrashIcon onClick={handleRemoveFilter} />
      </div>
    </>
  );
};
export default memo(Filter); | the_stack |
import "../AsyncSupport";
import "../XMLDomInit";
import xmldom from "xmldom";
import test from "ava";
import sinon from "sinon";
import GrimoireInterface from "../../src/Interface/GrimoireInterface";
import Constants from "../../src/Base/Constants";
import Component from "../../src/Node/Component";
import GomlParser from "../../src/Node/GomlParser";
import GomlLoader from "../../src/Node/GomlLoader";
import NSIdentity from "../../src/Base/NSIdentity";
import Namespace from "../../src/Base/Namespace";
import GomlNode from "../../src/Node/GomlNode";
// Minimal DOM shims so node-related code can run outside a browser.
declare namespace global {
  let Node: any;
  let document: any;
}
// Only the ELEMENT_NODE constant is provided on the Node shim.
global.Node = {
  ELEMENT_NODE: 1
};
// DOMParser is expected to be installed globally by the XMLDomInit/xmldom
// imports above — NOTE(review): verify which import provides it.
global.document = new DOMParser().parseFromString("<html></html>", "text/html");
// Reset the registry before every test so registrations don't leak between cases.
test.beforeEach(() => {
  GrimoireInterface.clear();
  GrimoireInterface.resolvePlugins();
});
test("ns method should generate namespace generating function correctly", (t) => {
  // Namespace.define returns a generator whose for() builds fully-qualified names.
  const fqn = Namespace.define("grimoire").for("test").fqn;
  t.truthy(fqn === "grimoire.test");
});
test("registerComponent works correctly", (t) => {
  const l = GrimoireInterface.componentDeclarations.toArray().length;
  const dec = GrimoireInterface.registerComponent("Name", {
    attributes: {
      attr: { converter: "String", default: "aaa" }
    }
  });
  // Declaration keeps the attribute metadata and is added to the registry.
  t.truthy(dec.attributes["attr"].default === "aaa");
  t.truthy(GrimoireInterface.componentDeclarations.toArray().length === l + 1);
  // Registering again under the same name must throw.
  t.throws(() => {
    GrimoireInterface.registerComponent("Name", {
      attributes: {
        attr: { converter: "String", default: undefined }
      }
    });
  });
  class Hoo {
    public static attributes = {
    };
  }
  t.throws(() => {
    GrimoireInterface.registerComponent("Name", Hoo); // because not extends Component.
  });
});
// Object-literal registration: attribute metadata, plain fields, and
// $-prefixed handlers all end up on generated instances; a third argument
// names a base component to inherit from.
test("registerComponent by object works correctly", async (t) => {
  const defaultComponentCount = GrimoireInterface.componentDeclarations.toArray().length;
  GrimoireInterface.registerComponent("Aaa", {
    attributes: {
      testValue: {
        converter: "String",
        default: "bbb"
      },
      testOverride: {
        converter: "String",
        default: "bbb"
      }
    },
    hoge: 0,
    $test: function() {
      this.hoge += 1;
    }
  });
  const aaa = GrimoireInterface.componentDeclarations.get("Aaa");
  t.truthy(GrimoireInterface.componentDeclarations.toArray().length === defaultComponentCount + 1);
  t.truthy(aaa.attributes.testValue);
  t.truthy(aaa.isDependenyResolved); // because no inherits.
  const aaa2 = aaa.generateInstance();
  const aaa22 = aaa.generateInstance();
  t.truthy(aaa2 instanceof Component);
  t.truthy(aaa2.attributes.get("testValue"));
  t.truthy(aaa2.enabled);
  t.truthy(aaa22.enabled);
  // Instances must not share state: disabling one leaves the other enabled.
  aaa2.enabled = false;
  t.truthy(!aaa2.enabled);
  t.truthy(aaa22.enabled);
  // Plain fields are per-instance as well: $test only mutates its own instance.
  (aaa2 as any).$test();
  t.truthy((aaa2 as any).hoge === 1);
  t.truthy((aaa22 as any).hoge === 0);
  GrimoireInterface.registerComponent("Bbb", {
    attributes: {
      testValue2: {
        converter: "String",
        default: "ccc"
      },
      testOverride: {
        converter: "String",
        default: "ccc"
      }
    },
    $test2: function() {
      // do nothing.
    }
  }, "Aaa");
  t.truthy(GrimoireInterface.componentDeclarations.toArray().length === defaultComponentCount + 2);
  const bbb = GrimoireInterface.componentDeclarations.get("Bbb");
  t.truthy(!bbb.isDependenyResolved); // because bbb is inherits Aaa.
  await GrimoireInterface.resolvePlugins();
  t.truthy(bbb.isDependenyResolved);
  t.truthy(bbb.attributes.testValue); // from Aaa
  t.truthy(bbb.attributes.testValue2); // from Bbb
  t.truthy(bbb.attributes.testOverride.default === "ccc"); // override attribute with inherits correctly.
  const bbb2 = bbb.generateInstance();
  t.truthy(bbb2.attributes.get("testValue")); // inherits attr from Aaa
  t.truthy(bbb2.attributes.get("testValue2")); // attr defined by Bbb
  t.truthy((bbb2 as any).$test);
  t.truthy((bbb2 as any).$test2);
});
// Class-based registration mirrors the object-literal path: static `attributes`
// declares attribute metadata, and the third argument names the base component.
test("registerComponent by class works correctly", async (t) => {
  const defaultComponentCount = GrimoireInterface.componentDeclarations.toArray().length;
  class Aaa extends Component {
    public static attributes = {
      testValue: {
        converter: "String",
        default: "bbb"
      },
      testOverride: {
        converter: "String",
        default: "bbb"
      }
    };
    public hoge = 0;
    public $test() {
      this.hoge += 1;
    }
    public overridedFunc() {
      return this.hoge;
    }
  }
  class Bbb extends Component {
    public static attributes = {
      testValue2: {
        converter: "String",
        default: "ccc"
      },
      testOverride: {
        converter: "String",
        default: "ccc"
      }
    };
    public fuga = 7;
    public $test2() {
      return this.fuga;
    }
    public overridedFunc() {
      return this.$test2();
    }
  }
  GrimoireInterface.registerComponent("Aaa", Aaa);
  const aaa = GrimoireInterface.componentDeclarations.get("Aaa");
  t.truthy(GrimoireInterface.componentDeclarations.toArray().length === defaultComponentCount + 1);
  t.truthy(aaa.attributes.testValue);
  t.truthy(aaa.isDependenyResolved); // because no inherits.
  const aaa2 = aaa.generateInstance();
  const aaa22 = aaa.generateInstance();
  t.truthy(aaa2 instanceof Component);
  t.truthy(aaa2.attributes.get("testValue"));
  t.truthy(aaa2.enabled);
  t.truthy(aaa22.enabled);
  // Instance state must be isolated between generated instances.
  aaa2.enabled = false;
  t.truthy(!aaa2.enabled);
  t.truthy(aaa22.enabled);
  (aaa2 as any).$test();
  t.truthy((aaa2 as any).hoge === 1);
  t.truthy((aaa22 as any).hoge === 0);
  GrimoireInterface.registerComponent("Bbb", Bbb, "Aaa");
  t.truthy(GrimoireInterface.componentDeclarations.toArray().length === defaultComponentCount + 2);
  const bbb = GrimoireInterface.componentDeclarations.get("Bbb");
  t.truthy(!bbb.isDependenyResolved);
  await GrimoireInterface.resolvePlugins();
  t.truthy(bbb.isDependenyResolved);
  const bbb2 = bbb.generateInstance();
  t.truthy(bbb2.attributes.get("testValue"));
  t.truthy(bbb2.attributes.get("testValue2"));
  t.truthy(bbb2.attributes.get("testOverride"));
  t.truthy(bbb.attributes.testValue);
  t.truthy(bbb.attributes.testValue2);
  t.truthy(bbb.attributes.testOverride.default === "ccc");
  t.truthy((bbb2 as any).$test);
  t.truthy((bbb2 as any).$test2);
  t.truthy((bbb2 as any).fuga === 7);
  t.truthy((bbb2 as any).hoge === 0);
  (bbb2 as any).$test();
  t.truthy((bbb2 as any).hoge === 1);
  // Bbb's overridedFunc wins over Aaa's: it returns fuga (7), not hoge.
  t.truthy((bbb2 as any).overridedFunc() === 7);
});
// Registering a class that `extends` another registered component class:
// inheritance comes from the ES class hierarchy itself (no third argument).
test("registerComponent works correctly4", async (t) => {
  const defaultComponentCount = GrimoireInterface.componentDeclarations.toArray().length;
  class Aaa extends Component {
    public static attributes: { [key: string]: any } = {
      testValue: {
        converter: "String",
        default: "bbb"
      },
      testOverride: {
        converter: "String",
        default: "bbb"
      }
    };
    public hoge = 0;
    public $test() {
      this.hoge += 1;
    }
  }
  class Bbb2 extends Aaa {
    public static attributes = {
      testValue2: {
        converter: "String",
        default: "bbb"
      },
      testOverride: {
        converter: "String",
        default: "ccc"
      }
    };
    public fuga = 7;
    public $test2() {
      // do nothing.
    }
  }
  GrimoireInterface.registerComponent("Aaa", Aaa);
  const aaa = GrimoireInterface.componentDeclarations.get("Aaa");
  t.truthy(GrimoireInterface.componentDeclarations.toArray().length === defaultComponentCount + 1);
  t.truthy(aaa.attributes.testValue);
  t.truthy(aaa.isDependenyResolved); // because no inherits.
  const aaa2 = aaa.generateInstance();
  const aaa22 = aaa.generateInstance();
  t.truthy(aaa2 instanceof Component);
  t.truthy(aaa2.attributes.get("testValue"));
  t.truthy(aaa2.enabled);
  t.truthy(aaa22.enabled);
  // Instance state is isolated between generated instances.
  aaa2.enabled = false;
  t.truthy(!aaa2.enabled);
  t.truthy(aaa22.enabled);
  (aaa2 as any).$test();
  t.truthy((aaa2 as any).hoge === 1);
  t.truthy((aaa22 as any).hoge === 0);
  GrimoireInterface.registerComponent("Bbb", Bbb2);
  t.truthy(GrimoireInterface.componentDeclarations.toArray().length === defaultComponentCount + 2);
  const bbb = GrimoireInterface.componentDeclarations.get("Bbb");
  await GrimoireInterface.resolvePlugins();
  t.truthy(aaa.isDependenyResolved);
  t.truthy(bbb.isDependenyResolved);
  const bbb2 = bbb.generateInstance();
  t.truthy(bbb2.attributes.get("testValue"));
  t.truthy(bbb.attributes.testValue);
  t.truthy(bbb.attributes.testValue2);
  t.truthy(bbb.attributes.testOverride.default === "ccc");
  t.truthy((bbb2 as any).$test);
  t.truthy((bbb2 as any).$test2);
  t.truthy((bbb2 as any).fuga === 7);
  t.truthy((bbb2 as any).hoge === 0);
  (bbb2 as any).$test();
  t.truthy((bbb2 as any).hoge === 1);
});
// Node declarations pick up their default components and resolve attribute
// ids through the node's idResolver once plugins are resolved.
test("registerNode/Component works correctly.", async t => {
  GrimoireInterface.registerNode("a1");
  GrimoireInterface.registerNode("a2", ["Hoge"]);
  GrimoireInterface.registerNode("a3", [], { hoge: 7 }, "a2");
  GrimoireInterface.registerComponent("Hoge", {
    attributes: {
      hoge: {
        converter: "Number",
        default: 9
      }
    }
  });
  await GrimoireInterface.resolvePlugins();
  // `const` instead of `let`: these are never reassigned.
  const a1 = GrimoireInterface.nodeDeclarations.get("a1");
  const a2 = GrimoireInterface.nodeDeclarations.get("a2");
  const a3 = GrimoireInterface.nodeDeclarations.get("a3");
  t.truthy(a1.defaultComponentsActual.toArray().length === 1); // grimoireCompone
  t.truthy(a2.defaultComponentsActual.toArray().length === 2); // grimoireCompone
  t.truthy(a3.defaultComponentsActual.toArray().length === 2); // grimoireCompone
  // console.log(a2.idResolver)
  t.truthy(a2.idResolver.resolve(Namespace.define("hoge")) === "grimoirejs.Hoge.hoge");
  t.truthy(a3.idResolver.resolve(Namespace.define("hoge")) === "grimoirejs.Hoge.hoge");
});
test("throw error on attempt registerComponent/Node by duplicate name.", t => {
  // Seed one component and one node, then verify that re-registering either
  // name is rejected.
  GrimoireInterface.registerComponent("Aaa", { attributes: {} });
  GrimoireInterface.registerNode("node");
  const registerDuplicateComponent = () =>
    GrimoireInterface.registerComponent("Aaa", {} as any);
  const registerDuplicateNode = () => GrimoireInterface.registerNode("node");
  t.throws(registerDuplicateComponent);
  t.throws(registerDuplicateNode);
});
test("register and resolvePlugins works preperly", async () => {
const spy1 = sinon.spy();
const spy2 = sinon.spy();
const wrapPromise: any = function(spy) {
return () => {
return new Promise(resolve => {
spy();
resolve(null);
});
};
};
const spyp = wrapPromise(spy1);
const spyp2 = wrapPromise(spy2);
GrimoireInterface.register(spyp);
GrimoireInterface.register(spyp2);
await GrimoireInterface.resolvePlugins();
sinon.assert.callOrder(spy1, spy2);
}); | the_stack |
import ServiceCheck, {
formatHumanReadable,
formatMarkdown,
formatTimePeriod,
} from "../check";
import checkSchemaResult from "../../../../__fixtures__/check-schema-result";
import { ChangeSeverity } from "apollo-language-server/lib/graphqlTypes";
import chalk from "chalk";
import { stdout } from "stdout-stderr";
import * as graphql from "graphql";
import { graphqlTypes } from "apollo-language-server";
import nock from "nock";
import stripAnsi from "strip-ansi";
/**
* Single URL for all local requests to be mocked
*/
const localURL = "http://localhost:4000";
/**
* Default API key. This is not an actual API key but a randomly generated string.
*
* If you need to use the `nock` recorder, then this will not work because we won't be able to access Apollo
* with a fake API key.
*/
const fakeApiKey = "service:engine:9YC5AooMa2yO11eFlZat11";
/**
* An array that we'll spread into all CLI commands to pass the Apollo api key.
*/
const cliKeyParameter = [`--key=${fakeApiKey}`];
/**
* The original `console.log` being mocked.
*
* We save it so we can restore it after a test.
*/
let mockedConsoleLogOriginal: Console["log"] | null = null;
/**
* Array of intercepted console values.
*/
let mockedConsoleLogValues: string[] | null = null;
// Get original CI environment variables
const { CI, CIRCLECI, GITHUB_ACTION, BUILD_BUILDURI } = process.env;
// TODO: the following two functions are identical to the ones found in list.test.ts
// we are choosing to duplicate them for now, because with a shared helper function,
// jest overwrites console log output as the tests are run in parallel
/**
* Mock and capture `console.log` and `stdout.write`s. Return them in that order as a single string.
*
* This will emulate what the output of running the CLI would look like.
*
* Call `uncaptureApplicationOutput` to reverse the effects of this function.
*/
function captureApplicationOutput() {
  mockedConsoleLogOriginal = console["log"];
  mockedConsoleLogValues = [];
  // Replace console.log with a recorder that stores each call as one line.
  console["log"] = jest.fn((...items) => {
    if (mockedConsoleLogValues === null) {
      throw new Error(
        "mockedConsoleLogValues is not prepared but we're still capturing console.log. This means there's a bug somewhere."
      );
    }
    mockedConsoleLogValues.push(items.join(" "));
  });
  // Also begin intercepting direct stdout writes.
  stdout.start();
}
/**
* Reverse mocking of `console.log` and `stdout.write`. If they weren't mocked to begin with, this will do
* nothing and return null.
*/
function uncaptureApplicationOutput(): string | null {
  const originalLog = mockedConsoleLogOriginal;
  const capturedLines = mockedConsoleLogValues;
  // Nothing to undo when capture was never started.
  if (!originalLog || !capturedLines) {
    return null;
  }
  // Combine captured console.log lines with whatever went to stdout directly.
  const result = capturedLines.concat(stdout.output).join("\n");
  mockedConsoleLogValues = null;
  // Put the real console.log back and stop intercepting stdout.
  console["log"] = originalLog;
  stdout.stop();
  return result;
}
/**
* Convert a schema SDL to an introspection query result.
*
* @see https://blog.apollographql.com/three-ways-to-represent-your-graphql-schema-a41f4175100d
*
* @param schemaSdl string Schema in SDL form
*/
function sdlToIntrospectionQueryResult(schemaSdl: string) {
  // Build an executable schema from the SDL, then run the standard
  // introspection query against it; `data` is the payload shape a live
  // GraphQL server would return.
  const schema = graphql.buildSchema(schemaSdl);
  const introspectionQuery = graphql.getIntrospectionQuery();
  return graphql.graphqlSync(schema, introspectionQuery).data;
}
/**
 * Use `nock` to mock an `IntrospectionQuery` POSTed to `${localURL}/graphql`.
 *
 * Takes no parameters: the URL is fixed to `localURL` and the SDL is a
 * hard-coded placeholder, since responses are simulated regardless of input.
 */
function mockIntrospectionQuery() {
  // Intercept only the IntrospectionQuery operation on the local endpoint.
  nock(localURL, { encodedQueryParams: true })
    .post(
      "/graphql",
      (request) => request.operationName === "IntrospectionQuery"
    )
    .reply(200, {
      // The SDL doesn't actually get used because we'll be simulating network responses regardless of input,
      // so we just use a fake SDL.
      data: sdlToIntrospectionQueryResult(`type Query { me: ID }`),
    });
}
/**
* Mock network requests for a successful schema composition. This includes the subsequent `CheckSchema`
* request that will be made.
*/
function mockCompositionSuccess() {
mockIntrospectionQuery();
nock(localURL, {
encodedQueryParams: true,
})
.post(
"/graphql",
({ operationName }) => operationName === "getFederationInfo"
)
.reply(200, {
data: {
_service: {
sdl: 'extend type Query {\n me: User\n}\n\ntype User @key(fields: "id") {\n name: String\n username: String\n birthDate: String\n}\n',
},
},
});
nock("https://engine-staging-graphql.apollographql.com:443", {
encodedQueryParams: true,
})
.post(
"/api/graphql",
({ operationName }) => operationName === "CheckPartialSchema"
)
.reply(200, {
data: {
service: {
checkPartialSchema: {
compositionValidationResult: {
compositionValidationDetails: {
schemaHash:
"645fdd4b789fffb5c5b59443a12e6f575e61345e95fe9e1dae3fe9acb23c68efa8ac31ea657892f0a85d1c90d8503fe9e482f520fe8d9786ae26948de10ce4a6",
},
graphCompositionID: null,
errors: [],
},
checkSchemaResult: {
targetUrl:
"https://engine-staging.apollographql.com/service/justin-fullstack-tutorial/check/3acd7765-61b2-4f1a-9227-8b288e42bfdc",
diffToPrevious: {
severity: "NOTICE",
affectedClients: [],
affectedQueries: [],
numberOfCheckedOperations: 0,
changes: [
{
severity: "NOTICE",
code: "ARG_CHANGED_TYPE",
description:
"`Query.launches` argument `after` has changed type from `String` to `String!`",
},
],
validationConfig: {
from: "-47347200",
to: "-0",
queryCountThreshold: 1,
queryCountThresholdPercentage: 0,
},
},
},
},
},
},
});
nock("https://engine-staging-graphql.apollographql.com:443", {
encodedQueryParams: true,
})
.post(
"/api/graphql",
({ operationName }) => operationName === "CheckSchema"
)
.reply(200, {
data: {
service: {
checkSchema: {
targetUrl:
"https://engine-staging.apollographql.com/service/justin-fullstack-tutorial/check/3acd7765-61b2-4f1a-9227-8b288e42bfdc",
diffToPrevious: {
severity: "NOTICE",
affectedClients: [],
affectedQueries: [],
numberOfCheckedOperations: 0,
changes: [
{
severity: "NOTICE",
code: "ARG_CHANGED_TYPE",
description:
"`Query.launches` argument `after` has changed type from `String` to `String!`",
},
],
validationConfig: {
from: "-47347200",
to: "-0",
queryCountThreshold: 1,
queryCountThresholdPercentage: 0,
},
},
},
},
},
});
}
/**
* Mock network requests for a non-federated schema check that produces errors.
*/
function mockNonFederatedFailure() {
  mockIntrospectionQuery();
  // Single CheckSchema response whose diff carries a FAILURE-severity change;
  // the tests using this mock expect the CLI run to reject.
  nock("https://engine-staging-graphql.apollographql.com:443", {
    encodedQueryParams: true,
  })
    .post("/api/graphql", () => true)
    .reply(200, {
      data: {
        service: {
          checkSchema: {
            targetUrl:
              "https://engine-staging.apollographql.com/service/justin-fullstack-tutorial/check/3acd7765-61b2-4f1a-9227-8b288e42bfdc",
            diffToPrevious: {
              severity: "FAILURE",
              affectedClients: [],
              affectedQueries: [],
              numberOfCheckedOperations: 0,
              changes: [
                {
                  severity: "FAILURE",
                  code: "ARG_CHANGED_TYPE",
                  description:
                    "`Query.launches` argument `after` has changed type from `String` to `String!`",
                },
              ],
              validationConfig: {
                from: "-47347200",
                to: "-0",
                queryCountThreshold: 1,
                queryCountThresholdPercentage: 0,
              },
            },
          },
        },
      },
    });
}
/**
* Mock network requests for a non-federated schema check that produces no errors.
*/
function mockNonFederatedSuccess() {
  mockIntrospectionQuery();
  // Same payload as mockNonFederatedFailure but with NOTICE severity, so the
  // CLI run is expected to resolve. NOTE(review): the two mocks differ only
  // in severity and could share a builder.
  nock("https://engine-staging-graphql.apollographql.com:443", {
    encodedQueryParams: true,
  })
    .post("/api/graphql", () => true)
    .reply(200, {
      data: {
        service: {
          checkSchema: {
            targetUrl:
              "https://engine-staging.apollographql.com/service/justin-fullstack-tutorial/check/3acd7765-61b2-4f1a-9227-8b288e42bfdc",
            diffToPrevious: {
              severity: "NOTICE",
              affectedClients: [],
              affectedQueries: [],
              numberOfCheckedOperations: 0,
              changes: [
                {
                  severity: "NOTICE",
                  code: "ARG_CHANGED_TYPE",
                  description:
                    "`Query.launches` argument `after` has changed type from `String` to `String!`",
                },
              ],
              validationConfig: {
                from: "-47347200",
                to: "-0",
                queryCountThreshold: 1,
                queryCountThresholdPercentage: 0,
              },
            },
          },
        },
      },
    });
}
/**
* Mock network requests for a federated schema running partialSchemaCheck and producing errors
*/
const mockPartialSchemaCheckFailure = () => {
mockIntrospectionQuery();
nock(localURL, {
encodedQueryParams: true,
})
.post(
"/graphql",
({ operationName }) => operationName === "getFederationInfo"
)
.reply(200, {
data: {
_service: {
sdl: 'extend type Query {\n me: User\n}\n\ntype User @key(fields: "id") {\n name: String\n username: String\n birthDate: String\n}\n',
},
},
});
nock("https://engine-staging-graphql.apollographql.com:443", {
encodedQueryParams: true,
})
.post(
"/api/graphql",
({ operationName }) => operationName === "CheckPartialSchema"
)
.reply(200, {
data: {
service: {
checkPartialSchema: {
compositionValidationResult: {
compositionValidationDetails: {
schemaHash: null,
},
graphCompositionID: null,
errors: [
{
message:
"[reviews] User.id -> marked @external but it does not have a matching field on on the base service (accounts)",
},
{
message:
"[reviews] User -> A @key selects id, but User.id could not be found",
},
{
message:
"[accounts] User -> A @key selects id, but User.id could not be found",
},
],
},
checkSchemaResult,
},
},
},
});
};
describe("service:check", () => {
let originalChalkSupportsColor;
  beforeEach(() => {
    // Disable chalk colors so snapshot output is stable across environments.
    originalChalkSupportsColor = chalk.supportsColor;
    chalk.supportsColor = false;
    // Clean console log capturing before tests in the event that `afterEach` was not run successfully.
    uncaptureApplicationOutput();
    // Clean up all network mocks before tests in the event that `afterEach` was not run successfully.
    nock.cleanAll();
    nock.disableNetConnect();
    // Remove CI markers so output formatting is deterministic in local runs.
    delete process.env.CI;
    delete process.env.CIRCLECI;
    delete process.env.GITHUB_ACTION;
    delete process.env.BUILD_BUILDURI;
    // Set the jest timeout to be longer than the default 5000ms to compensate for slow CI.
    jest.setTimeout(25000);
  });
afterEach(() => {
chalk.supportsColor = originalChalkSupportsColor;
// Clean up console log mocking
uncaptureApplicationOutput();
// Clean up all network mocks and restore original functionality
nock.cleanAll();
nock.enableNetConnect();
process.env.CI = CI;
process.env.CIRCLECI = CIRCLECI;
process.env.GITHUB_ACTION = GITHUB_ACTION;
process.env.BUILD_BUILDURI = BUILD_BUILDURI;
});
// These are integration tests and not e2e tests because these don't actually hit the remote server.
describe("integration", () => {
describe("federated", () => {
describe("should report composition errors correctly", () => {
it("vanilla", async () => {
captureApplicationOutput();
mockPartialSchemaCheckFailure();
expect.assertions(2);
await expect(
ServiceCheck.run([
...cliKeyParameter,
"--serviceName=accounts",
`--endpoint=${localURL}/graphql`,
])
).rejects.toThrow();
// Inline snapshots don't work here due to https://github.com/facebook/jest/issues/6744.
expect(uncaptureApplicationOutput()).toMatchSnapshot();
});
it("compacts output in CI", async () => {
captureApplicationOutput();
mockPartialSchemaCheckFailure();
expect.assertions(2);
process.env.CI = "true";
await expect(
ServiceCheck.run([
...cliKeyParameter,
"--serviceName=accounts",
`--endpoint=${localURL}/graphql`,
])
).rejects.toThrow();
// Inline snapshots don't work here due to https://github.com/facebook/jest/issues/6744.
expect(uncaptureApplicationOutput()).toMatchSnapshot();
});
it("--markdown", async () => {
captureApplicationOutput();
mockPartialSchemaCheckFailure();
expect.assertions(2);
// markdown formatted output should not throw
await expect(
ServiceCheck.run([
...cliKeyParameter,
"--serviceName=accounts",
`--endpoint=${localURL}/graphql`,
"--markdown",
])
).resolves.not.toThrow();
// Inline snapshots don't work here due to https://github.com/facebook/jest/issues/6744.
expect(uncaptureApplicationOutput()).toMatchSnapshot();
});
it("--json", async () => {
captureApplicationOutput();
mockPartialSchemaCheckFailure();
expect.assertions(2);
// JSON formatted output should not throw
await expect(
ServiceCheck.run([
...cliKeyParameter,
"--serviceName=accounts",
`--endpoint=${localURL}/graphql`,
"--json",
])
).resolves.not.toThrow();
// Inline snapshots don't work here due to https://github.com/facebook/jest/issues/6744.
expect(uncaptureApplicationOutput()).toMatchSnapshot();
});
});
describe("should report composition success correctly", () => {
it("vanilla", async () => {
captureApplicationOutput();
mockCompositionSuccess();
expect.assertions(2);
await expect(
ServiceCheck.run([
...cliKeyParameter,
"--serviceName=accounts",
`--endpoint=${localURL}/graphql`,
])
).resolves.not.toThrow();
// Inline snapshots don't work here due to https://github.com/facebook/jest/issues/6744.
expect(uncaptureApplicationOutput()).toMatchSnapshot();
});
it("errors when graph flag does not match token", async () => {
captureApplicationOutput();
mockCompositionSuccess();
expect.assertions(1);
await expect(
ServiceCheck.run([
...cliKeyParameter,
"--serviceName=accounts",
`--endpoint=${localURL}/graphql`,
`--graph=happy-fun-times`,
])
).rejects.toThrow(
/Cannot specify a service token that does not match graph./
);
});
it("allows setting graph with a flag", async () => {
captureApplicationOutput();
mockCompositionSuccess();
expect.assertions(1);
await expect(
ServiceCheck.run([
...cliKeyParameter,
"--serviceName=accounts",
`--endpoint=${localURL}/graphql`,
`--graph=happy-fun-times`,
`--key=service:happy-fun-times:asldf89jaose9jroinc`,
])
).resolves.not.toThrow();
});
it("compacts output in CI", async () => {
captureApplicationOutput();
mockCompositionSuccess();
expect.assertions(2);
process.env.CI = "true";
await expect(
ServiceCheck.run([
...cliKeyParameter,
"--serviceName=accounts",
`--endpoint=${localURL}/graphql`,
])
).resolves.not.toThrow();
// Inline snapshots don't work here due to https://github.com/facebook/jest/issues/6744.
expect(uncaptureApplicationOutput()).toMatchSnapshot();
});
it("--markdown", async () => {
captureApplicationOutput();
mockCompositionSuccess();
expect.assertions(2);
await expect(
ServiceCheck.run([
...cliKeyParameter,
"--serviceName=accounts",
`--endpoint=${localURL}/graphql`,
"--markdown",
])
).resolves.not.toThrow();
// Inline snapshots don't work here due to https://github.com/facebook/jest/issues/6744.
expect(uncaptureApplicationOutput()).toMatchSnapshot();
});
it("--json", async () => {
captureApplicationOutput();
mockCompositionSuccess();
expect.assertions(2);
await expect(
ServiceCheck.run([
...cliKeyParameter,
"--serviceName=accounts",
`--endpoint=${localURL}/graphql`,
"--json",
])
).resolves.not.toThrow();
// Inline snapshots don't work here due to https://github.com/facebook/jest/issues/6744.
expect(uncaptureApplicationOutput()).toMatchSnapshot();
});
});
});
describe("non-federated", () => {
describe("should report traffic errors correctly", () => {
it("vanilla", async () => {
captureApplicationOutput();
mockNonFederatedFailure();
expect.assertions(2);
await expect(
ServiceCheck.run([...cliKeyParameter])
).rejects.toThrow();
// Inline snapshots don't work here due to https://github.com/facebook/jest/issues/6744.
expect(uncaptureApplicationOutput()).toMatchSnapshot();
});
it("--markdown", async () => {
captureApplicationOutput();
mockNonFederatedFailure();
expect.assertions(2);
// markdown formatted output should not throw
await expect(
ServiceCheck.run([...cliKeyParameter, "--markdown"])
).resolves.not.toThrow();
// Inline snapshots don't work here due to https://github.com/facebook/jest/issues/6744.
expect(uncaptureApplicationOutput()).toMatchSnapshot();
});
it("--json", async () => {
captureApplicationOutput();
mockNonFederatedFailure();
expect.assertions(2);
// JSON formatted output should not throw
await expect(
ServiceCheck.run([...cliKeyParameter, "--json"])
).resolves.not.toThrow();
// Inline snapshots don't work here due to https://github.com/facebook/jest/issues/6744.
expect(uncaptureApplicationOutput()).toMatchSnapshot();
});
});
describe("should report traffic non-errors correctly", () => {
it("vanilla", async () => {
captureApplicationOutput();
mockNonFederatedSuccess();
expect.assertions(2);
await expect(
ServiceCheck.run([...cliKeyParameter])
).resolves.not.toThrow();
// Inline snapshots don't work here due to https://github.com/facebook/jest/issues/6744.
expect(uncaptureApplicationOutput()).toMatchSnapshot();
});
it("--markdown", async () => {
captureApplicationOutput();
mockNonFederatedSuccess();
expect.assertions(2);
await expect(
ServiceCheck.run([...cliKeyParameter, "--markdown"])
).resolves.not.toThrow();
// Inline snapshots don't work here due to https://github.com/facebook/jest/issues/6744.
expect(uncaptureApplicationOutput()).toMatchSnapshot();
});
it("--json", async () => {
captureApplicationOutput();
mockNonFederatedSuccess();
expect.assertions(2);
await expect(
ServiceCheck.run([...cliKeyParameter, "--json"])
).resolves.not.toThrow();
// Inline snapshots don't work here due to https://github.com/facebook/jest/issues/6744.
expect(uncaptureApplicationOutput()).toMatchSnapshot();
});
});
});
});
describe("markdown formatting", () => {
it("is correct with breaking changes", () => {
expect(
formatMarkdown({
graphName: "engine",
tag: "staging",
checkSchemaResult,
graphCompositionID: "fff",
})
).toMatchSnapshot();
// Check when all the values are singluar
expect(
formatMarkdown({
graphName: "engine",
tag: "staging",
checkSchemaResult: {
...checkSchemaResult,
diffToPrevious: {
...checkSchemaResult.diffToPrevious,
affectedClients: [
checkSchemaResult.diffToPrevious.affectedClients[0],
],
affectedQueries: [
checkSchemaResult.diffToPrevious.affectedQueries[0],
],
changes: [
checkSchemaResult.diffToPrevious.changes.find(
(change) => change.severity === ChangeSeverity.FAILURE
),
],
numberOfCheckedOperations: 1,
},
},
graphCompositionID: "fff",
})
).toMatchSnapshot();
});
it("is correct with no breaking changes", () => {
expect(
formatMarkdown({
graphName: "engine",
tag: "staging",
checkSchemaResult: {
...checkSchemaResult,
diffToPrevious: {
...checkSchemaResult.diffToPrevious,
severity: ChangeSeverity.NOTICE,
affectedClients: [],
affectedQueries: [],
changes: [
{
__typename: "Change",
code: "FIELD_ADDED",
severity: ChangeSeverity.NOTICE,
} as graphqlTypes.CheckSchema_service_checkSchema_diffToPrevious_changes,
],
},
},
graphCompositionID: "fff",
})
).toMatchSnapshot();
});
it("is correct with no changes", () => {
expect(
formatMarkdown({
graphName: "engine",
tag: "staging",
checkSchemaResult: {
...checkSchemaResult,
diffToPrevious: {
...checkSchemaResult.diffToPrevious,
severity: ChangeSeverity.NOTICE,
affectedClients: [],
affectedQueries: [],
changes: [],
validationConfig: null,
},
},
graphCompositionID: "fff",
})
).toMatchSnapshot();
});
});
  describe("formatTimePeriod", () => {
    // Per these snapshots: up to 24 hours renders as "N hour(s)".
    it("should show current result for 1 hour", () => {
      expect(formatTimePeriod(1)).toMatchInlineSnapshot(`"1 hour"`);
    });
    it("should show current result for 12 hours", () => {
      expect(formatTimePeriod(12)).toMatchInlineSnapshot(`"12 hours"`);
    });
    it("should show current result for 24 hours", () => {
      expect(formatTimePeriod(24)).toMatchInlineSnapshot(`"24 hours"`);
    });
    // Beyond a day the value rounds to whole days.
    it("should show current result for 36 hours", () => {
      expect(formatTimePeriod(36)).toMatchInlineSnapshot(`"1 day"`);
    });
    it("should show current result for 48 hours", () => {
      expect(formatTimePeriod(48)).toMatchInlineSnapshot(`"2 days"`);
    });
  });
describe("formatHumanReadable", () => {
it("should have correct output with breaking and non-breaking changes", () => {
expect(
stripAnsi(
formatHumanReadable({
checkSchemaResult,
graphCompositionID: "fff",
})
)
).toMatchSnapshot();
});
it("should have correct output with only non-breaking changes", () => {
expect(
formatHumanReadable({
checkSchemaResult: {
...checkSchemaResult,
diffToPrevious: {
...checkSchemaResult.diffToPrevious,
severity: ChangeSeverity.NOTICE,
affectedQueries: [],
changes: [],
},
},
graphCompositionID: "fff",
})
).toMatchSnapshot();
});
it("should have correct output with only breaking changes", () => {
expect(
// remove color from snapshot, circle ci doesn't like it
stripAnsi(
formatHumanReadable({
checkSchemaResult: {
...checkSchemaResult,
diffToPrevious: {
...checkSchemaResult.diffToPrevious,
severity: ChangeSeverity.NOTICE,
affectedQueries: [],
changes: checkSchemaResult.diffToPrevious.changes.filter(
(change) => change.severity === ChangeSeverity.FAILURE
),
},
},
graphCompositionID: "fff",
})
)
).toMatchSnapshot();
});
});
});
//TODO: Turn these tests back on
// jest.mock("apollo-codegen-core/lib/localfs", () => {
// return require("../../../__mocks__/localfs");
// });
// // this is because of heroku-cli-util's hacky mocking system on their console logger
// import { stdout, captureApplicationOutput } from "heroku-cli-util";
// import path from "path";
// import fs from "fs";
// import { test as setup } from "apollo-cli-test";
// import { introspectionQuery, print, execute, buildSchema } from "graphql";
// import gql from "graphql-tag";
// import { ENGINE_URI } from "../../../engine";
// import { VALIDATE_SCHEMA } from "../../../operations/validateSchema";
// import { vol, fs as mockFS } from "apollo-codegen-core/lib/localfs";
// const test = setup.do(() => captureApplicationOutput());
// const APOLLO_KEY = "service:test:1234";
// const hash = "12345";
// const schemaContents = fs.readFileSync(
// path.resolve(__dirname, "./fixtures/schema.graphql"),
// {
// encoding: "utf-8"
// }
// );
// const fullSchema = execute(buildSchema(schemaContents), gql(introspectionQuery))
// .data;
// const localSuccess = nock => {
// nock
// .post("/graphql", {
// query: print(gql(introspectionQuery)),
// operationName: "IntrospectionQuery",
// variables: {}
// })
// .reply(200, { data: fullSchema });
// };
// const engineSuccess = ({ schema, tag, results } = {}) => nock => {
// nock
// .matchHeader("x-api-key", APOLLO_KEY)
// .post("/", {
// operationName: "CheckSchema",
// variables: {
// id: "test",
// schema: schema || fullSchema.__schema,
// tag: tag || "current",
// gitContext: {
// commit: /.+/i,
// remoteUrl: /apollo-tooling/i,
// committer: /@/i
// }
// },
// query: print(VALIDATE_SCHEMA)
// })
// .reply(200, {
// data: {
// service: {
// schema: {
// checkSchema: {
// changes: results || [
// {
// severity: "NOTICE",
// code: "DEPRECATION_ADDED",
// description: "Field `User.lastName` was deprecated"
// },
// {
// severity: "WARNING",
// code: "FIELD_REMOVED",
// description: "Field `User.firstName` removed"
// },
// {
// severity: "FAILURE",
// code: "ARG_CHANGE_TYPE",
// description: "Argument id on `Query.user` changed to ID!"
// },
// {
// severity: "NOTICE",
// code: "FIELD_ADDED",
// description: "Field `User.fullName` was added"
// }
// ]
// }
// }
// }
// }
// });
// };
// jest.setTimeout(25000);
// beforeEach(() => {
// vol.reset();
// vol.fromJSON({
// __blankFileSoDirectoryExists: ""
// });
// });
// describe("successful checks", () => {
// test
// .nock("http://localhost:4000", localSuccess)
// .nock(ENGINE_URI, engineSuccess())
// .env({ APOLLO_KEY })
// .stdout()
// .command(["schema:check"])
// .exit(1)
// .it("compares against the latest uploaded schema", () => {
// expect(stdout).toContain("FAILURE");
// expect(stdout).toContain("NOTICE");
// expect(stdout).toContain("WARNING");
// });
// test
// .nock("http://localhost:4000", localSuccess)
// .nock(ENGINE_URI, engineSuccess())
// .stdout()
// .command(["schema:check", `--key=${APOLLO_KEY}`])
// .exit(1)
// .it("allows custom api key", () => {
// expect(stdout).toContain("FAILURE");
// expect(stdout).toContain("NOTICE");
// expect(stdout).toContain("WARNING");
// });
// test
// .nock("http://localhost:4000", localSuccess)
// .nock(ENGINE_URI, engineSuccess({ results: [] }))
// .env({ APOLLO_KEY })
// .stdout()
// .command(["schema:check"])
// .it(
// "compares against the latest uploaded schema with no change",
// ({ stdout }) => {
// expect(stdout).toContain("No changes");
// }
// );
// test
// .stdout()
// .nock("https://staging.example.com", localSuccess)
// .nock(ENGINE_URI, engineSuccess())
// .env({ APOLLO_KEY })
// .command(["schema:check", "--endpoint=https://staging.example.com/graphql"])
// .exit(1)
// .it("compares against a schema from a custom remote", () => {
// expect(stdout).toContain("FAILURE");
// expect(stdout).toContain("NOTICE");
// expect(stdout).toContain("WARNING");
// });
// test
// .stdout()
// .nock("http://localhost:4000", localSuccess)
// .nock(
// "https://engine.example.com",
// engineSuccess({ engine: "https://engine.example.com" })
// )
// .env({ APOLLO_KEY })
// .command(["schema:check", "--engine=https://engine.example.com"])
// .exit(1)
// .it("compares against a schema from a custom registry", std => {
// expect(stdout).toContain("FAILURE");
// expect(stdout).toContain("NOTICE");
// expect(stdout).toContain("WARNING");
// });
// test
// .stdout()
// .nock("https://staging.example.com", nock => {
// nock
// .matchHeader("Authorization", "1234")
// .matchHeader("Hello", "World")
// .post("/graphql", {
// query: print(gql(introspectionQuery)),
// operationName: "IntrospectionQuery",
// variables: {}
// })
// .reply(200, { data: fullSchema });
// })
// .nock(ENGINE_URI, engineSuccess())
// .env({ APOLLO_KEY })
// .command([
// "schema:check",
// "--endpoint=https://staging.example.com/graphql",
// "--header=Authorization: 1234",
// "--header=Hello: World"
// ])
// .exit(1)
// .it(
// "calls engine with a schema from a custom remote with custom headers",
// () => {
// expect(stdout).toContain("FAILURE");
// expect(stdout).toContain("NOTICE");
// expect(stdout).toContain("WARNING");
// }
// );
// test
// .do(() =>
// vol.fromJSON({
// "introspection-result.json": JSON.stringify({ data: fullSchema })
// })
// )
// .stdout()
// .nock(ENGINE_URI, engineSuccess())
// .env({ APOLLO_KEY })
// .command(["schema:check", "--endpoint=introspection-result.json"])
// .exit(1)
// .it(
// "calls engine with a schema from an introspection result on the filesystem",
// () => {
// expect(stdout).toContain("FAILURE");
// expect(stdout).toContain("NOTICE");
// expect(stdout).toContain("WARNING");
// }
// );
// test
// .do(() =>
// vol.fromJSON({
// "schema.graphql": schemaContents
// })
// )
// .stdout()
// .nock(ENGINE_URI, engineSuccess({ schema: fullSchema.__schema }))
// .env({ APOLLO_KEY })
// .command(["schema:check", "--endpoint=schema.graphql"])
// .exit(1)
// .it(
// "calls engine with a schema from a schema file on the filesystem",
// () => {
// expect(stdout).toContain("FAILURE");
// expect(stdout).toContain("NOTICE");
// expect(stdout).toContain("WARNING");
// }
// );
// test
// .nock("http://localhost:4000", localSuccess)
// .nock(ENGINE_URI, engineSuccess())
// .env({ APOLLO_KEY })
// .stdout()
// .command(["schema:check", "--json"])
// .exit(1)
// .it("allows formatting success as JSON", () => {
// expect(stdout).toContain('"severity": "FAILURE"');
// });
// });
// describe("error handling", () => {
// test
// .command(["schema:check"])
// .catch(err => expect(err.message).toMatch(/No API key/))
// .it("errors with no service API key");
// }); | the_stack |
import * as THREE from "three";
import DeadzoneChecker from "~/controls/ar/common/DeadzoneChecker";
import * as TOUCH from "~/consts/touch";
describe("DeadzoneChecker", () => {
describe("Initial properties", () => {
  it("should have default size of 0.1", () => {
    const checker = new DeadzoneChecker();
    expect(checker.size).to.equal(0.1);
  });

  it("should not in deadzone at initialization", () => {
    const checker = new DeadzoneChecker();
    expect(checker.inDeadzone).to.be.false;
  });

  it("has default aspect of 1", () => {
    // A freshly constructed checker must leave coordinates untouched,
    // i.e. the implicit screen aspect ratio is 1.
    const checker = new DeadzoneChecker();
    const coord = new THREE.Vector2(5, 5);

    checker.applyScreenAspect([coord]);

    expect(coord).to.deep.equal(new THREE.Vector2(5, 5));
  });
});
describe("Options", () => {
  it("can set size at initialization", () => {
    const checker = new DeadzoneChecker({ size: 10 });
    expect(checker.size).to.equal(10);
  });

  it("can change size at any time", () => {
    // Construct with one size, then overwrite it via the setter.
    const checker = new DeadzoneChecker({ size: 1 });
    checker.size = 100;
    expect(checker.size).to.equal(100);
  });
});
describe("aspect", () => {
it("should apply given aspect to coord.x if aspect is lower than 1", () => {
// Given
const deadzoneChecker = new DeadzoneChecker();
const input = new THREE.Vector2(5, 5);
// When
deadzoneChecker.setAspect(0.5);
deadzoneChecker.applyScreenAspect([input]);
// Then
expect(input).to.deep.equal(new THREE.Vector2(10, 5));
});
it("should apply given aspect to coord.y if aspect is bigger than 1", () => {
// Given
const deadzoneChecker = new DeadzoneChecker();
const input = new THREE.Vector2(5, 5);
// When
deadzoneChecker.setAspect(2);
deadzoneChecker.applyScreenAspect([input]);
// Then
expect(input).to.deep.equal(new THREE.Vector2(5, 10));
});
});
describe("Detecting a gesture", () => {
  // Every gesture scenario exercises the same three checks:
  //   1. each "correct" testing gesture is detected once the movement
  //      leaves a small (0.1) deadzone;
  //   2. each "correct" testing gesture yields NONE while the movement is
  //      still inside a larger deadzone;
  //   3. each "incorrect" testing gesture yields NONE for the same movement.
  // Inputs are supplied via factories so every test gets fresh Vector2s,
  // exactly as the original hand-written tests did.
  const scenarios = [
    {
      title: "One finger horizontal",
      expectedName: "ONE_FINGER_HORIZONTAL",
      expected: TOUCH.GESTURE.ONE_FINGER_HORIZONTAL,
      correct: [TOUCH.GESTURE.ONE_FINGER_HORIZONTAL, TOUCH.GESTURE.ONE_FINGER],
      incorrect: [TOUCH.GESTURE.NONE, TOUCH.GESTURE.ONE_FINGER_VERTICAL, TOUCH.GESTURE.TWO_FINGER, TOUCH.GESTURE.PINCH],
      firstInput: () => [new THREE.Vector2(0, 0)],
      movedInput: () => [new THREE.Vector2(1, 0)],
      deadzoneSize: 1
    },
    {
      title: "One finger vertical",
      expectedName: "ONE_FINGER_VERTICAL",
      expected: TOUCH.GESTURE.ONE_FINGER_VERTICAL,
      correct: [TOUCH.GESTURE.ONE_FINGER_VERTICAL, TOUCH.GESTURE.ONE_FINGER],
      incorrect: [TOUCH.GESTURE.NONE, TOUCH.GESTURE.ONE_FINGER_HORIZONTAL, TOUCH.GESTURE.TWO_FINGER, TOUCH.GESTURE.PINCH],
      firstInput: () => [new THREE.Vector2(0, 0)],
      movedInput: () => [new THREE.Vector2(0, 1)],
      deadzoneSize: 1
    },
    {
      title: "Two finger horizontal",
      expectedName: "TWO_FINGER_HORIZONTAL",
      expected: TOUCH.GESTURE.TWO_FINGER_HORIZONTAL,
      correct: [TOUCH.GESTURE.TWO_FINGER_HORIZONTAL, TOUCH.GESTURE.TWO_FINGER],
      incorrect: [TOUCH.GESTURE.NONE, TOUCH.GESTURE.TWO_FINGER_VERTICAL, TOUCH.GESTURE.ONE_FINGER, TOUCH.GESTURE.PINCH],
      firstInput: () => [new THREE.Vector2(0, 1), new THREE.Vector2(0, -1)],
      movedInput: () => [new THREE.Vector2(1, 1), new THREE.Vector2(1, -1)],
      deadzoneSize: 1
    },
    {
      title: "Two finger vertical",
      expectedName: "TWO_FINGER_VERTICAL",
      expected: TOUCH.GESTURE.TWO_FINGER_VERTICAL,
      correct: [TOUCH.GESTURE.TWO_FINGER_VERTICAL, TOUCH.GESTURE.TWO_FINGER],
      incorrect: [TOUCH.GESTURE.NONE, TOUCH.GESTURE.TWO_FINGER_HORIZONTAL, TOUCH.GESTURE.ONE_FINGER, TOUCH.GESTURE.PINCH],
      firstInput: () => [new THREE.Vector2(1, 0), new THREE.Vector2(-1, 0)],
      movedInput: () => [new THREE.Vector2(1, 1), new THREE.Vector2(-1, 1)],
      deadzoneSize: 1
    },
    {
      title: "Pinch",
      expectedName: "PINCH",
      expected: TOUCH.GESTURE.PINCH,
      correct: [TOUCH.GESTURE.PINCH],
      incorrect: [TOUCH.GESTURE.NONE, TOUCH.GESTURE.ONE_FINGER, TOUCH.GESTURE.TWO_FINGER],
      firstInput: () => [new THREE.Vector2(0, 0), new THREE.Vector2(0, 0)],
      movedInput: () => [new THREE.Vector2(-1, -1), new THREE.Vector2(1, 1)],
      deadzoneSize: 5
    }
  ];

  scenarios.forEach(scenario => {
    describe(scenario.title, () => {
      scenario.correct.forEach(gesture => {
        it(`should return TOUCH.GESTURE.${scenario.expectedName} if testing gesture ${TOUCH.GESTURE[gesture]} is added before`, () => {
          // Given: a checker whose deadzone is smaller than the movement
          const deadzoneChecker = new DeadzoneChecker({ size: 0.1 });

          // When
          deadzoneChecker.addTestingGestures(gesture);
          deadzoneChecker.setFirstInput(scenario.firstInput());
          const result = deadzoneChecker.check(scenario.movedInput());

          // Then
          expect(result).to.equal(scenario.expected);
        });
        it(`should return TOUCH.GESTURE.NONE if testing gesture ${TOUCH.GESTURE[gesture]} is added before and it's in deadzone`, () => {
          // Given: a deadzone large enough to swallow the movement
          const deadzoneChecker = new DeadzoneChecker({ size: scenario.deadzoneSize });

          // When
          deadzoneChecker.addTestingGestures(gesture);
          deadzoneChecker.setFirstInput(scenario.firstInput());
          const result = deadzoneChecker.check(scenario.movedInput());

          // Then
          expect(result).to.equal(TOUCH.GESTURE.NONE);
        });
      });
      scenario.incorrect.forEach(gesture => {
        it(`should return TOUCH.GESTURE.NONE if testing gesture ${TOUCH.GESTURE[gesture]} is added before`, () => {
          // Given: a gesture that should never match this movement
          const deadzoneChecker = new DeadzoneChecker({ size: 0.1 });

          // When
          deadzoneChecker.addTestingGestures(gesture);
          deadzoneChecker.setFirstInput(scenario.firstInput());
          const result = deadzoneChecker.check(scenario.movedInput());

          // Then
          expect(result).to.equal(TOUCH.GESTURE.NONE);
        });
      });
    });
  });
});
}); | the_stack |
import "reflect-metadata";
import {Post} from "./entity/Post";
import {Connection} from "../../../../../src/connection/Connection";
import {closeTestingConnections, createTestingConnections, reloadTestingDatabases} from "../../../../utils/test-utils";
import {PostWithOptions} from "./entity/PostWithOptions";
import {PostWithoutTypes} from "./entity/PostWithoutTypes";
import {DateUtils} from "../../../../../src/util/DateUtils";
import {FruitEnum} from "./enum/FruitEnum";
describe("database schema > column types > mssql", () => { // https://github.com/tediousjs/tedious/issues/722
// Shared TypeORM connections for every test in this suite.
let connections: Connection[];
before(async () => {
// One connection per configured test server, restricted to the mssql driver.
connections = await createTestingConnections({
entities: [__dirname + "/entity/*{.js,.ts}"],
enabledDrivers: ["mssql"],
});
});
// Reload the testing databases so each test starts from a clean state.
beforeEach(() => reloadTestingDatabases(connections));
after(() => closeTestingConnections(connections));
it("all types should work correctly - persist and hydrate", () => Promise.all(connections.map(async connection => {
const postRepository = connection.getRepository(Post);
const queryRunner = connection.createQueryRunner();
const table = await queryRunner.getTable("post");
await queryRunner.release();
// Populate one value for every mssql column type exercised by the Post entity.
const post = new Post();
post.id = 1;
post.name = "Post";
post.bit = true;
post.tinyint = 127;
post.smallint = 32767;
post.int = 2147483647;
post.bigint = "9007199254740991";
post.decimal = 50;
post.dec = 100;
post.numeric = 10;
post.float = 10.53;
post.real = 10.5;
post.smallmoney = 100;
post.money = 2500;
post.uniqueidentifier = "FD357B8F-8838-42F6-B7A2-AE027444E895";
post.char = "A";
post.varchar = "This is varchar";
post.text = "This is text";
post.nchar = "A";
post.nvarchar = "This is nvarchar";
post.ntext = "This is ntext";
post.binary = Buffer.from("A");
post.varbinary = Buffer.from("B");
post.image = Buffer.from("This is image");
post.dateObj = new Date();
post.date = "2017-06-21";
post.datetime = new Date();
post.datetime.setMilliseconds(0); // set milliseconds to zero because the SQL Server datetime type only has a 1/300 s (~3.33 ms) resolution
post.datetime2 = new Date();
post.smalldatetime = new Date();
post.smalldatetime.setSeconds(0); // set seconds to zero because smalldatetime type rounds seconds
post.smalldatetime.setMilliseconds(0); // set milliseconds to zero because smalldatetime type does not stores milliseconds
post.timeObj = new Date();
post.time = "15:30:00";
post.datetimeoffset = new Date();
post.geometry1 = "LINESTRING (100 100, 20 180, 180 180)";
post.geometry2 = "POLYGON ((0 0, 150 0, 150 150, 0 150, 0 0))";
post.geometry3 = "GEOMETRYCOLLECTION (POINT (4 0), LINESTRING (4 2, 5 3), POLYGON ((0 0, 3 0, 3 3, 0 3, 0 0), (1 1, 1 2, 2 2, 2 1, 1 1)))";
post.simpleArray = ["A", "B", "C"];
post.simpleJson = { param: "VALUE" };
post.simpleEnum = "A";
post.simpleClassEnum1 = FruitEnum.Apple;
await postRepository.save(post);
// Round-trip: reload the row and verify each value was hydrated as persisted.
const loadedPost = (await postRepository.findOne(1))!;
loadedPost.id.should.be.equal(post.id);
loadedPost.name.should.be.equal(post.name);
loadedPost.bit.should.be.equal(post.bit);
loadedPost.smallint.should.be.equal(post.smallint);
loadedPost.tinyint.should.be.equal(post.tinyint);
loadedPost.int.should.be.equal(post.int);
loadedPost.bigint.should.be.equal(post.bigint);
loadedPost.decimal.should.be.equal(post.decimal);
loadedPost.dec.should.be.equal(post.dec);
loadedPost.numeric.should.be.equal(post.numeric);
loadedPost.float.should.be.equal(post.float);
loadedPost.real.should.be.equal(post.real);
loadedPost.smallmoney.should.be.equal(post.smallmoney);
loadedPost.money.should.be.equal(post.money);
loadedPost.uniqueidentifier.should.be.equal(post.uniqueidentifier);
loadedPost.char.should.be.equal(post.char);
loadedPost.varchar.should.be.equal(post.varchar);
loadedPost.text.should.be.equal(post.text);
loadedPost.nchar.should.be.equal(post.nchar);
loadedPost.nvarchar.should.be.equal(post.nvarchar);
loadedPost.ntext.should.be.equal(post.ntext);
loadedPost.binary.toString().should.be.equal(post.binary.toString());
loadedPost.varbinary.toString().should.be.equal(post.varbinary.toString());
loadedPost.image.toString().should.be.equal(post.image.toString());
// rowversion is generated by the server, so only presence is asserted
loadedPost.rowversion.should.not.be.null;
loadedPost.rowversion.should.not.be.undefined;
loadedPost.dateObj.should.be.equal(DateUtils.mixedDateToDateString(post.dateObj));
loadedPost.date.should.be.equal(post.date);
// commented because mssql inserted milliseconds are not always equal to what we say it to insert
// commented to prevent CI failings
// loadedPost.datetime.getTime().should.be.equal(post.datetime.getTime());
// loadedPost.datetime2.getTime().should.be.equal(post.datetime2.getTime());
// loadedPost.datetimeoffset.getTime().should.be.equal(post.datetimeoffset.getTime());
loadedPost.geometry1.should.be.equal(post.geometry1);
loadedPost.geometry2.should.be.equal(post.geometry2);
loadedPost.geometry3.should.be.equal(post.geometry3);
loadedPost.smalldatetime.getTime().should.be.equal(post.smalldatetime.getTime());
loadedPost.timeObj.should.be.equal(DateUtils.mixedTimeToString(post.timeObj));
loadedPost.time.should.be.equal(post.time);
loadedPost.simpleArray[0].should.be.equal(post.simpleArray[0]);
loadedPost.simpleArray[1].should.be.equal(post.simpleArray[1]);
loadedPost.simpleArray[2].should.be.equal(post.simpleArray[2]);
loadedPost.simpleJson.param.should.be.equal(post.simpleJson.param);
loadedPost.simpleEnum.should.be.equal(post.simpleEnum);
loadedPost.simpleClassEnum1.should.be.equal(post.simpleClassEnum1);
// Verify the schema metadata reports the expected mssql type for every column.
// NOTE(review): geometry2/geometry3 have no schema-type assertions here — confirm intended.
table!.findColumnByName("id")!.type.should.be.equal("int");
table!.findColumnByName("name")!.type.should.be.equal("nvarchar");
table!.findColumnByName("bit")!.type.should.be.equal("bit");
table!.findColumnByName("tinyint")!.type.should.be.equal("tinyint");
table!.findColumnByName("smallint")!.type.should.be.equal("smallint");
table!.findColumnByName("int")!.type.should.be.equal("int");
table!.findColumnByName("bigint")!.type.should.be.equal("bigint");
table!.findColumnByName("decimal")!.type.should.be.equal("decimal");
table!.findColumnByName("dec")!.type.should.be.equal("decimal");
table!.findColumnByName("numeric")!.type.should.be.equal("numeric");
table!.findColumnByName("float")!.type.should.be.equal("float");
table!.findColumnByName("real")!.type.should.be.equal("real");
table!.findColumnByName("smallmoney")!.type.should.be.equal("smallmoney");
table!.findColumnByName("money")!.type.should.be.equal("money");
table!.findColumnByName("uniqueidentifier")!.type.should.be.equal("uniqueidentifier");
table!.findColumnByName("char")!.type.should.be.equal("char");
table!.findColumnByName("varchar")!.type.should.be.equal("varchar");
table!.findColumnByName("text")!.type.should.be.equal("text");
table!.findColumnByName("nchar")!.type.should.be.equal("nchar");
table!.findColumnByName("nvarchar")!.type.should.be.equal("nvarchar");
table!.findColumnByName("ntext")!.type.should.be.equal("ntext");
table!.findColumnByName("binary")!.type.should.be.equal("binary");
table!.findColumnByName("varbinary")!.type.should.be.equal("varbinary");
table!.findColumnByName("image")!.type.should.be.equal("image");
// the rowversion type's name in SQL server metadata is timestamp
table!.findColumnByName("rowversion")!.type.should.be.equal("timestamp");
table!.findColumnByName("date")!.type.should.be.equal("date");
table!.findColumnByName("dateObj")!.type.should.be.equal("date");
table!.findColumnByName("datetime")!.type.should.be.equal("datetime");
table!.findColumnByName("datetime2")!.type.should.be.equal("datetime2");
table!.findColumnByName("smalldatetime")!.type.should.be.equal("smalldatetime");
table!.findColumnByName("time")!.type.should.be.equal("time");
table!.findColumnByName("timeObj")!.type.should.be.equal("time");
table!.findColumnByName("datetimeoffset")!.type.should.be.equal("datetimeoffset");
table!.findColumnByName("geometry1")!.type.should.be.equal("geometry");
table!.findColumnByName("simpleArray")!.type.should.be.equal("ntext");
table!.findColumnByName("simpleJson")!.type.should.be.equal("ntext");
table!.findColumnByName("simpleEnum")!.type.should.be.equal("nvarchar");
table!.findColumnByName("simpleEnum")!.enum![0].should.be.equal("A");
table!.findColumnByName("simpleEnum")!.enum![1].should.be.equal("B");
table!.findColumnByName("simpleEnum")!.enum![2].should.be.equal("C");
table!.findColumnByName("simpleClassEnum1")!.type.should.be.equal("nvarchar");
table!.findColumnByName("simpleClassEnum1")!.enum![0].should.be.equal("apple");
table!.findColumnByName("simpleClassEnum1")!.enum![1].should.be.equal("pineapple");
table!.findColumnByName("simpleClassEnum1")!.enum![2].should.be.equal("banana");
})));
// Verifies that column options (length / precision / scale) declared on
// PostWithOptions survive a persist/hydrate round-trip and are reflected in
// the table metadata. Fix: the char/varchar/nchar/nvarchar assertions were
// duplicated verbatim; one copy has been removed.
it("all types should work correctly - persist and hydrate when options are specified on columns", () => Promise.all(connections.map(async connection => {
const postRepository = connection.getRepository(PostWithOptions);
const queryRunner = connection.createQueryRunner();
const table = await queryRunner.getTable("post_with_options");
await queryRunner.release();
// Populate columns whose decorators carry explicit type options.
const post = new PostWithOptions();
post.id = 1;
post.decimal = 50;
post.dec = 60;
post.numeric = 70;
post.char = "AAA";
post.varchar = "This is varchar";
post.nchar = "AAA";
post.nvarchar = "This is nvarchar";
post.binary = Buffer.from("AAAAA");
post.varbinary = Buffer.from("BBBBB");
post.datetime2 = new Date();
post.time = new Date();
post.datetimeoffset = new Date();
await postRepository.save(post);
// Round-trip: reload the row and verify each value was hydrated as persisted.
const loadedPost = (await postRepository.findOne(1))!;
loadedPost.id.should.be.equal(post.id);
loadedPost.decimal.should.be.equal(post.decimal);
loadedPost.dec.should.be.equal(post.dec);
loadedPost.numeric.should.be.equal(post.numeric);
loadedPost.char.should.be.equal(post.char);
loadedPost.varchar.should.be.equal(post.varchar);
loadedPost.nchar.should.be.equal(post.nchar);
loadedPost.nvarchar.should.be.equal(post.nvarchar);
loadedPost.binary.toString().should.be.equal(post.binary.toString());
loadedPost.varbinary.toString().should.be.equal(post.varbinary.toString());
// commented because mssql inserted milliseconds are not always equal to what we say it to insert
// commented to prevent CI failings
// loadedPost.datetime2.getTime().should.be.equal(post.datetime2.getTime());
// loadedPost.datetimeoffset.getTime().should.be.equal(post.datetimeoffset.getTime());
loadedPost.time.should.be.equal(DateUtils.mixedTimeToString(post.time));
// Verify the schema metadata reflects the declared column options.
table!.findColumnByName("id")!.type.should.be.equal("int");
table!.findColumnByName("decimal")!.type.should.be.equal("decimal");
table!.findColumnByName("decimal")!.precision!.should.be.equal(10);
table!.findColumnByName("decimal")!.scale!.should.be.equal(5);
table!.findColumnByName("dec")!.type.should.be.equal("decimal");
table!.findColumnByName("dec")!.precision!.should.be.equal(10);
table!.findColumnByName("dec")!.scale!.should.be.equal(5);
table!.findColumnByName("numeric")!.type.should.be.equal("numeric");
table!.findColumnByName("numeric")!.precision!.should.be.equal(10);
table!.findColumnByName("numeric")!.scale!.should.be.equal(5);
table!.findColumnByName("char")!.type.should.be.equal("char");
table!.findColumnByName("char")!.length!.should.be.equal("3");
table!.findColumnByName("varchar")!.type.should.be.equal("varchar");
table!.findColumnByName("varchar")!.length!.should.be.equal("50");
table!.findColumnByName("nchar")!.type.should.be.equal("nchar");
table!.findColumnByName("nchar")!.length!.should.be.equal("3");
table!.findColumnByName("nvarchar")!.type.should.be.equal("nvarchar");
table!.findColumnByName("nvarchar")!.length!.should.be.equal("40");
table!.findColumnByName("binary")!.type.should.be.equal("binary");
table!.findColumnByName("binary")!.length!.should.be.equal("5");
table!.findColumnByName("varbinary")!.type.should.be.equal("varbinary");
table!.findColumnByName("varbinary")!.length!.should.be.equal("5");
table!.findColumnByName("datetime2")!.type.should.be.equal("datetime2");
table!.findColumnByName("datetime2")!.precision!.should.be.equal(4);
table!.findColumnByName("time")!.type.should.be.equal("time");
table!.findColumnByName("time")!.precision!.should.be.equal(5);
table!.findColumnByName("datetimeoffset")!.type.should.be.equal("datetimeoffset");
table!.findColumnByName("datetimeoffset")!.precision!.should.be.equal(6);
})));
// Verifies that columns declared without explicit types get sensible mssql
// defaults (e.g. string -> nvarchar, boolean -> bit, Buffer -> binary).
it("all types should work correctly - persist and hydrate when types are not specified on columns", () => Promise.all(connections.map(async connection => {
const postRepository = connection.getRepository(PostWithoutTypes);
const queryRunner = connection.createQueryRunner();
const table = await queryRunner.getTable("post_without_types");
await queryRunner.release();
const post = new PostWithoutTypes();
post.id = 1;
post.name = "Post";
post.bit = true;
post.binary = Buffer.from("A");
post.datetime = new Date();
post.datetime.setMilliseconds(0); // set milliseconds to zero because the SQL Server datetime type only has a 1/300 s (~3.33 ms) resolution
await postRepository.save(post);
// Round-trip: reload and verify every value survived persistence.
const loadedPost = (await postRepository.findOne(1))!;
loadedPost.id.should.be.equal(post.id);
loadedPost.name.should.be.equal(post.name);
loadedPost.bit.should.be.equal(post.bit);
loadedPost.binary.toString().should.be.equal(post.binary.toString());
loadedPost.datetime.getTime().should.be.equal(post.datetime.getTime());
// Default column types inferred by the mssql driver.
table!.findColumnByName("id")!.type.should.be.equal("int");
table!.findColumnByName("name")!.type.should.be.equal("nvarchar");
table!.findColumnByName("bit")!.type.should.be.equal("bit");
table!.findColumnByName("binary")!.type.should.be.equal("binary");
table!.findColumnByName("datetime")!.type.should.be.equal("datetime");
})));
}); | the_stack |
import { CommerceTypes } from '@brandingbrand/fscommerce';
import React, { Component } from 'react';
import { get } from 'lodash-es';
import pluralize from 'pluralize';
import { UnwrappedProductIndexProps as ProductIndexPropType } from './ProductIndex';
import { WithProductIndexProps } from './ProductIndexProvider';
import ProductList from './ProductList';
import {
FilterItem,
FilterItemValue,
FilterList,
FilterListDrilldown,
Loading,
Modal,
ModalHalfScreen,
ProductItem,
RefineActionBar,
SelectableList,
SelectableRow
} from '@brandingbrand/fscomponents';
import { style as S } from '../styles/ProductIndex';
import {
Image,
ListRenderItemInfo,
SafeAreaView,
StyleProp,
StyleSheet,
Text,
TouchableOpacity,
View,
ViewStyle
} from 'react-native';
import FSI18n, { translationKeys } from '@brandingbrand/fsi18n';
// Icon assets bundled with this component.
const images = {
close: require('../../assets/images/iconClose.png')
};
const styles = StyleSheet.create({
// Positions the modal close ("cancel") button in the top-right corner.
cancelButton: {
position: 'absolute',
top: 16,
right: 21.5
}
});
// Shorthand for this component's i18n translation namespace.
const componentTranslationKeys = translationKeys.flagship.productIndex;
// Props for ProductIndexGrid, extending the unwrapped ProductIndex props.
// NOTE(review): "PropTyps" looks like a typo for "PropTypes", but it is an
// exported name — renaming would break consumers, so it is left as-is.
export interface PropTyps extends ProductIndexPropType {
// Returns a press handler bound to the given product.
onPress: (data: CommerceTypes.Product) => () => void;
// Optional style override for the outer container.
containerStyle?: StyleProp<ViewStyle>;
// When true, the header renders with a cancel (close) button.
headerWithCancelButton?: boolean;
}
const defaultErrorMessage =
'We were unable to load the information at this time. Please try again.';
const SORT_ITEM_KEY = '__pirate_sort';
export interface StateType {
sortModalVisible: boolean;
filterModalVisible: boolean;
isLoading: boolean;
isMoreLoading: boolean;
hasFetchError: boolean;
}
/**
 * Grid presentation for a product index: renders the product list, the
 * sort/filter action bar, the sort and filter modals, and manages paging
 * (load more / load previous) state.
 */
export default class ProductIndexGrid extends Component<
  PropTyps & WithProductIndexProps,
  StateType
> {
  constructor(props: PropTyps & WithProductIndexProps) {
    super(props);
    const { commerceData, onLoadComplete } = props;
    let maxPageLoaded = 1;
    let maxCount = 1;
    if (commerceData) {
      maxCount = this.maxCount(commerceData);
      if (commerceData.page) {
        maxPageLoaded = commerceData.page;
        if (commerceData.limit && commerceData.products) {
          // Count of products loaded so far across all fetched pages.
          maxCount = (commerceData.limit * (commerceData.page - 1)) + commerceData.products.length;
        }
      }
      if (onLoadComplete) {
        // Inform the parent of the loadMore callback and whether more pages remain.
        onLoadComplete(this.loadMore, maxPageLoaded < this.maxPage(commerceData),
          maxCount, maxCount);
      }
    }
    this.state = {
      sortModalVisible: false,
      filterModalVisible: false,
      isLoading: false,
      isMoreLoading: false,
      hasFetchError: false
    };
  }
  /**
   * Number of products expected to be loaded through the current page,
   * capped at the total; 0 when paging info is missing.
   */
  maxCount = (commerceData?: CommerceTypes.ProductIndex): number => {
    if (commerceData && commerceData.limit && commerceData.total && commerceData.page) {
      const maxPage = this.maxPage(commerceData);
      if (commerceData.page < maxPage) {
        return commerceData.page * commerceData.limit;
      } else {
        return commerceData.total;
      }
    } else {
      return 0;
    }
  }
  /** Total number of pages implied by total/limit; 1 when unknown. */
  maxPage = (commerceData?: CommerceTypes.ProductIndex) => {
    if (commerceData && commerceData.total && commerceData.limit) {
      return Math.ceil(commerceData.total / commerceData.limit);
    }
    return 1;
  }
  /** Renders one product cell, delegating to renderProductItem when provided. */
  renderItem = ({ item }: ListRenderItemInfo<CommerceTypes.Product>): JSX.Element => {
    const { productItemProps, onPress, renderProductItem } = this.props;
    if (renderProductItem) {
      return renderProductItem(item);
    }
    return (
      <ProductItem
        style={S.productItem}
        id={item.id}
        title={item.title}
        brand={item.brand}
        image={item.images && item.images.find(img => !!img.uri)}
        imageStyle={S.productImage}
        price={item.price}
        originalPrice={item.originalPrice}
        variantText={getVariantText(item)}
        promos={item.promotions}
        reviewValue={get(item, 'review.summary.averageRating')}
        reviewCount={get(item, 'review.summary.reviewCount')}
        onPress={onPress(item)}
        {...productItemProps}
      />
    );
  }
  /** Renders the sort/filter action bar, or the custom override when provided. */
  renderActionBar = () => {
    const {
      commerceData,
      hideActionBar,
      mergeSortToFilter,
      refineActionBarProps,
      renderRefineActionBar
    } = this.props;
    if (hideActionBar || !commerceData) {
      return null;
    }
    if (renderRefineActionBar) {
      return renderRefineActionBar(
        this.showFilterModal,
        this.showSortModal,
        commerceData,
        this.handleFilterApply,
        this.handleFilterReset
      );
    }
    return (
      <RefineActionBar
        style={S.actionBar}
        onSortPress={this.showSortModal}
        onFilterPress={this.showFilterModal}
        // When sort options are merged into the filter modal, hide the sort button.
        sortButtonStyle={mergeSortToFilter ? { display: 'none' } : null}
        {...refineActionBarProps}
      />
    );
  }
  /** List header: optional "load previous page" control above the action bar. */
  renderHeader = () => {
    const { commerceData } = this.props;
    if (!commerceData) {
      return null;
    }
    let loadPrev: JSX.Element | null = null;
    if (this.props.renderLoadPrev) {
      if (this.state.isMoreLoading) {
        loadPrev = this.props.renderLoading ? (
          this.props.renderLoading()
        ) : (
          <Loading
            style={[
              S.loading,
              S.loadingLoadMore,
              this.props.loadMoreLoadingStyle
            ]}
          />
        );
      } else {
        loadPrev = this.props.renderLoadPrev(
          this.loadPrev,
          // Enabled only when a page before the earliest loaded one exists.
          (commerceData.minPage || commerceData.page || 1) > 1
        );
      }
    }
    return (
      <View>
        {loadPrev}
        {this.renderActionBar()}
      </View>
    );
  }
  showFilterModal = () => {
    this.setState({ filterModalVisible: true });
  }
  closeFilterModal = () => {
    this.setState({ filterModalVisible: false });
  }
  showSortModal = () => {
    this.setState({ sortModalVisible: true });
  }
  closeSortModal = () => {
    this.setState({ sortModalVisible: false });
  }
  /**
   * Applies the selected refinements (and, when merged, the sort option).
   * With filterInBackground the modal stays open until the apply button is
   * pressed; otherwise it closes immediately.
   */
  handleFilterApply = (selectedItems: any, info?: { isButtonPress: boolean }) => {
    if (!this.props.filterInBackground) {
      this.closeFilterModal();
    } else {
      if (info && info.isButtonPress) {
        this.closeFilterModal();
        return;
      }
    }
    let sortQuery = {};
    let refinementsQuery = {};
    // The merged sort selection travels under the sentinel key; split it back out.
    if (selectedItems[SORT_ITEM_KEY]) {
      sortQuery = { sortBy: selectedItems[SORT_ITEM_KEY][0] };
      delete selectedItems[SORT_ITEM_KEY];
    }
    if (Object.keys(selectedItems).length > 0) {
      refinementsQuery = { refinements: selectedItems };
    }
    if (this.props.handleFilterApply) {
      this.props.handleFilterApply(selectedItems, info);
    } else {
      this.reloadByQuery({
        ...refinementsQuery,
        ...sortQuery
      });
    }
  }
  /** Clears all refinements, delegating to the prop override when provided. */
  handleFilterReset = () => {
    if (!this.props.filterInBackground) {
      this.closeFilterModal();
    }
    if (this.props.handleFilterReset) {
      this.props.handleFilterReset();
    } else {
      this.reloadWithReset();
    }
  }
  /**
   * Curried sort handler: captures the currently selected refinements so they
   * are preserved when the sort option changes.
   */
  handleSortChange = (selectedItems?: Record<string, string[]>) => (
    sortItem: CommerceTypes.SortingOption
  ) => {
    let refinementsQuery: CommerceTypes.ProductQuery = {};
    if (selectedItems && Object.keys(selectedItems).length > 0) {
      refinementsQuery = {refinements: selectedItems};
    }
    this.closeSortModal();
    if (this.props.handleSortChange) {
      this.props.handleSortChange(sortItem.id);
    } else if (sortItem.id === 'default') {
      // 'default' is the synthetic option injected by defaultSortOption: clear sortBy.
      this.reloadByQuery({
        sortBy: undefined,
        ...refinementsQuery
      });
    } else {
      this.reloadByQuery({
        sortBy: sortItem.id,
        ...refinementsQuery
      });
    }
  }
  /** Refetches with the given query merged over the preserved sort/filter state. */
  reloadByQuery = (query: CommerceTypes.ProductQuery) => {
    this.setState({ isLoading: true, hasFetchError: false });
    this.fetchByExtraQuery(query)
      .then((data: CommerceTypes.ProductIndex) => {
        this.handleNewData(data);
        this.setState({
          isLoading: false
        });
      })
      .catch(() => {
        this.setState({
          isLoading: false,
          hasFetchError: true
        });
      });
  }
  /** Refetches with refinements and sortBy stripped from the base query. */
  reloadWithReset = () => {
    this.setState({ isLoading: true, hasFetchError: false });
    const productQuery: CommerceTypes.ProductQuery = { ...this.props.productQuery };
    delete productQuery.refinements;
    delete productQuery.sortBy;
    let fetchProducts = null;
    if (this.props.fetchProducts) {
      fetchProducts = this.props.fetchProducts(productQuery);
    } else if (this.props.commerceDataSource && this.props.commerceDataSource.fetchProductIndex) {
      fetchProducts = this.props.commerceDataSource.fetchProductIndex(productQuery);
    } else {
      throw new Error('FSProductIndex: [props.fetchProducts] '
        + 'or [props.commerceDataSource.fetchProductIndex] is required');
    }
    fetchProducts
      .then(data => {
        if (this.props.filterInBackground) {
          this.closeFilterModal();
        }
        this.handleNewData(data);
        this.setState({
          isLoading: false
        });
      })
      .catch(() => {
        this.setState({
          isLoading: false,
          hasFetchError: true
        });
      });
  }
  /** Pushes freshly fetched data to the provider and re-notifies onLoadComplete. */
  handleNewData = (data: CommerceTypes.ProductIndex) => {
    // NOTE(review): newState is never populated, so this setState is effectively
    // a no-op re-render trigger; confirm whether it can be removed.
    const newState: any = {};
    const maxPageLoaded = data.page || 1;
    const maxCount = this.maxCount(data);
    if (this.props.onLoadComplete) {
      this.props.onLoadComplete(this.loadMore, maxPageLoaded < this.maxPage(data),
        maxCount, maxCount);
    }
    this.setState(newState);
    if (this.props.commerceLoadData) {
      this.props.commerceLoadData(data);
    }
  }
  /**
   * refetch commerce data and preserve existing sort/filter
   */
  fetchByExtraQuery = async (
    query: CommerceTypes.ProductQuery
  ): Promise<CommerceTypes.ProductIndex> => {
    const {
      commerceDataSource
    } = this.props;
    const newQuery = this.newProductQuery(query);
    if (this.props.fetchProducts) {
      return this.props.fetchProducts(newQuery);
    } else if (commerceDataSource) {
      return commerceDataSource.fetchProductIndex(newQuery);
    } else {
      throw new Error('FSProductIndex: [props.fetchProducts] '
        + 'or [props.commerceDataSource.fetchProductIndex] is required');
    }
  }
  /**
   * Builds a product query from the base productQuery, layering on the current
   * sort/refinement selections and finally the caller-supplied overrides.
   */
  newProductQuery = (query: CommerceTypes.ProductQuery) => {
    const {
      commerceDataSource,
      productQuery,
      commerceData
    } = this.props;
    const newQuery: CommerceTypes.ProductQuery = {
      ...productQuery
    };
    if (commerceData) {
      if (commerceData.selectedSortingOption) {
        newQuery.sortBy = commerceData.selectedSortingOption;
      }
      // Only carry refinements beyond the data source's baseline minimum.
      if (commerceData.selectedRefinements &&
        Object.keys(commerceData.selectedRefinements).length >
        ((commerceDataSource && commerceDataSource.minRefinements) || 0)
      ) {
        newQuery.refinements = commerceData.selectedRefinements;
      }
      Object.assign(newQuery, query);
    }
    return newQuery;
  }
  /** Modal header close control: "X" icon variant. */
  renderCancelButton = (onPress: () => void) => {
    return (
      <TouchableOpacity
        style={styles.cancelButton}
        onPress={onPress}
        activeOpacity={0.7}
      >
        <Image source={images.close} />
      </TouchableOpacity>
    );
  }
  /** Modal header close control: localized "Cancel" text variant. */
  renderCancelText = (onPress: () => void) => {
    return (
      <TouchableOpacity
        style={[S.modalHeaderClose, this.props.modalCancelStyle]}
        onPress={onPress}
      >
        <Text style={S.modalHeaderCloseText}>
          {FSI18n.string(componentTranslationKeys.cancel)}
        </Text>
      </TouchableOpacity>
    );
  }
  /** Shared header for the sort/filter modals with a title and close control. */
  renderModalHeader = ({ title, onPress }: any) => {
    const drilldownStyle =
      this.props.filterType === 'drilldown'
        ? { height: 50, paddingTop: 0 }
        : null;
    return (
      <View
        style={[S.modalHeader, drilldownStyle, this.props.modalHeaderStyle]}
      >
        {
          this.props.headerWithCancelButton
            ? this.renderCancelButton(onPress)
            : this.renderCancelText(onPress)
        }
        <Text style={[S.modalHeaderText, this.props.modalHeaderTextStyle]}>
          {title}
        </Text>
      </View>
    );
  }
  // tslint:disable-next-line:cyclomatic-complexity
  renderSortModal = () => {
    if (this.props.hideActionBar) {
      return null;
    }
    const { commerceData, defaultSortOption } = this.props;
    let content = null;
    const sortOptions = commerceData?.sortingOptions ? [...commerceData.sortingOptions] : [];
    if (defaultSortOption) {
      // Inject a synthetic "default" option at the top of the list.
      sortOptions.unshift({
        id: 'default',
        title: defaultSortOption
      });
    }
    const selectedOption = commerceData?.selectedSortingOption ?
      commerceData?.selectedSortingOption : 'default';
    const selectedItems: Record<string, string[]> | undefined = this.props.mergeSortToFilter &&
      commerceData?.selectedSortingOption
      ? this.mergeSelectedRefinementsAndSort(
        commerceData.selectedRefinements,
        commerceData.selectedSortingOption
      )
      : commerceData?.selectedRefinements;
    if (commerceData) {
      if (this.props.renderSort) {
        content = this.props.renderSort(this.handleSortChange(selectedItems), commerceData);
      } else if (sortOptions) {
        content = (
          <SafeAreaView style={S.modalContainer}>
            {this.renderModalHeader({
              // NOTE(review): sortHeaderStyle is passed as the modal *title* —
              // looks like a misuse/typo for a title prop; confirm intent.
              title: this.props.sortHeaderStyle || 'Sort By',
              onPress: this.closeSortModal
            })}
            <SelectableList
              items={sortOptions}
              onChange={this.handleSortChange(selectedItems)}
              selectedId={selectedOption}
              {...this.props.sortListProps}
            />
          </SafeAreaView>
        );
      }
    }
    return this.renderModal({
      content,
      visible: this.state.sortModalVisible,
      closeModal: this.closeSortModal
    });
  }
  /** Wraps modal content in a full- or half-screen modal with optional overlay loader. */
  renderModal = ({ content, visible, closeModal }: any) => {
    const SelectedModal: any =
      this.props.modalType === 'half-screen' ? ModalHalfScreen : Modal;
    return (
      <SelectedModal
        animationType={this.props.modalAnimationType || 'slide'}
        visible={visible}
        onRequestClose={closeModal}
      >
        {content}
        {this.state.isLoading &&
          this.props.filterInBackground &&
          (this.props.renderModalLoading ? (
            this.props.renderModalLoading()
          ) : (
            <View style={S.modelLoadingContainer}>
              <Loading />
            </View>
          ))}
      </SelectedModal>
    );
  }
  /** Prepends the sort options as a pseudo-refinement under SORT_ITEM_KEY. */
  mergeRefinementsAndSort = (
    refinementsData?: CommerceTypes.Refinement[],
    sortingData?: CommerceTypes.SortingOption[]
  ) => {
    const refinements = refinementsData ? [...refinementsData] : [];
    refinements.unshift({
      id: SORT_ITEM_KEY,
      title: 'Sort By',
      values: sortingData ? sortingData.map((item: CommerceTypes.SortingOption) => ({
        id: item.id,
        value: item.id,
        title: item.title
      })) : []
    });
    return refinements;
  }
  /** Adds the selected sort id to the selection map under SORT_ITEM_KEY. */
  mergeSelectedRefinementsAndSort = (
    selectedRefinements: Record<string, string[]> | undefined,
    selectedSortId: string
  ): Record<string, string[]> => {
    return {
      ...selectedRefinements,
      [SORT_ITEM_KEY]: [selectedSortId]
    };
  }
  // tslint:disable-next-line:cyclomatic-complexity
  renderFilterModal = () => {
    if (this.props.hideActionBar) {
      return null;
    }
    const { commerceData } = this.props;
    if (!(commerceData && commerceData.refinements)) {
      return null;
    }
    let content = null;
    if (this.props.renderFilter) {
      content = this.props.renderFilter(
        this.handleFilterApply,
        this.handleFilterReset,
        commerceData
      );
    } else if (this.props.filterType === 'drilldown') {
      // Optionally fold the sort options into the drilldown list.
      const items = this.props.mergeSortToFilter
        ? this.mergeRefinementsAndSort(
          commerceData.refinements,
          commerceData.sortingOptions
        )
        : commerceData.refinements;
      const selectedItems = this.props.mergeSortToFilter && commerceData.selectedSortingOption
        ? this.mergeSelectedRefinementsAndSort(
          commerceData.selectedRefinements,
          commerceData.selectedSortingOption
        )
        : commerceData.selectedRefinements;
      content = (
        <SafeAreaView style={S.modalContainer}>
          <FilterListDrilldown
            items={items}
            onApply={this.handleFilterApply}
            onReset={this.handleFilterReset}
            onClose={this.props.showDrilldownClose ? this.closeFilterModal : undefined}
            selectedItems={selectedItems}
            renderFilterItem={this.renderItemForCombinedFilterAndSort}
            renderFilterItemValue={this.renderItemValueForCombinedFilterAndSort}
            applyOnSelect={this.props.filterInBackground}
            singleFilterIds={
              this.props.mergeSortToFilter ? [SORT_ITEM_KEY] : undefined
            }
            {...this.props.FilterListDrilldownProps}
          />
        </SafeAreaView>
      );
    } else {
      content = (
        <View style={S.modalContainer}>
          {this.renderModalHeader({
            title: this.props.filterHeaderTitle || 'Filter By',
            onPress: this.closeFilterModal
          })}
          <FilterList
            items={commerceData.refinements || []}
            onApply={this.handleFilterApply}
            onReset={this.handleFilterReset}
            selectedItems={commerceData.selectedRefinements}
            {...this.props.filterListProps}
          />
        </View>
      );
    }
    return this.renderModal({
      content,
      visible: this.state.filterModalVisible,
      closeModal: this.closeFilterModal
    });
  }
  /**
   * Custom drilldown row renderer: adds a "Filter By" separator below the
   * merged sort pseudo-refinement.
   */
  renderItemForCombinedFilterAndSort = (
    item: FilterItem,
    index: number,
    selectedValues: string[],
    handlePress: () => void,
    renderFilterItem: (
      info: Omit<ListRenderItemInfo<FilterItem>, 'separators'>,
      skipCustomRender: boolean
    ) => JSX.Element
  ) => {
    if (item.id === SORT_ITEM_KEY) {
      return (
        <View>
          {renderFilterItem({item, index}, true)}
          <View style={{ padding: 15, backgroundColor: '#eee' }}>
            <Text>
              {FSI18n.string(componentTranslationKeys.filterBy)}
            </Text>
          </View>
        </View>
      );
    } else {
      return renderFilterItem({item, index}, true);
    }
  }
  /**
   * Custom drilldown value renderer: sort values get a SelectableRow wired to
   * handleSortSelectedInRefine; everything else uses the default renderer.
   */
  renderItemValueForCombinedFilterAndSort = (
    item: FilterItem,
    index: number,
    value: FilterItemValue,
    handleSelect: () => void,
    selected: boolean,
    renderFilterItemValue: (
      item: FilterItem,
      skipCustomRender?: boolean
    ) => (info: Omit<ListRenderItemInfo<FilterItemValue>, 'separators'>) => JSX.Element
  ) => {
    if (item.id === SORT_ITEM_KEY) {
      const selectableRowProps =
        this.props.FilterListDrilldownProps &&
        this.props.FilterListDrilldownProps.selectableRowProps;
      return (
        <SelectableRow
          key={index}
          title={value.title}
          selected={selected}
          onPress={this.handleSortSelectedInRefine(value, selected)}
          {...selectableRowProps}
        />
      );
    } else {
      return renderFilterItemValue(item, true)({item: value, index});
    }
  }
  /** Handles picking a sort value from inside the combined filter drilldown. */
  handleSortSelectedInRefine = (value: any, selected: any) => () => {
    this.closeFilterModal();
    // TODO: Test is needed.
    // NOTE(review): `selected` is a boolean here, but handleSortChange expects
    // the selected refinements map — current refinements appear to be dropped
    // when sorting from the drilldown; confirm.
    this.handleSortChange(selected)(value);
  }
  /** Empty-state view with an optional "reset filters" action. */
  renderNoResult = () => {
    const { commerceDataSource, commerceData } = this.props;
    if (!commerceData) {
      return null;
    }
    if (this.props.renderNoResult) {
      return this.props.renderNoResult(
        commerceData,
        this.handleFilterReset
      );
    }
    // Offer reset only when the user applied refinements beyond the baseline.
    const shouldShowReset =
      commerceData.selectedRefinements &&
      Object.keys(commerceData.selectedRefinements).length >
      ((commerceDataSource && commerceDataSource.minRefinements) || 0);
    return (
      <View style={S.noResultContainer}>
        <Text style={S.noResultText}>
          {FSI18n.string(componentTranslationKeys.noResults)}
        </Text>
        {shouldShowReset && (
          <TouchableOpacity
            style={S.resetButton}
            onPress={this.handleFilterReset}
          >
            <Text>
              {FSI18n.string(componentTranslationKeys.resetFilters)}
            </Text>
          </TouchableOpacity>
        )}
      </View>
    );
  }
  /** Fetches an arbitrary page, choosing the cursor by paging direction. */
  loadPage = (page: number) => {
    const {
      commerceData,
      commerceProviderLoadMore
    } = this.props;
    if (!commerceData) {
      // Cannot load more
      return;
    }
    this.setState({
      isMoreLoading: true
    });
    const currentPage = commerceData.page || -1;
    const newQuery = this.newProductQuery({
      page,
      prevCursor: page < currentPage ? commerceData.prevCursor : undefined,
      nextCursor: page > currentPage ? commerceData.nextCursor : undefined
    });
    if (commerceProviderLoadMore) {
      commerceProviderLoadMore(newQuery)
        .then((data: CommerceTypes.ProductIndex) => {
          this.handleNewData(data);
          this.setState({
            isMoreLoading: false
          });
        })
        .catch(() => {
          this.setState({
            isMoreLoading: false
          });
        });
    }
  }
  /** Loads the page before the earliest loaded page. */
  loadPrev = () => {
    const { commerceData } = this.props;
    if (commerceData) {
      this.loadPage((commerceData.minPage || commerceData.page || 1) - 1);
    }
  }
  /** Loads the page after the current page. */
  loadMore = () => {
    const { commerceData } = this.props;
    if (commerceData) {
      this.loadPage((commerceData.page || 1) + 1);
    }
  }
  /** List footer: loading spinner, custom load-more, or default load-more button. */
  renderFooter = () => {
    const { commerceData } = this.props;
    // TODO: Completely move this logic into the normalizers to populate the "hasNextPage"
    let hasAnotherPage = false;
    if (commerceData?.hasNextPage !== undefined) {
      hasAnotherPage = commerceData?.hasNextPage;
    } else {
      hasAnotherPage = (
        commerceData?.page !== undefined &&
        commerceData.page < this.maxPage(commerceData)
      );
    }
    if (this.state.isMoreLoading) {
      return this.props.renderLoading ? (
        this.props.renderLoading()
      ) : (
        <Loading
          style={[
            S.loading,
            S.loadingLoadMore,
            this.props.loadMoreLoadingStyle
          ]}
        />
      );
    }
    if (this.props.renderLoadMore) {
      return this.props.renderLoadMore(
        this.loadMore,
        hasAnotherPage
      );
    }
    if (!hasAnotherPage) {
      return null;
    }
    return (
      <View style={S.footer}>
        <TouchableOpacity
          style={[S.loadMoreButton, this.props.loadMoreButtonStyle]}
          onPress={this.loadMore}
        >
          <Text style={this.props.loadMoreButtonTextStyle}>
            {FSI18n.string(componentTranslationKeys.loadMore)}
          </Text>
        </TouchableOpacity>
      </View>
    );
  }
  // tslint:disable-next-line: cyclomatic-complexity
  render(): React.ReactNode {
    const {
      commerceData,
      listStyle,
      columns,
      gridProps,
      loadingStyle,
      errorText,
      errorTextStyle,
      containerStyle
    } = this.props;
    if (this.state.isLoading && !this.props.filterInBackground) {
      if (this.props.renderGhost) {
        return this.props.renderGhost();
      }
      return <Loading style={[S.loading, loadingStyle]} />;
    }
    if (this.state.hasFetchError) {
      return (
        <Text style={[S.error, errorTextStyle]}>
          {errorText || defaultErrorMessage}
        </Text>
      );
    }
    if (!commerceData || !commerceData.products || !commerceData.products.length) {
      return this.renderNoResult();
    }
    return (
      <View style={[S.container, containerStyle]}>
        <ProductList
          style={[S.list, listStyle]}
          columns={columns}
          items={commerceData.products}
          renderItem={this.renderItem}
          renderHeader={this.renderHeader}
          renderFooter={this.renderFooter}
          gridProps={gridProps}
        />
        {this.state.sortModalVisible && this.renderSortModal()}
        {this.state.filterModalVisible && this.renderFilterModal()}
      </View>
    );
  }
}
function getVariantText(item: CommerceTypes.Product): string {
return (item.options || [])
.map(option => {
if (option.values && option.values.length > 1) {
return pluralize(option.name, option.values.length, true);
} else {
return '';
}
})
.join(' ');
} | the_stack |
namespace eui {
let scrollerThrowEvent:ScrollerThrowEvent;
/**
 * @private
 * Slot indices into the $Scroller state object (see the constructor).
 */
const enum Keys {
    scrollPolicyV,
    scrollPolicyH,
    autoHideTimer,
    touchStartX,
    touchStartY,
    touchMoved,
    horizontalCanScroll,
    verticalCanScroll,
    touchScrollH,
    touchScrollV,
    viewport,
    viewprotRemovedEvent, // set when the viewport slot was cleared because the viewport was removed from the display list
    touchCancle
}
/**
* The Scroller component displays a single scrollable component,
* called a viewport, and horizontal and vertical scroll bars.
* The viewport must implement the IViewport interface.
* <p>The Group components implement the IViewport interface
* and can be used as the children of the Scroller control,
* as the following example shows:</p>
* <pre>
* <s:Scroller width="100" height="100">
* <s:Group>
* <s:Image width="300" height="400" source="assets/logo.jpg"/>
* </s:Group>
* </s:Scroller>
* </pre>
* <p>The size of the Image control is set larger than that of its parent Group container.
* By default, the child extends past the boundaries of the parent container.
* Rather than allow the child to extend past the boundaries of the parent container,
* the Scroller specifies to clip the child to the boundaries and display scroll bars.</p>
*
* @event eui.UIEvent.CHANGE_START Dispatched when the scroll position is going to change
* @event eui.UIEvent.CHANGE_END Dispatched when the scroll position changed complete
* @event egret.Event.CHANGE Dispatched when the scroll position is changing
* @event egret.TouchEvent.TOUCH_CANCEL canceled the touch
*
* @defaultProperty viewport
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @includeExample extension/eui/components/ScrollerExample.ts
* @language en_US
*/
/**
* Scroller 组件显示一个称为视域的单个可滚动组件,以及水平滚动条和垂直滚动条。该视域必须实现 IViewport 接口。
* <p>Group 组件实现 IViewport 接口,且可以用作 Scroller 控件的子代,如下例所示:</p>
* <pre>
* <s:Scroller width="100" height="100">
* <s:Group>
* <s:Image width="300" height="400" source="assets/logo.jpg"/>
* </s:Group>
* </s:Scroller>
* </pre>
* Image 控件的大小比其父 Group 容器设置得大。默认情况下,子代超过父容器的边界。
* Scroller 会指定将子代剪切到边界并显示滚动条,而不是让子代超过父容器的边界。
*
* @event eui.UIEvent.CHANGE_START 滚动位置改变开始
* @event eui.UIEvent.CHANGE_END 滚动位置改变结束
* @event egret.Event.CHANGE 滚动位置改变的时候
* @event egret.TouchEvent.TOUCH_CANCEL 取消触摸事件
*
* @defaultProperty viewport
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @includeExample extension/eui/components/ScrollerExample.ts
* @language zh_CN
*/
export class Scroller extends Component {
/**
* The threshold value(in pixels) trigger the rolling.
* when the touch points deviate from the initial touch point than this value will trigger the rolling.
*
* @default 5
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language en_US
*/
/**
* 开始触发滚动的阈值(以像素为单位),当触摸点偏离初始触摸点的距离超过这个值时才会触发滚动。
*
* @default 5
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language zh_CN
*/
public static scrollThreshold:number = 5;
/**
* Constructor.
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language en_US
*/
/**
* 构造函数。
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language zh_CN
*/
public constructor() {
    super();
    // One touch-scroll controller per axis drives the viewport's scroll position.
    let touchScrollH = new sys.TouchScroll(this.horizontalUpdateHandler, this.horizontalEndHandler, this);
    let touchScrollV = new sys.TouchScroll(this.verticalUpdateHandler, this.verticalEndHanlder, this);
    // Slot-indexed state bag; the numeric keys correspond to the Keys enum.
    this.$Scroller = {
        0: "auto", //scrollPolicyV,
        1: "auto", //scrollPolicyH,
        2: null, //autoHideTimer,
        3: 0, //touchStartX,
        4: 0, //touchStartY,
        5: false, //touchMoved,
        6: false, //horizontalCanScroll,
        7: false, //verticalCanScroll,
        8: touchScrollH, //touchScrollH,
        9: touchScrollV, //touchScrollV
        10: null, //viewport
        11: false, //viewprotRemovedEvent
        12: false //touchCancle
    };
}
private $bounces:boolean = true;
/**
* Whether to enable rebound, rebound When enabled, ScrollView contents allowed to continue to drag the border after arriving at the end user drag operation, and then bounce back boundary position
* @default true
* @version Egret 2.5.6
* @language en_US
*/
/**
* 是否启用回弹,当启用回弹后,ScrollView中内容在到达边界后允许继续拖动,在用户拖动操作结束后,再反弹回边界位置
* @default true
* @version Egret 2.5.6
* @language zh_CN
*/
/** Whether content may be dragged past the edge and spring back (see $bounces). */
public get bounces():boolean {
    return this.$bounces;
}

public set bounces(value:boolean) {
    this.$bounces = !!value;
    // Propagate the flag to both axis controllers, when they exist.
    const horizontal = this.$Scroller[Keys.touchScrollH];
    if (horizontal) {
        horizontal.$bounces = this.$bounces;
    }
    const vertical = this.$Scroller[Keys.touchScrollV];
    if (vertical) {
        vertical.$bounces = this.$bounces;
    }
}
/**
* Adjust the speed to get out of the slide end.When equal to 0,the scroll animation will not be play.
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language en_US
*/
/**
* 调节滑动结束时滚出的速度。等于0时,没有滚动动画
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language zh_CN
*/
/**
 * Throw-out speed factor applied when a drag ends; 0 disables the
 * fling animation entirely. Negative values are clamped to 0.
 */
public set throwSpeed(val:number) {
    let factor = +val;
    if (factor < 0) {
        factor = 0;
    }
    const values = this.$Scroller;
    values[Keys.touchScrollH].$scrollFactor = factor;
    values[Keys.touchScrollV].$scrollFactor = factor;
}

public get throwSpeed():number {
    // Both axes always share the same factor; read it from the horizontal one.
    return this.$Scroller[Keys.touchScrollH].$scrollFactor;
}
/**
 * @private
 * Returns the module-shared ScrollerThrowEvent populated with the current and
 * target scroll positions, lazily creating it on first use.
 */
$getThrowInfo(currentPos:number, toPos:number):eui.ScrollerThrowEvent {
    if (scrollerThrowEvent) {
        // Recycle the cached instance instead of allocating a new event.
        scrollerThrowEvent.currentPos = currentPos;
        scrollerThrowEvent.toPos = toPos;
    }
    else {
        scrollerThrowEvent = new eui.ScrollerThrowEvent(ScrollerThrowEvent.THROW, false, false, currentPos, toPos);
    }
    return scrollerThrowEvent;
}
/**
* @private
*/
$Scroller:Object;
/**
* the horizontal scroll bar
*
* @skinPart
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language en_US
*/
/**
* 水平滚动条
*
* @skinPart
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language zh_CN
*/
public horizontalScrollBar:eui.HScrollBar = null;
/**
* the vertical scroll bar
*
* @skinPart
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language en_US
*/
/**
* 垂直滚动条
*
* @skinPart
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language zh_CN
*/
public verticalScrollBar:eui.VScrollBar = null;
/**
* Indicates under what conditions the scroller can be moved and the vertical scroll bar is displayed.
* <p><code>ScrollPolicy.ON</code> - the scroller can be moved, and the scroll bar is displayed when it's move.</p>
* <p><code>ScrollPolicy.OFF</code> - the scroller can not be moved, the scroll bar is never displayed.</p>
* <p><code>ScrollPolicy.AUTO</code> - the scroller can not be moved when
* the viewport's contentHeight is larger than its height. the scroll bar is displayed when it's move.
*
* @default ScrollPolicy.AUTO
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language en_US
*/
/**
* 指示在哪些条件可以滚动并且显示垂直滑动条。
* <p><code>ScrollPolicy.ON</code> - 可以滚动,滚动时显示滚动条。</p>
* <p><code>ScrollPolicy.OFF</code> - 不可以滚动并且不显示滚动条。</p>
* <p><code>ScrollPolicy.AUTO</code> - 当视域的 contentHeight 大于其自身的高度时可以滚动,滚动时显示滚动条。</p>
*
* @default ScrollPolicy.AUTO
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language zh_CN
*/
public get scrollPolicyV():string {
    return this.$Scroller[Keys.scrollPolicyV];
}

public set scrollPolicyV(value:string) {
    let values = this.$Scroller;
    if (values[Keys.scrollPolicyV] == value) {
        return;
    }
    values[Keys.scrollPolicyV] = value;
    // Re-evaluate whether either axis can currently scroll.
    this.checkScrollPolicy();
}
/**
* Indicates under what conditions the scroller can be moved and the horizontal scroll bar is displayed.
* <p><code>ScrollPolicy.ON</code> - the scroller can be moved, and the scroll bar is displayed when it's move.</p>
* <p><code>ScrollPolicy.OFF</code> - the scroller can not be moved, the scroll bar is never displayed.</p>
* <p><code>ScrollPolicy.AUTO</code> - the can not be moved when
* the viewport's contentWidth is larger than its width. the scroll bar is displayed when it's move.
*
* @default ScrollPolicy.AUTO
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language en_US
*/
/**
* 指示在哪些条件下可以滚动并且显示水平滑动条。
* <p><code>ScrollPolicy.ON</code> - 可以滚动,滚动时显示滚动条。</p>
* <p><code>ScrollPolicy.OFF</code> - 不可以滚动并且不显示滚动条。</p>
* <p><code>ScrollPolicy.AUTO</code> - 当视域的 contentWidth 大于其自身的宽度时可以滚动,滚动时显示滚动条。</p>
*
* @default ScrollPolicy.AUTO
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language zh_CN
*/
public get scrollPolicyH():string {
    return this.$Scroller[Keys.scrollPolicyH];
}

public set scrollPolicyH(value:string) {
    let values = this.$Scroller;
    if (values[Keys.scrollPolicyH] == value) {
        return;
    }
    values[Keys.scrollPolicyH] = value;
    // Re-evaluate whether either axis can currently scroll.
    this.checkScrollPolicy();
}
/**
* Stop the scroller animation
* @version Egret 3.0.2
* @version eui 1.0
* @platform Web,Native
* @language en_US
*/
/**
* 停止滚动的动画
*
* @version Egret 3.0.2
* @version eui 1.0
* @platform Web,Native
* @language zh_CN
*/
public stopAnimation():void {
let values = this.$Scroller;
let scrollV = values[Keys.touchScrollV];
let scrollH = values[Keys.touchScrollH];
if (scrollV.animation.isPlaying) {
UIEvent.dispatchUIEvent(this, UIEvent.CHANGE_END);
} else if (scrollH.animation.isPlaying) {
UIEvent.dispatchUIEvent(this, UIEvent.CHANGE_END);
}
scrollV.stop();
scrollH.stop();
let verticalBar = this.verticalScrollBar;
let horizontalBar = this.horizontalScrollBar;
if (verticalBar && verticalBar.autoVisibility) {
verticalBar.visible = false;
}
if (horizontalBar && horizontalBar.autoVisibility) {
horizontalBar.visible = false;
}
}
/**
* The viewport component to be scrolled.
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language en_US
*/
/**
* 要滚动的视域组件。
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language zh_CN
*/
public get viewport():IViewport {
    return this.$Scroller[Keys.viewport];
}

public set viewport(value:IViewport) {
    let values = this.$Scroller;
    if (value == values[Keys.viewport])
        return;
    // Detach the old viewport (listeners, display list) before adopting the new one.
    this.uninstallViewport();
    values[Keys.viewport] = value;
    values[Keys.viewprotRemovedEvent] = false;
    this.installViewport();
}
/**
 * @private
 * Install and initialize the viewport component.
 */
private installViewport():void {
    let viewport = this.viewport;
    if (viewport) {
        // The viewport always sits at the bottom of the display list.
        this.addChildAt(viewport, 0);
        viewport.scrollEnabled = true;
        // Capture-phase listeners so the scroller can intercept touches before children.
        viewport.addEventListener(egret.TouchEvent.TOUCH_BEGIN, this.onTouchBeginCapture, this, true);
        viewport.addEventListener(egret.TouchEvent.TOUCH_END, this.onTouchEndCapture, this, true);
        viewport.addEventListener(egret.TouchEvent.TOUCH_TAP, this.onTouchTapCapture, this, true);
        viewport.addEventListener(egret.Event.REMOVED, this.onViewPortRemove, this);
    }
    if (this.horizontalScrollBar) {
        this.horizontalScrollBar.viewport = viewport;
    }
    if (this.verticalScrollBar) {
        this.verticalScrollBar.viewport = viewport;
    }
}
/**
 * @private
 * Uninstall the viewport component.
 */
private uninstallViewport():void {
    if (this.horizontalScrollBar) {
        this.horizontalScrollBar.viewport = null;
    }
    if (this.verticalScrollBar) {
        this.verticalScrollBar.viewport = null;
    }
    let viewport = this.viewport;
    if (viewport) {
        viewport.scrollEnabled = false;
        viewport.removeEventListener(egret.TouchEvent.TOUCH_BEGIN, this.onTouchBeginCapture, this, true);
        viewport.removeEventListener(egret.TouchEvent.TOUCH_END, this.onTouchEndCapture, this, true);
        viewport.removeEventListener(egret.TouchEvent.TOUCH_TAP, this.onTouchTapCapture, this, true);
        viewport.removeEventListener(egret.Event.REMOVED, this.onViewPortRemove, this);
        // Only remove it ourselves if it was not already removed externally
        // (see onViewPortRemove / Keys.viewprotRemovedEvent).
        if (this.$Scroller[Keys.viewprotRemovedEvent] == false) {
            this.removeChild(viewport);
        }
    }
}
/**
 * @private
 * Invoked when the viewport is removed from the display list externally:
 * record that fact so uninstallViewport() does not remove it a second time,
 * then clear the viewport reference.
 */
private onViewPortRemove(event:egret.Event):void {
    if (event.target != this.viewport) {
        return;
    }
    this.$Scroller[Keys.viewprotRemovedEvent] = true;
    this.viewport = null;
}
/**
 * @inheritDoc
 *
 * @version Egret 2.4
 * @version eui 1.0
 * @platform Web,Native
 */
protected setSkin(skin:Skin):void {
    super.setSkin(skin);
    // Re-attach the viewport beneath the freshly applied skin children.
    let viewport = this.viewport;
    if (viewport) {
        this.addChildAt(viewport, 0);
    }
}
/**
 * @private
 * Capture-phase touch-begin on the viewport: reset the cancel flag and, if
 * the current policy allows scrolling on some axis, begin gesture tracking.
 * @param event
 */
private onTouchBeginCapture(event:egret.TouchEvent):void {
    if(!this.$stage) {
        return;
    }
    this.$Scroller[Keys.touchCancle] = false;
    let canScroll:boolean = this.checkScrollPolicy();
    if (!canScroll) {
        return;
    }
    this.onTouchBegin(event);
}
/**
 * @private
 * Capture-phase touch-end: when a scroll gesture cancelled the touch,
 * temporarily disable bubbling, re-dispatch via dispatchBubbleEvent, stop the
 * capture-phase event, and finish the scroll gesture.
 * @param event
 */
private onTouchEndCapture(event:egret.TouchEvent):void {
    if (this.$Scroller[Keys.touchCancle]) {
        event.$bubbles = false;
        this.dispatchBubbleEvent(event);
        event.$bubbles = true;
        event.stopPropagation();
        this.onTouchEnd(event);
    }
}
/**
 * @private
 * Capture-phase tap: suppress taps that belong to a cancelled (scrolling)
 * touch so children do not receive spurious clicks.
 * @param event
 */
private onTouchTapCapture(event:egret.TouchEvent):void {
    if (this.$Scroller[Keys.touchCancle]) {
        event.$bubbles = false;
        this.dispatchBubbleEvent(event);
        event.$bubbles = true;
        event.stopPropagation();
    }
}
/**
 * @private
 * Check the current scroll policy; returns true if at least one axis can
 * scroll. Also caches the per-axis result in Keys.horizontalCanScroll /
 * Keys.verticalCanScroll.
 */
private checkScrollPolicy():boolean {
    let values = this.$Scroller;
    let viewport:IViewport = values[Keys.viewport];
    if (!viewport) {
        return false;
    }
    let hCanScroll:boolean;
    let uiValues = viewport.$UIComponent;
    switch (values[Keys.scrollPolicyH]) {
        case "auto":
            // Scrollable when content overflows, or when already scrolled away from 0.
            if (viewport.contentWidth > uiValues[sys.UIKeys.width] || viewport.scrollH !== 0) {
                hCanScroll = true;
            }
            else {
                hCanScroll = false;
            }
            break;
        case "on":
            hCanScroll = true;
            break;
        case "off":
            hCanScroll = false;
            break;
    }
    values[Keys.horizontalCanScroll] = hCanScroll;
    let vCanScroll:boolean;
    switch (values[Keys.scrollPolicyV]) {
        case "auto":
            if (viewport.contentHeight > uiValues[sys.UIKeys.height] || viewport.scrollV !== 0) {
                vCanScroll = true;
            }
            else {
                vCanScroll = false;
            }
            break;
        case "on":
            vCanScroll = true;
            break;
        case "off":
            vCanScroll = false;
            break;
    }
    values[Keys.verticalCanScroll] = vCanScroll;
    return hCanScroll || vCanScroll;
}
/**
 * @private
 * The display object that received the touch press; used when dispatching
 * the simulated touch-cancel event.
 */
private downTarget:egret.DisplayObject;
// Stage captured at touch-begin so listeners can be removed from the same stage later.
private tempStage:egret.Stage;
/**
 * @private
 * Starts tracking a touch gesture: records the touch-down position,
 * primes the per-axis touch scroll trackers, and wires up the
 * move/end/cancel listeners needed to follow the gesture.
 * @param event The TOUCH_BEGIN event.
 */
private onTouchBegin(event:egret.TouchEvent):void {
    if (event.isDefaultPrevented()) {
        return;
    }
    // Re-check the policy; content size may have changed since capture.
    if (!this.checkScrollPolicy()) {
        return;
    }
    // Remember the touched child so cancel/replay events can target it.
    this.downTarget = event.target;
    let values = this.$Scroller;
    this.stopAnimation();
    values[Keys.touchStartX] = event.$stageX;
    values[Keys.touchStartY] = event.$stageY;
    if (values[Keys.horizontalCanScroll]) {
        values[Keys.touchScrollH].start(event.$stageX);
    }
    if (values[Keys.verticalCanScroll]) {
        values[Keys.touchScrollV].start(event.$stageY);
    }
    let stage = this.$stage;
    this.addEventListener(egret.TouchEvent.TOUCH_MOVE, this.onTouchMove, this);
    // TOUCH_END is listened on the stage (capture phase) so the gesture
    // still ends when the finger is released outside this component.
    stage.addEventListener(egret.TouchEvent.TOUCH_END, this.onTouchEnd, this, true);
    this.addEventListener(egret.TouchEvent.TOUCH_CANCEL, this.onTouchCancel, this);
    this.addEventListener(egret.Event.REMOVED_FROM_STAGE, this.onRemoveListeners, this);
    // Keep the stage so listeners can be removed even after leaving it.
    this.tempStage = stage;
}
/**
 * @private
 * Tracks an in-progress touch. Until the movement exceeds
 * Scroller.scrollThreshold on some axis the event is ignored; the first
 * time it does, the scroller claims the gesture (cancels the touch for
 * children, shows auto-hide bars, emits CHANGE_START). Afterwards each
 * move feeds the per-axis scroll trackers.
 * @param event The TOUCH_MOVE event.
 */
private onTouchMove(event:egret.TouchEvent):void {
    if (event.isDefaultPrevented()) {
        return;
    }
    let values = this.$Scroller;
    if (!values[Keys.touchMoved]) {
        // Gesture not yet recognized as a scroll: test movement thresholds.
        let outX:boolean;
        if (Math.abs(values[Keys.touchStartX] - event.$stageX) < Scroller.scrollThreshold) {
            outX = false;
        } else {
            outX = true;
        }
        let outY:boolean;
        if (Math.abs(values[Keys.touchStartY] - event.$stageY) < Scroller.scrollThreshold) {
            outY = false;
        } else {
            outY = true;
        }
        if (!outX && !outY) {
            return;
        }
        // A drag purely along an axis whose policy is "off" is ignored.
        if (!outY && outX && values[Keys.scrollPolicyH] == 'off') {
            return;
        }
        if (!outX && outY && values[Keys.scrollPolicyV] == 'off') {
            return;
        }
        // From here on the touch belongs to the scroller.
        values[Keys.touchCancle] = true;
        values[Keys.touchMoved] = true;
        this.dispatchCancelEvent(event);
        let horizontalBar = this.horizontalScrollBar;
        let verticalBar = this.verticalScrollBar;
        if (horizontalBar && horizontalBar.autoVisibility && values[Keys.horizontalCanScroll]) {
            horizontalBar.visible = true;
        }
        if (verticalBar && verticalBar.autoVisibility && values[Keys.verticalCanScroll]) {
            verticalBar.visible = true;
        }
        if (values[Keys.autoHideTimer]) {
            values[Keys.autoHideTimer].reset();
        }
        UIEvent.dispatchUIEvent(this, UIEvent.CHANGE_START);
        // Also follow the finger on the stage, outside this component.
        this.$stage.addEventListener(egret.TouchEvent.TOUCH_MOVE, this.onTouchMove, this);
    }
    event.preventDefault();
    let viewport = values[Keys.viewport];
    let uiValues = viewport.$UIComponent;
    if (values[Keys.horizontalCanScroll]) {
        values[Keys.touchScrollH].update(event.$stageX, viewport.contentWidth - uiValues[sys.UIKeys.width], viewport.scrollH);
    }
    if (values[Keys.verticalCanScroll]) {
        values[Keys.touchScrollV].update(event.$stageY, viewport.contentHeight - uiValues[sys.UIKeys.height], viewport.scrollV);
    }
}
/**
* @private
* @param event
*/
private onTouchCancel(event:egret.TouchEvent):void {
if (!this.$Scroller[Keys.touchMoved]) {
this.onRemoveListeners();
}
}
/**
 * @private
 * Re-dispatches `event` to the original touch-down target, with the
 * propagation path trimmed so dispatch happens inside the viewport only
 * (used to replay end/tap events a child would otherwise miss).
 * @param event The touch event to re-dispatch.
 */
private dispatchBubbleEvent(event:egret.TouchEvent) {
    let viewport = this.$Scroller[Keys.viewport];
    if (!viewport) {
        return;
    }
    let cancelEvent = egret.Event.create(egret.TouchEvent, event.type, event.bubbles, event.cancelable);
    cancelEvent.$initTo(event.$stageX,event.$stageY,event.touchPointID);
    let target:egret.DisplayObject = this.downTarget;
    cancelEvent.$setTarget(target);
    let list = this.$getPropagationList(target);
    let length = list.length;
    // NOTE(review): this value is dead — it is overwritten with 0 below;
    // presumably leftover from dispatchCancelEvent. Confirm before removing.
    let targetIndex = list.length * 0.5;
    let startIndex = -1;
    for (let i = 0; i < length; i++) {
        if (list[i] === viewport) {
            startIndex = i;
            break;
        }
    }
    // Drop the leading part of the path down to (and past) the viewport,
    // so only nodes below it receive the replayed event.
    list.splice(0, list.length - startIndex + 1);
    targetIndex = 0;
    this.$dispatchPropagationEvent(cancelEvent, list, targetIndex);
    egret.Event.release(cancelEvent);
}
/**
 * @private
 * Sends a TOUCH_CANCEL along the original touch-down target's path so
 * descendants abandon their tap/press state once this scroller claims
 * the gesture. The path is trimmed to the sub-path below the viewport.
 * @param event The touch event the cancel is derived from.
 */
private dispatchCancelEvent(event:egret.TouchEvent) {
    let viewport = this.$Scroller[Keys.viewport];
    if (!viewport) {
        return;
    }
    let cancelEvent = egret.Event.create(egret.TouchEvent, egret.TouchEvent.TOUCH_CANCEL, event.bubbles, event.cancelable);
    cancelEvent.$initTo(event.$stageX,event.$stageY,event.touchPointID);
    let target:egret.DisplayObject = this.downTarget;
    cancelEvent.$setTarget(target);
    let list = this.$getPropagationList(target);
    let length = list.length;
    // The propagation list appears symmetric (capture half + bubble half)
    // with the target at the midpoint.
    let targetIndex = list.length * 0.5;
    let startIndex = -1;
    for (let i = 0; i < length; i++) {
        if (list[i] === viewport) {
            startIndex = i;
            break;
        }
    }
    // Strip the viewport's ancestors from both halves of the path, then
    // shift the target index accordingly.
    // NOTE(review): the "+1 - 2" / "+ 2" offsets look deliberate but are
    // unexplained — verify against $getPropagationList's layout.
    list.splice(0, startIndex + 1 - 2);
    list.splice(list.length - 1 - startIndex + 2, startIndex + 1 - 2);
    targetIndex -= startIndex + 1;
    this.$dispatchPropagationEvent(cancelEvent, list, targetIndex);
    egret.Event.release(cancelEvent);
}
/**
 * @private
 * Ends the touch gesture: clears the moved flag, removes every gesture
 * listener, and lets each started axis tracker finish (which may kick
 * off a throw/bounce animation).
 * @param event The TOUCH_END event.
 */
private onTouchEnd(event:egret.Event):void {
    const values = this.$Scroller;
    values[Keys.touchMoved] = false;
    this.onRemoveListeners();
    const viewport:IViewport = values[Keys.viewport];
    const bounds = viewport.$UIComponent;
    const hTracker = values[Keys.touchScrollH];
    if (hTracker.isStarted()) {
        hTracker.finish(viewport.scrollH, viewport.contentWidth - bounds[sys.UIKeys.width]);
    }
    const vTracker = values[Keys.touchScrollV];
    if (vTracker.isStarted()) {
        vTracker.finish(viewport.scrollV, viewport.contentHeight - bounds[sys.UIKeys.height]);
    }
}
/**
 * @private
 * Detaches every gesture listener added in onTouchBegin/onTouchMove and
 * finalizes any in-flight axis tracking (mirrors onTouchEnd's finish
 * logic for the early-teardown paths: cancel, removed-from-stage).
 */
private onRemoveListeners():void {
    // Prefer the stage captured at touch-begin: this component may have
    // already been removed from the stage, leaving $stage unset.
    let stage = this.tempStage || this.$stage;
    this.removeEventListener(egret.TouchEvent.TOUCH_MOVE, this.onTouchMove, this);
    stage.removeEventListener(egret.TouchEvent.TOUCH_END, this.onTouchEnd, this, true);
    stage.removeEventListener(egret.TouchEvent.TOUCH_MOVE, this.onTouchMove, this);
    this.removeEventListener(egret.TouchEvent.TOUCH_CANCEL, this.onTouchCancel, this);
    this.removeEventListener(egret.Event.REMOVED_FROM_STAGE, this.onRemoveListeners, this);
    let values = this.$Scroller;
    let viewport:IViewport = values[Keys.viewport];
    let uiValues = viewport.$UIComponent;
    if (values[Keys.touchScrollH].isStarted()) {
        values[Keys.touchScrollH].finish(viewport.scrollH, viewport.contentWidth - uiValues[sys.UIKeys.width]);
    }
    if (values[Keys.touchScrollV].isStarted()) {
        values[Keys.touchScrollV].finish(viewport.scrollV, viewport.contentHeight - uiValues[sys.UIKeys.height]);
    }
}
/**
 * @private
 * Horizontal scroll-animation tick: applies the new scrollH to the
 * viewport (when one is set) and emits a CHANGE event.
 * @param scrollPos The new horizontal scroll position.
 */
private horizontalUpdateHandler(scrollPos: number): void {
    const target = this.$Scroller[Keys.viewport];
    if (target != null) {
        target.scrollH = scrollPos;
    }
    this.dispatchEventWith(egret.Event.CHANGE);
}
/**
 * @private
 * Vertical scroll-animation tick: applies the new scrollV to the
 * viewport (when one is set) and emits a CHANGE event.
 * @param scrollPos The new vertical scroll position.
 */
private verticalUpdateHandler(scrollPos: number): void {
    const target = this.$Scroller[Keys.viewport];
    if (target != null) {
        target.scrollV = scrollPos;
    }
    this.dispatchEventWith(egret.Event.CHANGE);
}
/**
* @private
*
*/
private horizontalEndHandler():void {
if (!this.$Scroller[Keys.touchScrollV].isPlaying()) {
this.onChangeEnd();
}
}
/**
* @private
*
*/
private verticalEndHanlder():void {
if (!this.$Scroller[Keys.touchScrollH].isPlaying()) {
this.onChangeEnd();
}
}
/**
* @private
*
*/
private onChangeEnd():void {
let values = this.$Scroller;
let horizontalBar = this.horizontalScrollBar;
let verticalBar = this.verticalScrollBar;
if (horizontalBar && horizontalBar.visible || verticalBar && verticalBar.visible) {
if (!values[Keys.autoHideTimer]) {
values[Keys.autoHideTimer] = new egret.Timer(200, 1);
values[Keys.autoHideTimer].addEventListener(egret.TimerEvent.TIMER_COMPLETE, this.onAutoHideTimer, this);
}
values[Keys.autoHideTimer].reset();
values[Keys.autoHideTimer].start();
}
UIEvent.dispatchUIEvent(this, UIEvent.CHANGE_END);
}
/**
* @private
*
* @param event
*/
private onAutoHideTimer(event:egret.TimerEvent):void {
let horizontalBar = this.horizontalScrollBar;
let verticalBar = this.verticalScrollBar;
if (horizontalBar && horizontalBar.autoVisibility) {
horizontalBar.visible = false;
}
if (verticalBar && verticalBar.autoVisibility) {
verticalBar.visible = false;
}
}
/**
* @inheritDoc
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
*/
protected updateDisplayList(unscaledWidth:number, unscaledHeight:number):void {
super.updateDisplayList(unscaledWidth, unscaledHeight);
let viewport = this.viewport;
if (viewport) {
//必须先调用setLayoutBoundsSize(),因为尺寸改变会影响布局位置。
viewport.setLayoutBoundsSize(unscaledWidth, unscaledHeight);
viewport.setLayoutBoundsPosition(0, 0);
}
}
/**
 * @inheritDoc
 *
 * @version Egret 2.4
 * @version eui 1.0
 * @platform Web,Native
 */
protected partAdded(partName:string, instance:any):void {
    super.partAdded(partName, instance);
    // A newly attached scroll bar must not intercept touches, is bound to
    // the current viewport, and starts hidden when it auto-hides.
    const configureBar = (bar: any): void => {
        bar.touchChildren = false;
        bar.touchEnabled = false;
        bar.viewport = this.viewport;
        if (bar.autoVisibility) {
            bar.visible = false;
        }
    };
    if (instance == this.horizontalScrollBar) {
        configureBar(this.horizontalScrollBar);
    }
    else if (instance == this.verticalScrollBar) {
        configureBar(this.verticalScrollBar);
    }
}
}
// Register "viewport" as an EXML-serializable property of Scroller.
registerProperty(Scroller, "viewport", "eui.IViewport", true);
}
import { GoogleAnalytics, PackageManager, ProjectConfig, Util } from "@igniteui/cli-core";
import * as path from "path";
import { default as newCmd } from "../../packages/cli/lib/commands/new";
import { PromptSession } from "../../packages/cli/lib/PromptSession";
import { resetSpy } from "../helpers/utils";
describe("Unit - New command", () => {
beforeAll(() => {
spyOn(GoogleAnalytics, "post");
});
beforeEach(() => {
spyOn(Util, "log");
spyOn(Util, "execSync");
spyOn(process, "chdir");
spyOn(PackageManager, "installPackages");
});
afterEach(() => {
// clean test folder:
process.chdir("../../");
});
it("New command in existing project", async done => {
spyOn(Util, "error");
spyOn(ProjectConfig, "hasLocalConfig").and.returnValue(true);
await newCmd.execute({});
expect(Util.error).toHaveBeenCalledWith("There is already an existing project.", "red");
done();
});
it("Should validate and trim name", async done => {
spyOn(Util, "error");
spyOn(ProjectConfig, "getConfig").and.returnValue(null);
const errorCombos = [
{ name: "1 is not valid", inError: "1 is not valid" },
{ name: " 1 is not valid \t ", inError: "1 is not valid" },
{ name: "../newProject", inError: "../newProject" },
{ name: "/newProject", inError: "/newProject" },
{ name: " .newProject", inError: ".newProject" },
{ name: "name!", inError: "name!" },
{ name: "bits~and*bobs()", inError: "bits~and*bobs()" }
];
for (const item of errorCombos) {
resetSpy(Util.error);
await newCmd.execute({name: item.name });
expect(Util.error).toHaveBeenCalledWith(`Name '${item.inError}' is not valid. `
+ "Name should start with a letter and can also contain numbers, dashes and spaces.",
"red");
}
done();
});
it("Logs error for wrong framework", async done => {
spyOn(Util, "error");
//spied getFrameworkById won't return anything, i.e. not found
newCmd.template = jasmine.createSpyObj("TemplateManager", ["getFrameworkById", "getProjectLibrary"]);
await newCmd.execute({ name: "Test", framework: "jq"});
expect(newCmd.template.getFrameworkById).toHaveBeenCalledWith("jq");
expect(Util.error).toHaveBeenCalledWith("Framework not supported", "red");
//no further attempts to get project:
expect(newCmd.template.getProjectLibrary).toHaveBeenCalledTimes(0);
expect(Util.log).toHaveBeenCalledTimes(0);
done();
});
it("Logs error for wrong project type", async done => {
spyOn(Util, "error");
newCmd.template = jasmine.createSpyObj("TemplateManager", {
getFrameworkById: {},
// return nothing, i.e. not found
getProjectLibrary: undefined
});
await newCmd.execute({ name: "Test", framework: "jq", type: "js"});
expect(newCmd.template.getFrameworkById).toHaveBeenCalledWith("jq");
expect(newCmd.template.getProjectLibrary).toHaveBeenCalledWith("jq", "js");
expect(Util.error).toHaveBeenCalledWith(`Project type "js" not found in framework 'jq'`);
//no further attempts to get project:
expect(Util.log).toHaveBeenCalledTimes(0);
done();
});
it("Logs error for wrong project theme", async done => {
spyOn(Util, "error");
const mockProjLib = {
getProject: () => { },
projectIds: ["empty"],
projectType: "type",
themes: ["ig"]
};
newCmd.template = jasmine.createSpyObj("TemplateManager", {
getFrameworkById: {},
getProjectLibrary: mockProjLib
});
spyOn(mockProjLib, "getProject");
await newCmd.execute({ name: "Test", framework: "jq", type: "js", theme: "mega-custom"});
expect(newCmd.template.getFrameworkById).toHaveBeenCalledWith("jq");
expect(newCmd.template.getProjectLibrary).toHaveBeenCalledWith("jq", "js");
expect(Util.error).toHaveBeenCalledWith("Theme not supported");
//no further attempts to get project:
expect(Util.log).toHaveBeenCalledTimes(0);
expect(mockProjLib.getProject).toHaveBeenCalledTimes(0);
done();
});
it("Should start prompt session with missing arg", async done => {
spyOn(ProjectConfig, "getConfig").and.returnValue(null);
const mockProjLib = {};
newCmd.template = jasmine.createSpyObj("TemplateManager", {
getFrameworkById: {},
getProjectLibrary: mockProjLib
});
const promptSession = PromptSession.prototype;
spyOn(promptSession, "start");
//spied getFrameworkById won't return anything, i.e. not found
await newCmd.execute({});
expect(promptSession.start).toHaveBeenCalled();
done();
});
it("Logs error for unavailable project", async done => {
spyOn(Util, "error");
const mockProjLib = {
getProject: () => { },
projectIds: ["empty"],
projectType: "type",
themes: ["ig"]
};
newCmd.template = jasmine.createSpyObj("TemplateManager", {
getFrameworkById: {},
getProjectLibrary: mockProjLib
});
spyOn(mockProjLib, "getProject");
await newCmd.execute({ name: "Test", framework: "jq", type: "type"});
expect(newCmd.template.getFrameworkById).toHaveBeenCalledWith("jq");
expect(newCmd.template.getProjectLibrary).toHaveBeenCalledWith("jq", "type");
expect(Util.log).toHaveBeenCalledWith("Project Name: Test, framework jq, type type, theme ig");
expect(mockProjLib.getProject).toHaveBeenCalled();
expect(Util.error).toHaveBeenCalledWith("Project template not found");
//no other logs:
expect(Util.log).toHaveBeenCalledTimes(1);
done();
});
it("Generates default without project type", async done => {
const mockDelimiters = { mockDelimiter: { start: "test", end: "test" }};
const mockTemplate = {
delimiters: mockDelimiters,
generateConfig: { test: "test" },
templatePaths: ["test"]
};
const mockProjLib = {
getProject: () => {
return mockTemplate;
},
projectIds: ["empty"],
projectType: "js",
themes: ["ig"]
};
newCmd.template = jasmine.createSpyObj("TemplateManager", {
getFrameworkById: {},
getProjectLibrary: mockProjLib
});
//spyOn(newCmd.template, "getFrameworkById").and.returnValue({});
//spyOn(newCmd.template, "getProjectLibrary").and.returnValue(mockProjLib);
const mockConfig = { test: "test" };
spyOn(mockTemplate, "generateConfig").and.returnValue(mockConfig);
spyOn(process, "cwd").and.returnValue("Mock dir");
spyOn(Util, "processTemplates").and.returnValue(Promise.resolve(true));
await newCmd.execute({ name: "Test", framework: "jq", theme: "ig" });
expect(newCmd.template.getFrameworkById).toHaveBeenCalledWith("jq");
expect(newCmd.template.getProjectLibrary).toHaveBeenCalledWith("jq");
expect(Util.log).toHaveBeenCalledWith("Project Name: Test, framework jq, type js, theme ig");
expect(mockTemplate.generateConfig).toHaveBeenCalledWith("Test", "ig");
expect(Util.processTemplates)
.toHaveBeenCalledWith("test", path.join("Mock dir", "Test"), mockConfig, mockDelimiters, false);
expect(PackageManager.installPackages).toHaveBeenCalled();
expect(process.chdir).toHaveBeenCalledWith("Test");
expect(process.chdir).toHaveBeenCalledWith("..");
expect(Util.log).toHaveBeenCalledWith(jasmine.stringMatching("Project Created"));
done();
});
it("Correctly generates passed project type", async done => {
const mockDelimiters = { mockDelimiter: { start: "test", end: "test" }};
const mockTemplate = {
delimiters: mockDelimiters,
generateConfig: { test: "test" },
templatePaths: ["test"]
};
const mockProjLib = {
getProject: () => {
return mockTemplate;
},
projectIds: ["empty"],
projectType: "type",
themes: ["ig"]
};
newCmd.template = jasmine.createSpyObj("TemplateManager", {
getFrameworkById: {},
getProjectLibrary: mockProjLib
});
const mockConfig = { test: "test" };
spyOn(mockTemplate, "generateConfig").and.returnValue(mockConfig);
spyOn(process, "cwd").and.returnValue("Mock dir");
spyOn(Util, "processTemplates").and.returnValue(Promise.resolve(true));
await newCmd.execute({ name: "Test", framework: "jq", type: "type", theme: "ig" });
expect(newCmd.template.getFrameworkById).toHaveBeenCalledWith("jq");
expect(newCmd.template.getProjectLibrary).toHaveBeenCalledWith("jq", "type");
expect(mockTemplate.generateConfig).toHaveBeenCalledWith("Test", "ig");
expect(Util.processTemplates)
.toHaveBeenCalledWith("test", path.join("Mock dir", "Test"), mockConfig, mockDelimiters, false);
expect(PackageManager.installPackages).toHaveBeenCalled();
expect(process.chdir).toHaveBeenCalledWith("Test");
expect(process.chdir).toHaveBeenCalledWith("..");
expect(Util.log).toHaveBeenCalledWith("Project Name: Test, framework jq, type type, theme ig");
expect(Util.log).toHaveBeenCalledWith(jasmine.stringMatching("Project Created"));
done();
});
it("Git initialization", async done => {
const projectName = "projTitle";
const mockTemplate = {
generateConfig: { test: "test" },
templatePaths: ["test"]
};
const mockProjLib = {
getProject: () => {
return mockTemplate;
},
projectIds: ["empty"],
projectType: "type",
themes: ["ig"]
};
newCmd.template = jasmine.createSpyObj("TemplateManager", {
getFrameworkById: {},
getProjectLibrary: mockProjLib
});
spyOn(mockTemplate, "generateConfig");
await newCmd.execute({ name: projectName, framework: "jq" });
expect(Util.execSync).toHaveBeenCalledWith("git init", jasmine.any(Object));
expect(Util.execSync).toHaveBeenCalledWith("git add .", jasmine.any(Object));
expect(Util.execSync).toHaveBeenCalledWith("git commit -m " + "\"Initial commit for project: " + projectName + "\"",
jasmine.any(Object));
expect(Util.log).toHaveBeenCalledWith(
jasmine.stringMatching("Git Initialized and Project '" + projectName + "' Committed")
);
done();
});
it("Skip Git initialization with command option", async done => {
const projectName = "projTitle";
const mockTemplate = {
generateConfig: { test: "test" },
templatePaths: ["test"]
};
const mockProjLib = {
getProject: () => {
return mockTemplate;
},
projectIds: ["empty"],
projectType: "type",
themes: ["ig"]
};
newCmd.template = jasmine.createSpyObj("TemplateManager", {
getFrameworkById: {},
getProjectLibrary: mockProjLib
});
spyOn(mockTemplate, "generateConfig");
spyOn(Util, "gitInit");
await newCmd.execute({ "name": projectName, "framework": "jq", "skip-git": true });
expect(Util.gitInit).not.toHaveBeenCalled();
done();
});
it("Skip Git initialization with configuration option", async done => {
const projectName = "projTitle";
const mockTemplate = {
generateConfig: { test: "test" },
templatePaths: ["test"]
};
const mockProjLib = {
getProject: () => {
return mockTemplate;
},
projectIds: ["empty"],
projectType: "type",
themes: ["ig"]
};
newCmd.template = jasmine.createSpyObj("TemplateManager", {
getFrameworkById: {},
getProjectLibrary: mockProjLib
});
spyOn(mockTemplate, "generateConfig");
spyOn(ProjectConfig, "getConfig").and.returnValue({ skipGit: true });
spyOn(Util, "gitInit");
await newCmd.execute({ name: projectName, framework: "jq" });
expect(Util.gitInit).not.toHaveBeenCalled();
done();
});
it("Skip package install with command option", async done => {
const mockTemplate = {
generateConfig: { test: "test" },
templatePaths: ["test"]
};
const mockProjLib = {
getProject: () => {
return mockTemplate;
},
projectIds: ["empty"],
projectType: "type",
themes: ["ig"]
};
newCmd.template = jasmine.createSpyObj("TemplateManager", {
getFrameworkById: {},
getProjectLibrary: mockProjLib
});
spyOn(mockTemplate, "generateConfig");
spyOn(Util, "gitInit");
await newCmd.execute({ name: "title", framework: "jq", skipInstall: true });
expect(PackageManager.installPackages).not.toHaveBeenCalled();
expect(process.chdir).not.toHaveBeenCalled();
done();
});
}); | the_stack |
import { _getProvider, FirebaseApp, getApp } from '@firebase/app';
import {
ref as refInternal,
FirebaseStorageImpl,
connectStorageEmulator as connectEmulatorInternal
} from './service';
import { Provider } from '@firebase/component';
import {
StorageReference,
FirebaseStorage,
UploadResult,
ListOptions,
ListResult,
UploadTask,
SettableMetadata,
UploadMetadata,
FullMetadata
} from './public-types';
import { Metadata as MetadataInternal } from './metadata';
import {
uploadBytes as uploadBytesInternal,
uploadBytesResumable as uploadBytesResumableInternal,
uploadString as uploadStringInternal,
getMetadata as getMetadataInternal,
updateMetadata as updateMetadataInternal,
list as listInternal,
listAll as listAllInternal,
getDownloadURL as getDownloadURLInternal,
deleteObject as deleteObjectInternal,
Reference,
_getChild as _getChildInternal
} from './reference';
import { STORAGE_TYPE } from './constants';
import { EmulatorMockTokenOptions, getModularInstance } from '@firebase/util';
import { StringFormat } from './implementation/string';
export { EmulatorMockTokenOptions } from '@firebase/util';
/**
* Public types.
*/
export * from './public-types';
export { Location as _Location } from './implementation/location';
export { UploadTask as _UploadTask } from './task';
export type { Reference as _Reference } from './reference';
export type { FirebaseStorageImpl as _FirebaseStorageImpl } from './service';
export { FbsBlob as _FbsBlob } from './implementation/blob';
export { dataFromString as _dataFromString } from './implementation/string';
export {
invalidRootOperation as _invalidRootOperation,
invalidArgument as _invalidArgument
} from './implementation/error';
export {
TaskEvent as _TaskEvent,
TaskState as _TaskState
} from './implementation/taskenums';
/**
 * Uploads data to this object's location.
 * The upload is not resumable.
 * @public
 * @param ref - {@link StorageReference} where data should be uploaded.
 * @param data - The data to upload.
 * @param metadata - Metadata for the data to upload.
 * @returns A Promise containing an UploadResult
 */
export function uploadBytes(
  ref: StorageReference,
  data: Blob | Uint8Array | ArrayBuffer,
  metadata?: UploadMetadata
): Promise<UploadResult> {
  // Unwrap a possible compat wrapper before delegating to the internal impl.
  const reference = getModularInstance(ref) as Reference;
  return uploadBytesInternal(reference, data, metadata as MetadataInternal);
}
/**
 * Uploads a string to this object's location.
 * The upload is not resumable.
 * @public
 * @param ref - {@link StorageReference} where string should be uploaded.
 * @param value - The string to upload.
 * @param format - The format of the string to upload.
 * @param metadata - Metadata for the string to upload.
 * @returns A Promise containing an UploadResult
 */
export function uploadString(
  ref: StorageReference,
  value: string,
  format?: StringFormat,
  metadata?: UploadMetadata
): Promise<UploadResult> {
  // Unwrap a possible compat wrapper before delegating to the internal impl.
  const reference = getModularInstance(ref) as Reference;
  return uploadStringInternal(
    reference,
    value,
    format,
    metadata as MetadataInternal
  );
}
/**
 * Uploads data to this object's location.
 * The upload can be paused and resumed, and exposes progress updates.
 * @public
 * @param ref - {@link StorageReference} where data should be uploaded.
 * @param data - The data to upload.
 * @param metadata - Metadata for the data to upload.
 * @returns An UploadTask
 */
export function uploadBytesResumable(
  ref: StorageReference,
  data: Blob | Uint8Array | ArrayBuffer,
  metadata?: UploadMetadata
): UploadTask {
  // Unwrap a possible compat wrapper before delegating to the internal impl.
  const reference = getModularInstance(ref) as Reference;
  const task = uploadBytesResumableInternal(
    reference,
    data,
    metadata as MetadataInternal
  );
  return task as UploadTask;
}
/**
 * A `Promise` that resolves with the metadata for this object. If this
 * object doesn't exist or metadata cannot be retrieved, the promise is
 * rejected.
 * @public
 * @param ref - {@link StorageReference} to get metadata from.
 * @returns A `Promise` resolving with the object's {@link FullMetadata}.
 */
export function getMetadata(ref: StorageReference): Promise<FullMetadata> {
  ref = getModularInstance(ref);
  return getMetadataInternal(ref as Reference) as Promise<FullMetadata>;
}
/**
 * Updates the metadata for this object.
 * @public
 * @param ref - {@link StorageReference} to update metadata for.
 * @param metadata - The new metadata for the object.
 *     Only values that have been explicitly set will be changed. Explicitly
 *     setting a value to null will remove the metadata.
 * @returns A `Promise` that resolves with the new metadata for this object.
 */
export function updateMetadata(
  ref: StorageReference,
  metadata: SettableMetadata
): Promise<FullMetadata> {
  // Unwrap a possible compat wrapper before delegating to the internal impl.
  const reference = getModularInstance(ref) as Reference;
  const result = updateMetadataInternal(
    reference,
    metadata as Partial<MetadataInternal>
  );
  return result as Promise<FullMetadata>;
}
/**
 * List items (files) and prefixes (folders) under this storage reference.
 *
 * List API is only available for Firebase Rules Version 2.
 *
 * GCS is a key-blob store. Firebase Storage imposes the semantic of '/'
 * delimited folder structure.
 * Refer to GCS's List API if you want to learn more.
 *
 * To adhere to Firebase Rules's Semantics, Firebase Storage does not
 * support objects whose paths end with "/" or contain two consecutive
 * "/"s. Firebase Storage List API will filter these unsupported objects.
 * list() may fail if there are too many unsupported objects in the bucket.
 * @public
 *
 * @param ref - {@link StorageReference} to get list from.
 * @param options - See {@link ListOptions} for details.
 * @returns A `Promise` that resolves with the items and prefixes.
 *      `prefixes` contains references to sub-folders and `items`
 *      contains references to objects in this folder. `nextPageToken`
 *      can be used to get the rest of the results.
 */
export function list(
  ref: StorageReference,
  options?: ListOptions
): Promise<ListResult> {
  // Unwrap a possible compat wrapper before delegating to the internal impl.
  const reference = getModularInstance(ref) as Reference;
  return listInternal(reference, options);
}
/**
 * List all items (files) and prefixes (folders) under this storage reference.
 *
 * This is a helper method for calling list() repeatedly until there are
 * no more results. The default pagination size is 1000.
 *
 * Note: The results may not be consistent if objects are changed while this
 * operation is running.
 *
 * Warning: `listAll` may potentially consume too many resources if there are
 * too many results.
 * @public
 * @param ref - {@link StorageReference} to get list from.
 *
 * @returns A `Promise` that resolves with all the items and prefixes under
 *      the current storage reference. `prefixes` contains references to
 *      sub-directories and `items` contains references to objects in this
 *      folder. `nextPageToken` is never returned.
 */
export function listAll(ref: StorageReference): Promise<ListResult> {
  // Unwrap a possible compat wrapper before delegating to the internal impl.
  const reference = getModularInstance(ref) as Reference;
  return listAllInternal(reference);
}
/**
 * Returns the download URL for the given {@link StorageReference}.
 * @public
 * @param ref - {@link StorageReference} to get the download URL for.
 * @returns A `Promise` that resolves with the download
 *     URL for this object.
 */
export function getDownloadURL(ref: StorageReference): Promise<string> {
  // Unwrap a possible compat wrapper before delegating to the internal impl.
  const reference = getModularInstance(ref) as Reference;
  return getDownloadURLInternal(reference);
}
/**
 * Deletes the object at this location.
 * @public
 * @param ref - {@link StorageReference} for object to delete.
 * @returns A `Promise` that resolves if the deletion succeeds.
 */
export function deleteObject(ref: StorageReference): Promise<void> {
  // Unwrap a possible compat wrapper before delegating to the internal impl.
  const reference = getModularInstance(ref) as Reference;
  return deleteObjectInternal(reference);
}
/**
 * Returns a {@link StorageReference} for the given url.
 * @param storage - {@link FirebaseStorage} instance.
 * @param url - URL. If empty, returns root reference.
 * @public
 */
export function ref(storage: FirebaseStorage, url?: string): StorageReference;
/**
 * Returns a {@link StorageReference} for the given path in the
 * default bucket.
 * @param storageOrRef - {@link FirebaseStorage} or {@link StorageReference}.
 * @param path - Path. If empty, returns root reference (if {@link FirebaseStorage}
 * instance provided) or returns same reference (if {@link StorageReference} provided).
 * @public
 */
export function ref(
  storageOrRef: FirebaseStorage | StorageReference,
  path?: string
): StorageReference;
export function ref(
  serviceOrRef: FirebaseStorage | StorageReference,
  pathOrUrl?: string
): StorageReference | null {
  // Unwrap a possible compat wrapper; the internal ref() handles both a
  // storage instance (path or gs://' URL) and an existing reference (path).
  const unwrapped = getModularInstance(serviceOrRef) as
    | FirebaseStorageImpl
    | Reference;
  return refInternal(unwrapped, pathOrUrl);
}
/**
 * Returns a child reference of `ref` for the given relative path.
 * @internal
 * @param ref - Parent {@link StorageReference}.
 * @param childPath - Path relative to the parent.
 */
export function _getChild(ref: StorageReference, childPath: string): Reference {
  const parent = ref as Reference;
  return _getChildInternal(parent, childPath);
}
export { StringFormat } from './implementation/string';
/**
 * Gets a {@link FirebaseStorage} instance for the given Firebase app.
 * @public
 * @param app - Firebase app to get {@link FirebaseStorage} instance for.
 * @param bucketUrl - The gs:// url to your Firebase Storage Bucket.
 * If not passed, uses the app's default Storage Bucket.
 * @returns A {@link FirebaseStorage} instance.
 */
export function getStorage(
  app: FirebaseApp = getApp(),
  bucketUrl?: string
): FirebaseStorage {
  // Unwrap a possible compat wrapper, then resolve the storage component
  // for this app, keyed by bucket so one instance exists per bucket.
  const resolvedApp = getModularInstance(app);
  const storageProvider: Provider<'storage'> = _getProvider(
    resolvedApp,
    STORAGE_TYPE
  );
  return storageProvider.getImmediate({ identifier: bucketUrl });
}
/**
* Modify this {@link FirebaseStorage} instance to communicate with the Cloud Storage emulator.
*
* @param storage - The {@link FirebaseStorage} instance
* @param host - The emulator host (ex: localhost)
* @param port - The emulator port (ex: 5001)
* @param options - Emulator options. `options.mockUserToken` is the mock auth
* token to use for unit testing Security Rules.
* @public
*/
export function connectStorageEmulator(
storage: FirebaseStorage,
host: string,
port: number,
options: {
mockUserToken?: EmulatorMockTokenOptions | string;
} = {}
): void {
connectEmulatorInternal(storage as FirebaseStorageImpl, host, port, options);
} | the_stack |
import { PagedAsyncIterableIterator } from "@azure/core-paging";
import { DataCollectionRuleAssociations } from "../operationsInterfaces";
import * as coreClient from "@azure/core-client";
import * as Mappers from "../models/mappers";
import * as Parameters from "../models/parameters";
import { MonitorClient } from "../monitorClient";
import {
DataCollectionRuleAssociationProxyOnlyResource,
DataCollectionRuleAssociationsListByResourceNextOptionalParams,
DataCollectionRuleAssociationsListByResourceOptionalParams,
DataCollectionRuleAssociationsListByRuleNextOptionalParams,
DataCollectionRuleAssociationsListByRuleOptionalParams,
DataCollectionRuleAssociationsListByResourceResponse,
DataCollectionRuleAssociationsListByRuleResponse,
DataCollectionRuleAssociationsGetOptionalParams,
DataCollectionRuleAssociationsGetResponse,
DataCollectionRuleAssociationsCreateOptionalParams,
DataCollectionRuleAssociationsCreateResponse,
DataCollectionRuleAssociationsDeleteOptionalParams,
DataCollectionRuleAssociationsListByResourceNextResponse,
DataCollectionRuleAssociationsListByRuleNextResponse
} from "../models";
/// <reference lib="esnext.asynciterable" />
/** Class containing DataCollectionRuleAssociations operations. */
export class DataCollectionRuleAssociationsImpl
implements DataCollectionRuleAssociations {
private readonly client: MonitorClient;
/**
 * Initialize a new instance of the class DataCollectionRuleAssociations class.
 * @param client Reference to the service client
 */
constructor(client: MonitorClient) {
  // All operations go through this shared client so they reuse its
  // pipeline, credentials and endpoint configuration.
  this.client = client;
}
/**
 * Lists associations for the specified resource.
 * Returns a lazy {@link PagedAsyncIterableIterator}: no request is sent
 * until the iterator (or its byPage() view) is actually consumed.
 * @param resourceUri The identifier of the resource.
 * @param options The options parameters.
 */
public listByResource(
  resourceUri: string,
  options?: DataCollectionRuleAssociationsListByResourceOptionalParams
): PagedAsyncIterableIterator<
  DataCollectionRuleAssociationProxyOnlyResource
> {
  const iter = this.listByResourcePagingAll(resourceUri, options);
  return {
    // Per-item iteration delegates to the flattening generator.
    next() {
      return iter.next();
    },
    [Symbol.asyncIterator]() {
      return this;
    },
    // Page-level iteration restarts the paging generator from page one.
    byPage: () => {
      return this.listByResourcePagingPage(resourceUri, options);
    }
  };
}
/**
 * Async generator yielding one page (array) of associations per step,
 * following the service's `nextLink` continuation tokens until exhausted.
 * @param resourceUri The identifier of the resource.
 * @param options The options parameters.
 */
private async *listByResourcePagingPage(
  resourceUri: string,
  options?: DataCollectionRuleAssociationsListByResourceOptionalParams
): AsyncIterableIterator<DataCollectionRuleAssociationProxyOnlyResource[]> {
  let result = await this._listByResource(resourceUri, options);
  // A page with no value still yields an empty array, never undefined.
  yield result.value || [];
  let continuationToken = result.nextLink;
  while (continuationToken) {
    result = await this._listByResourceNext(
      resourceUri,
      continuationToken,
      options
    );
    continuationToken = result.nextLink;
    yield result.value || [];
  }
}
private async *listByResourcePagingAll(
resourceUri: string,
options?: DataCollectionRuleAssociationsListByResourceOptionalParams
): AsyncIterableIterator<DataCollectionRuleAssociationProxyOnlyResource> {
for await (const page of this.listByResourcePagingPage(
resourceUri,
options
)) {
yield* page;
}
}
/**
* Lists associations for the specified data collection rule.
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param dataCollectionRuleName The name of the data collection rule. The name is case insensitive.
* @param options The options parameters.
*/
public listByRule(
resourceGroupName: string,
dataCollectionRuleName: string,
options?: DataCollectionRuleAssociationsListByRuleOptionalParams
): PagedAsyncIterableIterator<
DataCollectionRuleAssociationProxyOnlyResource
> {
const iter = this.listByRulePagingAll(
resourceGroupName,
dataCollectionRuleName,
options
);
return {
next() {
return iter.next();
},
[Symbol.asyncIterator]() {
return this;
},
byPage: () => {
return this.listByRulePagingPage(
resourceGroupName,
dataCollectionRuleName,
options
);
}
};
}
private async *listByRulePagingPage(
resourceGroupName: string,
dataCollectionRuleName: string,
options?: DataCollectionRuleAssociationsListByRuleOptionalParams
): AsyncIterableIterator<DataCollectionRuleAssociationProxyOnlyResource[]> {
let result = await this._listByRule(
resourceGroupName,
dataCollectionRuleName,
options
);
yield result.value || [];
let continuationToken = result.nextLink;
while (continuationToken) {
result = await this._listByRuleNext(
resourceGroupName,
dataCollectionRuleName,
continuationToken,
options
);
continuationToken = result.nextLink;
yield result.value || [];
}
}
private async *listByRulePagingAll(
resourceGroupName: string,
dataCollectionRuleName: string,
options?: DataCollectionRuleAssociationsListByRuleOptionalParams
): AsyncIterableIterator<DataCollectionRuleAssociationProxyOnlyResource> {
for await (const page of this.listByRulePagingPage(
resourceGroupName,
dataCollectionRuleName,
options
)) {
yield* page;
}
}
/**
* Lists associations for the specified resource.
* @param resourceUri The identifier of the resource.
* @param options The options parameters.
*/
private _listByResource(
resourceUri: string,
options?: DataCollectionRuleAssociationsListByResourceOptionalParams
): Promise<DataCollectionRuleAssociationsListByResourceResponse> {
return this.client.sendOperationRequest(
{ resourceUri, options },
listByResourceOperationSpec
);
}
/**
* Lists associations for the specified data collection rule.
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param dataCollectionRuleName The name of the data collection rule. The name is case insensitive.
* @param options The options parameters.
*/
private _listByRule(
resourceGroupName: string,
dataCollectionRuleName: string,
options?: DataCollectionRuleAssociationsListByRuleOptionalParams
): Promise<DataCollectionRuleAssociationsListByRuleResponse> {
return this.client.sendOperationRequest(
{ resourceGroupName, dataCollectionRuleName, options },
listByRuleOperationSpec
);
}
/**
* Returns the specified association.
* @param resourceUri The identifier of the resource.
* @param associationName The name of the association. The name is case insensitive.
* @param options The options parameters.
*/
get(
resourceUri: string,
associationName: string,
options?: DataCollectionRuleAssociationsGetOptionalParams
): Promise<DataCollectionRuleAssociationsGetResponse> {
return this.client.sendOperationRequest(
{ resourceUri, associationName, options },
getOperationSpec
);
}
/**
* Creates or updates an association.
* @param resourceUri The identifier of the resource.
* @param associationName The name of the association. The name is case insensitive.
* @param options The options parameters.
*/
create(
resourceUri: string,
associationName: string,
options?: DataCollectionRuleAssociationsCreateOptionalParams
): Promise<DataCollectionRuleAssociationsCreateResponse> {
return this.client.sendOperationRequest(
{ resourceUri, associationName, options },
createOperationSpec
);
}
/**
* Deletes an association.
* @param resourceUri The identifier of the resource.
* @param associationName The name of the association. The name is case insensitive.
* @param options The options parameters.
*/
delete(
resourceUri: string,
associationName: string,
options?: DataCollectionRuleAssociationsDeleteOptionalParams
): Promise<void> {
return this.client.sendOperationRequest(
{ resourceUri, associationName, options },
deleteOperationSpec
);
}
/**
* ListByResourceNext
* @param resourceUri The identifier of the resource.
* @param nextLink The nextLink from the previous successful call to the ListByResource method.
* @param options The options parameters.
*/
private _listByResourceNext(
resourceUri: string,
nextLink: string,
options?: DataCollectionRuleAssociationsListByResourceNextOptionalParams
): Promise<DataCollectionRuleAssociationsListByResourceNextResponse> {
return this.client.sendOperationRequest(
{ resourceUri, nextLink, options },
listByResourceNextOperationSpec
);
}
/**
* ListByRuleNext
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param dataCollectionRuleName The name of the data collection rule. The name is case insensitive.
* @param nextLink The nextLink from the previous successful call to the ListByRule method.
* @param options The options parameters.
*/
private _listByRuleNext(
resourceGroupName: string,
dataCollectionRuleName: string,
nextLink: string,
options?: DataCollectionRuleAssociationsListByRuleNextOptionalParams
): Promise<DataCollectionRuleAssociationsListByRuleNextResponse> {
return this.client.sendOperationRequest(
{ resourceGroupName, dataCollectionRuleName, nextLink, options },
listByRuleNextOperationSpec
);
}
}
// Operation Specifications
// Shared JSON serializer for every spec below (isXml = false).
const serializer = coreClient.createSerializer(Mappers, /* isXml */ false);
// GET first page of associations for an arbitrary resource (listByResource).
const listByResourceOperationSpec: coreClient.OperationSpec = {
  path:
    "/{resourceUri}/providers/Microsoft.Insights/dataCollectionRuleAssociations",
  httpMethod: "GET",
  responses: {
    200: {
      bodyMapper:
        Mappers.DataCollectionRuleAssociationProxyOnlyResourceListResult
    },
    default: {
      bodyMapper: Mappers.ErrorResponseCommonV2
    }
  },
  queryParameters: [Parameters.apiVersion12],
  urlParameters: [Parameters.$host, Parameters.resourceUri1],
  headerParameters: [Parameters.accept],
  serializer
};
// GET first page of associations under a specific data collection rule (listByRule).
const listByRuleOperationSpec: coreClient.OperationSpec = {
  path:
    "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/dataCollectionRules/{dataCollectionRuleName}/associations",
  httpMethod: "GET",
  responses: {
    200: {
      bodyMapper:
        Mappers.DataCollectionRuleAssociationProxyOnlyResourceListResult
    },
    default: {
      bodyMapper: Mappers.ErrorResponseCommonV2
    }
  },
  queryParameters: [Parameters.apiVersion12],
  urlParameters: [
    Parameters.$host,
    Parameters.resourceGroupName,
    Parameters.subscriptionId,
    Parameters.dataCollectionRuleName
  ],
  headerParameters: [Parameters.accept],
  serializer
};
// GET a single association by name.
const getOperationSpec: coreClient.OperationSpec = {
  path:
    "/{resourceUri}/providers/Microsoft.Insights/dataCollectionRuleAssociations/{associationName}",
  httpMethod: "GET",
  responses: {
    200: {
      bodyMapper: Mappers.DataCollectionRuleAssociationProxyOnlyResource
    },
    default: {
      bodyMapper: Mappers.ErrorResponseCommonV2
    }
  },
  queryParameters: [Parameters.apiVersion12],
  urlParameters: [
    Parameters.$host,
    Parameters.resourceUri1,
    Parameters.associationName
  ],
  headerParameters: [Parameters.accept],
  serializer
};
// PUT create-or-update: 200 = updated, 201 = created; JSON request body from Parameters.body2.
const createOperationSpec: coreClient.OperationSpec = {
  path:
    "/{resourceUri}/providers/Microsoft.Insights/dataCollectionRuleAssociations/{associationName}",
  httpMethod: "PUT",
  responses: {
    200: {
      bodyMapper: Mappers.DataCollectionRuleAssociationProxyOnlyResource
    },
    201: {
      bodyMapper: Mappers.DataCollectionRuleAssociationProxyOnlyResource
    },
    default: {
      bodyMapper: Mappers.ErrorResponseCommonV2
    }
  },
  requestBody: Parameters.body2,
  queryParameters: [Parameters.apiVersion12],
  urlParameters: [
    Parameters.$host,
    Parameters.resourceUri1,
    Parameters.associationName
  ],
  headerParameters: [Parameters.accept, Parameters.contentType],
  mediaType: "json",
  serializer
};
// DELETE an association; both 200 and 204 are success with an empty body.
const deleteOperationSpec: coreClient.OperationSpec = {
  path:
    "/{resourceUri}/providers/Microsoft.Insights/dataCollectionRuleAssociations/{associationName}",
  httpMethod: "DELETE",
  responses: {
    200: {},
    204: {},
    default: {
      bodyMapper: Mappers.ErrorResponseCommonV2
    }
  },
  queryParameters: [Parameters.apiVersion12],
  urlParameters: [
    Parameters.$host,
    Parameters.resourceUri1,
    Parameters.associationName
  ],
  headerParameters: [Parameters.accept],
  serializer
};
// GET a continuation page via the opaque nextLink URL (listByResource paging).
const listByResourceNextOperationSpec: coreClient.OperationSpec = {
  path: "{nextLink}",
  httpMethod: "GET",
  responses: {
    200: {
      bodyMapper:
        Mappers.DataCollectionRuleAssociationProxyOnlyResourceListResult
    },
    default: {
      bodyMapper: Mappers.ErrorResponseCommonV2
    }
  },
  queryParameters: [Parameters.apiVersion12],
  urlParameters: [
    Parameters.$host,
    Parameters.nextLink,
    Parameters.resourceUri1
  ],
  headerParameters: [Parameters.accept],
  serializer
};
// GET a continuation page via the opaque nextLink URL (listByRule paging).
const listByRuleNextOperationSpec: coreClient.OperationSpec = {
  path: "{nextLink}",
  httpMethod: "GET",
  responses: {
    200: {
      bodyMapper:
        Mappers.DataCollectionRuleAssociationProxyOnlyResourceListResult
    },
    default: {
      bodyMapper: Mappers.ErrorResponseCommonV2
    }
  },
  queryParameters: [Parameters.apiVersion12],
  urlParameters: [
    Parameters.$host,
    Parameters.resourceGroupName,
    Parameters.subscriptionId,
    Parameters.nextLink,
    Parameters.dataCollectionRuleName
  ],
  headerParameters: [Parameters.accept],
  serializer
};
import { snowflakeIdv1Option } from "./snowflakeIdv1Option"
/**
 * Snowflake-style distributed ID generator supporting two algorithms:
 * a "drift" algorithm (borrows sequence numbers from future ticks when a
 * millisecond's sequence is exhausted) and the classic algorithm.
 *
 * ID bit layout, high to low:
 *   timestamp delta (ms since BaseTime) | WorkerId | sequence number.
 */
export class snowflakeIdv1 {
  /**
   * Snowflake calculation method (1 = drift algorithm, 2 = classic algorithm), default 1.
   */
  private Method
  /**
   * Base epoch in milliseconds; must not be later than the current system time.
   */
  private BaseTime
  /**
   * Machine id; must be supplied by the caller. Max value is 2^WorkerIdBitLength - 1.
   */
  private WorkerId
  /**
   * Bit length of the machine id. Default 6, valid range [1, 15]
   * (constraint: SeqBitLength + WorkerIdBitLength must not exceed 22).
   */
  private WorkerIdBitLength
  /**
   * Bit length of the sequence number. Default 6, valid range [3, 21]
   * (constraint: SeqBitLength + WorkerIdBitLength must not exceed 22).
   */
  private SeqBitLength
  /**
   * Maximum sequence number (inclusive). Valid range [MinSeqNumber, 2^SeqBitLength - 1];
   * a value of 0 means "use the maximum", i.e. 2^SeqBitLength - 1.
   */
  private MaxSeqNumber
  /**
   * Minimum sequence number (inclusive). Default 5, valid range [5, MaxSeqNumber].
   * The first 5 sequence slots (0-4) of every millisecond are reserved:
   * 1-4 are clock-rollback ordinals, 0 is for manually assigned values.
   */
  private MinSeqNumber
  /**
   * Maximum drift (over-cost) count, inclusive. Default 2000,
   * recommended range 500-10000 (depends on available computing power).
   */
  private TopOverCostCount
  /**
   * Bit shift applied to the timestamp field (WorkerIdBitLength + SeqBitLength).
   */
  private _TimestampShift
  /**
   * Sequence number to use for the next ID within the current tick.
   */
  private _CurrentSeqNumber
  /**
   * Last time tick (ms since BaseTime) an ID was generated for.
   */
  private _LastTimeTick: bigint
  /**
   * Tick being replayed while the clock is rolled back; 0 when no rollback is
   * active. Four rollback ordinals are supported (avoids duplicate IDs from
   * overlapping rollbacks).
   */
  private _TurnBackTimeTick: bigint
  /**
   * Rollback ordinal (1-4, reused cyclically) encoded into rollback IDs.
   */
  private _TurnBackIndex
  /**
   * True while the drift algorithm is borrowing sequence numbers from future ticks.
   */
  private _IsOverCost
  /**
   * Number of ticks borrowed in the current over-cost term.
   */
  private _OverCostCountInOneTerm
  /**
   *Creates an instance of Genid.
   * @author zhupengfeivip
   * @param {{
   *  BaseTime: 1577836800000, // Base epoch (ms), default 2020-01-01. Must not exceed the current system
   *                           // time and must never change once in use — changing it can produce IDs that
   *                           // collide with previously issued ones.
   *  WorkerId: Number,        // Machine id, must be set externally; max value 2^WorkerIdBitLength - 1
   *  WorkerIdBitLength: 6,    // Machine-id bit length, default 6, range [1, 15] (SeqBitLength + WorkerIdBitLength <= 22)
   *  SeqBitLength: 6,         // Sequence bit length, default 6, range [3, 21] (SeqBitLength + WorkerIdBitLength <= 22)
   *  MaxSeqNumber: 5,         // Max sequence number (inclusive), range [MinSeqNumber, 2^SeqBitLength - 1];
   *                           // 0 means "use the maximum" (2^SeqBitLength - 1)
   *  MinSeqNumber: 5,         // Min sequence number (inclusive), default 5, range [5, MaxSeqNumber]; slots 0-4 of each
   *                           // millisecond are reserved (1-4 for rollback ordinals, 0 for manual values)
   *  TopOverCostCount: 2000   // Max drift count (inclusive), default 2000, recommended 500-10000
   * }} options
   * @memberof Genid
   */
  constructor(options: snowflakeIdv1Option) {
    if (options.workerId === undefined)
      throw new Error("lost WorkerId")
    // 1. BaseTime default: Wed, 01 Jan 2020 00:00:00 GMT (2020-01-01 UTC).
    const BaseTime = 1577836800000
    if (!options.baseTime || options.baseTime < 0)
      options.baseTime = BaseTime
    // 2. WorkerIdBitLength default: 6.
    const WorkerIdBitLength = 6
    if (!options.workerIdBitLength || options.workerIdBitLength < 0)
      options.workerIdBitLength = WorkerIdBitLength
    // 3. SeqBitLength default: 6.
    const SeqBitLength = 6
    if (!options.seqBitLength || options.seqBitLength < 0)
      options.seqBitLength = SeqBitLength
    // 4. MaxSeqNumber: unset or <= 0 means "use the full sequence range".
    if (options.maxSeqNumber == undefined || options.maxSeqNumber <= 0)
      options.maxSeqNumber = (1 << SeqBitLength) - 1
    // 5. MinSeqNumber default: 5 (slots 0-4 are reserved, see field docs).
    const MinSeqNumber = 5
    if (options.minSeqNumber == undefined || options.minSeqNumber < 0)
      options.minSeqNumber = MinSeqNumber
    // 6. TopOverCostCount default: 2000.
    const topOverCostCount = 2000
    if (options.topOverCostCount == undefined || options.topOverCostCount < 0)
      options.topOverCostCount = topOverCostCount
    // Normalize method to 1 unless explicitly 2 (the else branch is redundant but harmless).
    if (options.method !== 2)
      options.method = 1
    else
      options.method = 2
    // All bit-field arithmetic is done in bigint to avoid 53-bit overflow.
    this.Method = BigInt(options.method)
    this.BaseTime = BigInt(options.baseTime)
    this.WorkerId = BigInt(options.workerId)
    this.WorkerIdBitLength = BigInt(options.workerIdBitLength)
    this.SeqBitLength = BigInt(options.seqBitLength)
    this.MaxSeqNumber = BigInt(options.maxSeqNumber)
    this.MinSeqNumber = BigInt(options.minSeqNumber)
    this.TopOverCostCount = BigInt(options.topOverCostCount)
    const timestampShift = this.WorkerIdBitLength + this.SeqBitLength
    const currentSeqNumber = this.MinSeqNumber
    this._TimestampShift = timestampShift
    this._CurrentSeqNumber = currentSeqNumber
    this._LastTimeTick = BigInt(0)
    this._TurnBackTimeTick = BigInt(0)
    this._TurnBackIndex = 0
    this._IsOverCost = false
    this._OverCostCountInOneTerm = 0
  }
  /**
   * Notification hook fired when the sequence exceeds its maximum range and
   * the generator starts borrowing (over-cost) sequence numbers. Not implemented yet.
   * @returns
   */
  private BeginOverCostAction(useTimeTick: any) {
  }
  /**
   * Notification hook fired when the generator stops borrowing (over-cost)
   * sequence numbers. Not implemented yet.
   * @returns
   */
  private EndOverCostAction(useTimeTick: any) {
    // if m1._TermIndex > 10000 {
    //   m1._TermIndex = 0
    // }
  }
  /**
   * Notification hook for the start of a clock rollback. Not implemented yet.
   * @returns
   */
  private BeginTurnBackAction(useTimeTick: any) {
  }
  /**
   * Notification hook for the end of a clock rollback. Not implemented yet.
   * @returns
   */
  private EndTurnBackAction(useTimeTick: any) {
  }
  /**
   * Snowflake "drift" algorithm: when the sequence of the current millisecond
   * is exhausted, borrow sequence numbers from future ticks instead of waiting
   * for the clock.
   * @returns the next ID as a bigint
   */
  private NextOverCostId(): bigint {
    const currentTimeTick = this.GetCurrentTimeTick()
    if (currentTimeTick > this._LastTimeTick) {
      this.EndOverCostAction(currentTimeTick)
      // The clock moved past the last used tick — normal forward progress,
      // so leave over-cost mode and restart the sequence.
      this._LastTimeTick = currentTimeTick
      this._CurrentSeqNumber = this.MinSeqNumber
      this._IsOverCost = false
      this._OverCostCountInOneTerm = 0
      // this._GenCountInOneTerm = 0
      return this.CalcId(this._LastTimeTick)
    }
    if (this._OverCostCountInOneTerm >= this.TopOverCostCount) {
      // Drift count exceeded the limit: block until the wall clock passes the
      // last borrowed tick, then resume normally.
      // TODO: if a long clock rollback happens while waiting here, the wait can
      // be long. Could be improved by handling rollback when drift terminates
      // (this situation has a very low probability in practice).
      this.EndOverCostAction(currentTimeTick)
      this._LastTimeTick = this.GetNextTimeTick()
      this._CurrentSeqNumber = this.MinSeqNumber
      this._IsOverCost = false
      this._OverCostCountInOneTerm = 0
      // this._GenCountInOneTerm = 0
      return this.CalcId(this._LastTimeTick)
    }
    if (this._CurrentSeqNumber > this.MaxSeqNumber) {
      // Sequence exhausted for this tick — borrow the next tick ahead of time.
      this._LastTimeTick++
      this._CurrentSeqNumber = this.MinSeqNumber
      this._IsOverCost = true
      this._OverCostCountInOneTerm++
      // this._GenCountInOneTerm++
      return this.CalcId(this._LastTimeTick)
    }
    // this._GenCountInOneTerm++
    return this.CalcId(this._LastTimeTick)
  }
  /**
   * Classic snowflake algorithm: one sequence range per millisecond, with
   * reserved sequence slots used to survive clock rollback.
   * @returns the next ID as a bigint
   */
  private NextNormalId() {
    const currentTimeTick = this.GetCurrentTimeTick()
    if (currentTimeTick < this._LastTimeTick) {
      // The clock rolled back: issue IDs on earlier ticks using a reserved
      // rollback ordinal instead of the normal sequence.
      if (this._TurnBackTimeTick < 1) {
        this._TurnBackTimeTick = this._LastTimeTick - BigInt(1)
        this._TurnBackIndex++
        // The first 5 sequence slots of each millisecond are reserved:
        // 0 for manual values, 1-4 for rollback ordinals.
        // 4 distinct ordinals avoid duplicates from overlapping rollbacks;
        // ordinals are reused cyclically, so rollbacks can recur indefinitely.
        if (this._TurnBackIndex > 4)
          this._TurnBackIndex = 1
        this.BeginTurnBackAction(this._TurnBackTimeTick)
      }
      return this.CalcTurnBackId(this._TurnBackTimeTick)
    }
    // The clock has caught up — clear the rollback state.
    if (this._TurnBackTimeTick > 0) {
      this.EndTurnBackAction(this._TurnBackTimeTick)
      this._TurnBackTimeTick = BigInt(0)
    }
    if (currentTimeTick > this._LastTimeTick) {
      // New millisecond — restart the sequence.
      this._LastTimeTick = currentTimeTick
      this._CurrentSeqNumber = this.MinSeqNumber
      return this.CalcId(this._LastTimeTick)
    }
    if (this._CurrentSeqNumber > this.MaxSeqNumber) {
      // Sequence exhausted within the same millisecond — enter over-cost mode.
      this.BeginOverCostAction(currentTimeTick)
      // this._TermIndex++
      this._LastTimeTick++
      this._CurrentSeqNumber = this.MinSeqNumber
      this._IsOverCost = true
      this._OverCostCountInOneTerm = 1
      // this._GenCountInOneTerm = 1
      return this.CalcId(this._LastTimeTick)
    }
    return this.CalcId(this._LastTimeTick)
  }
  /**
   * Assemble an ID for the given tick and advance the sequence.
   * Layout (high to low): timestamp delta relative to BaseTime | WorkerId | sequence.
   * The delta is the generation-time system clock minus BaseTime, in milliseconds.
   * @param useTimeTick tick (ms since BaseTime) to encode
   * @returns the assembled ID
   */
  private CalcId(useTimeTick: bigint) {
    // NOTE(review): the outer BigInt() wrappers are redundant — all operands
    // are already bigint (fields are converted in the constructor).
    const result = BigInt(useTimeTick << this._TimestampShift) + BigInt(this.WorkerId << this.SeqBitLength) + BigInt(this._CurrentSeqNumber)
    this._CurrentSeqNumber++
    return result
  }
  /**
   * Assemble a clock-rollback ID: the sequence field carries the rollback
   * ordinal (1-4) instead of a normal sequence number.
   * @param useTimeTick tick (ms since BaseTime) to encode
   * @returns the assembled rollback ID
   */
  private CalcTurnBackId(useTimeTick: any) {
    const result = BigInt(useTimeTick << this._TimestampShift) + BigInt(this.WorkerId << this.SeqBitLength) + BigInt(this._TurnBackIndex)
    // Walk one tick further back so the next rollback ID stays unique.
    this._TurnBackTimeTick--
    return result
  }
  /**
   * Current time expressed as milliseconds elapsed since BaseTime.
   * @returns the current tick as a bigint
   */
  private GetCurrentTimeTick() {
    const millis = BigInt((new Date()).valueOf())
    return millis - this.BaseTime
  }
  /**
   * Spin until the wall clock advances past the last used tick.
   * @returns the first tick strictly greater than _LastTimeTick
   */
  private GetNextTimeTick() {
    let tempTimeTicker = this.GetCurrentTimeTick()
    while (tempTimeTicker <= this._LastTimeTick) {
      tempTimeTicker = this.GetCurrentTimeTick()
    }
    return tempTimeTicker
  }
  /**
   * Generate an ID.
   * @returns always a number; throws once the ID exceeds 2^53
   * (9007199254740992), the limit of exact integer representation in Number.
   */
  public NextNumber(): number {
    if (this._IsOverCost) {
      //
      let id = this.NextOverCostId()
      if (id >= 9007199254740992n)
        throw Error(`${id.toString()} over max of Number 9007199254740992`)
      return parseInt(id.toString())
    } else {
      //
      let id = this.NextNormalId()
      if (id >= 9007199254740992n)
        throw Error(`${id.toString()} over max of Number 9007199254740992`)
      return parseInt(id.toString())
    }
  }
  /**
   * Generate an ID.
   * @returns a number while the ID fits into Number's exact integer range
   * (below 2^53), a bigint once it no longer does.
   */
  public NextId(): number | bigint {
    if (this._IsOverCost) {
      //
      let id = this.NextOverCostId()
      if (id >= 9007199254740992n)
        return id
      else
        return parseInt(id.toString())
    } else {
      //
      let id = this.NextNormalId()
      if (id >= 9007199254740992n)
        return id
      else
        return parseInt(id.toString())
    }
  }
  /**
   * Generate an ID.
   * @returns always a bigint
   */
  public NextBigId(): bigint {
    if (this._IsOverCost) {
      //
      return this.NextOverCostId()
    } else {
      //
      return this.NextNormalId()
    }
  }
}
import { test as it, expect } from './pageTest';
import { attachFrame } from '../config/utils';
import path from 'path';
import fs from 'fs';
import formidable from 'formidable';
// --- Basic file selection via setInputFiles ----------------------------------

it('should upload the file', async ({ page, server, asset }) => {
  await page.goto(server.PREFIX + '/input/fileupload.html');
  // Use a relative path to exercise path resolution in setInputFiles.
  const filePath = path.relative(process.cwd(), asset('file-to-upload.txt'));
  const input = await page.$('input');
  await input.setInputFiles(filePath);
  expect(await page.evaluate(e => e.files[0].name, input)).toBe('file-to-upload.txt');
  // Read the file back inside the page to verify its contents arrived intact.
  expect(await page.evaluate(e => {
    const reader = new FileReader();
    const promise = new Promise(fulfill => reader.onload = fulfill);
    reader.readAsText(e.files[0]);
    return promise.then(() => reader.result);
  }, input)).toBe('contents of the file');
});

it('should work', async ({ page, asset }) => {
  await page.setContent(`<input type=file>`);
  await page.setInputFiles('input', asset('file-to-upload.txt'));
  expect(await page.$eval('input', input => input.files.length)).toBe(1);
  expect(await page.$eval('input', input => input.files[0].name)).toBe('file-to-upload.txt');
});

it('should work with label', async ({ page, asset }) => {
  // Targeting the <label> must resolve to its associated input element.
  await page.setContent(`<label for=target>Choose a file</label><input id=target type=file>`);
  await page.setInputFiles('text=Choose a file', asset('file-to-upload.txt'));
  expect(await page.$eval('input', input => input.files.length)).toBe(1);
  expect(await page.$eval('input', input => input.files[0].name)).toBe('file-to-upload.txt');
});

it('should set from memory', async ({ page }) => {
  // Files can be supplied as in-memory buffers instead of paths on disk.
  await page.setContent(`<input type=file>`);
  await page.setInputFiles('input', {
    name: 'test.txt',
    mimeType: 'text/plain',
    buffer: Buffer.from('this is a test')
  });
  expect(await page.$eval('input', input => input.files.length)).toBe(1);
  expect(await page.$eval('input', input => input.files[0].name)).toBe('test.txt');
});
// --- 'filechooser' event delivery --------------------------------------------

it('should emit event once', async ({ page, server }) => {
  await page.setContent(`<input type=file>`);
  // The listener is registered before the click so the event cannot be missed.
  const [chooser] = await Promise.all([
    new Promise(f => page.once('filechooser', f)),
    page.click('input'),
  ]);
  expect(chooser).toBeTruthy();
});

it('should emit event for iframe', async ({ page, server, browserName }) => {
  it.skip(browserName === 'firefox');
  const frame = await attachFrame(page, 'frame1', server.EMPTY_PAGE);
  await frame.setContent(`<input type=file>`);
  // The event is emitted on the page even when the input lives in a child frame.
  const [chooser] = await Promise.all([
    new Promise(f => page.once('filechooser', f)),
    frame.click('input'),
  ]);
  expect(chooser).toBeTruthy();
});

it('should emit event on/off', async ({ page, server }) => {
  await page.setContent(`<input type=file>`);
  const [chooser] = await Promise.all([
    new Promise(f => {
      // Self-removing listener: unsubscribe before resolving.
      const listener = chooser => {
        page.off('filechooser', listener);
        f(chooser);
      };
      page.on('filechooser', listener);
    }),
    page.click('input'),
  ]);
  expect(chooser).toBeTruthy();
});

it('should emit event addListener/removeListener', async ({ page, server }) => {
  // Same as the on/off test above, via the Node EventEmitter aliases.
  await page.setContent(`<input type=file>`);
  const [chooser] = await Promise.all([
    new Promise(f => {
      const listener = chooser => {
        page.removeListener('filechooser', listener);
        f(chooser);
      };
      page.addListener('filechooser', listener);
    }),
    page.click('input'),
  ]);
  expect(chooser).toBeTruthy();
});

it('should work when file input is attached to DOM', async ({ page, server }) => {
  await page.setContent(`<input type=file>`);
  const [chooser] = await Promise.all([
    page.waitForEvent('filechooser'),
    page.click('input'),
  ]);
  expect(chooser).toBeTruthy();
});
it('should work when file input is not attached to DOM', async ({ page, asset }) => {
  // The chooser must surface even for an input that was never inserted
  // into the document.
  const [,content] = await Promise.all([
    page.waitForEvent('filechooser').then(chooser => chooser.setFiles(asset('file-to-upload.txt'))),
    page.evaluate(async () => {
      const el = document.createElement('input');
      el.type = 'file';
      el.click();
      await new Promise(x => el.oninput = x);
      const reader = new FileReader();
      const promise = new Promise(fulfill => reader.onload = fulfill);
      reader.readAsText(el.files[0]);
      return promise.then(() => reader.result);
    }),
  ]);
  expect(content).toBe('contents of the file');
});

it('should not throw when filechooser belongs to iframe', async ({ page, server, browserName }) => {
  await page.goto(server.PREFIX + '/frames/one-frame.html');
  const frame = page.mainFrame().childFrames()[0];
  await frame.setContent(`
    <div>Click me</div>
    <script>
      document.querySelector('div').addEventListener('click', () => {
        const input = document.createElement('input');
        input.type = 'file';
        input.click();
        window.parent.__done = true;
      });
    </script>
  `);
  await Promise.all([
    page.waitForEvent('filechooser'),
    frame.click('div')
  ]);
  // __done is set synchronously after input.click(), so reaching it proves
  // the click handler completed without throwing.
  await page.waitForFunction(() => (window as any).__done);
});

it('should not throw when frame is detached immediately', async ({ page, server }) => {
  await page.goto(server.PREFIX + '/frames/one-frame.html');
  const frame = page.mainFrame().childFrames()[0];
  await frame.setContent(`
    <div>Click me</div>
    <script>
      document.querySelector('div').addEventListener('click', () => {
        const input = document.createElement('input');
        input.type = 'file';
        input.click();
        window.parent.__done = true;
        const iframe = window.parent.document.querySelector('iframe');
        iframe.remove();
      });
    </script>
  `);
  page.on('filechooser', () => {}); // To ensure we handle file choosers.
  await frame.click('div');
  await page.waitForFunction(() => (window as any).__done);
});
it('should work with CSP', async ({ page, server, asset }) => {
  // A restrictive Content-Security-Policy must not break file selection.
  server.setCSP('/empty.html', 'default-src "none"');
  await page.goto(server.EMPTY_PAGE);
  await page.setContent(`<input type=file>`);
  await page.setInputFiles('input', asset('file-to-upload.txt'));
  expect(await page.$eval('input', input => input.files.length)).toBe(1);
  expect(await page.$eval('input', input => input.files[0].name)).toBe('file-to-upload.txt');
});

// --- Timeout semantics of waitForEvent('filechooser') ------------------------

it('should respect timeout', async ({ page, playwright }) => {
  let error = null;
  await page.waitForEvent('filechooser', { timeout: 1 }).catch(e => error = e);
  expect(error).toBeInstanceOf(playwright.errors.TimeoutError);
});

it('should respect default timeout when there is no custom timeout', async ({ page, playwright }) => {
  page.setDefaultTimeout(1);
  let error = null;
  await page.waitForEvent('filechooser').catch(e => error = e);
  expect(error).toBeInstanceOf(playwright.errors.TimeoutError);
});

it('should prioritize exact timeout over default timeout', async ({ page, playwright }) => {
  // A default of 0 would wait forever; the explicit 1ms timeout must win.
  page.setDefaultTimeout(0);
  let error = null;
  await page.waitForEvent('filechooser', { timeout: 1 }).catch(e => error = e);
  expect(error).toBeInstanceOf(playwright.errors.TimeoutError);
});

it('should work with no timeout', async ({ page, server }) => {
  const [chooser] = await Promise.all([
    page.waitForEvent('filechooser', { timeout: 0 }),
    // Delay the click so the wait is already armed when the chooser opens.
    page.evaluate(() => setTimeout(() => {
      const el = document.createElement('input');
      el.type = 'file';
      el.click();
    }, 50))
  ]);
  expect(chooser).toBeTruthy();
});

it('should return the same file chooser when there are many watchdogs simultaneously', async ({ page, server }) => {
  await page.setContent(`<input type=file>`);
  const [fileChooser1, fileChooser2] = await Promise.all([
    page.waitForEvent('filechooser'),
    page.waitForEvent('filechooser'),
    page.$eval('input', input => input.click()),
  ]);
  // Both concurrent waiters must resolve with the very same chooser object.
  expect(fileChooser1 === fileChooser2).toBe(true);
});
it('should accept single file', async ({ page, asset }) => {
  await page.setContent(`<input type=file oninput='javascript:console.timeStamp()'>`);
  const [fileChooser] = await Promise.all([
    page.waitForEvent('filechooser'),
    page.click('input'),
  ]);
  expect(fileChooser.page()).toBe(page);
  expect(fileChooser.element()).toBeTruthy();
  await fileChooser.setFiles(asset('file-to-upload.txt'));
  expect(await page.$eval('input', input => input.files.length)).toBe(1);
  expect(await page.$eval('input', input => input.files[0].name)).toBe('file-to-upload.txt');
});

it('should detect mime type', async ({ page, server, asset, isAndroid }) => {
  it.fixme(isAndroid);
  let files;
  // Capture the multipart form server-side to inspect the reported mime types.
  server.setRoute('/upload', async (req, res) => {
    const form = new formidable.IncomingForm();
    form.parse(req, function(err, fields, f) {
      files = f;
      res.end();
    });
  });
  await page.goto(server.EMPTY_PAGE);
  await page.setContent(`
    <form action="/upload" method="post" enctype="multipart/form-data" >
      <input type="file" name="file1">
      <input type="file" name="file2">
      <input type="submit" value="Submit">
    </form>`);
  await (await page.$('input[name=file1]')).setInputFiles(asset('file-to-upload.txt'));
  await (await page.$('input[name=file2]')).setInputFiles(asset('pptr.png'));
  await Promise.all([
    page.click('input[type=submit]'),
    server.waitForRequest('/upload'),
  ]);
  const { file1, file2 } = files;
  expect(file1.name).toBe('file-to-upload.txt');
  expect(file1.type).toBe('text/plain');
  expect(fs.readFileSync(file1.path).toString()).toBe(
      fs.readFileSync(asset('file-to-upload.txt')).toString());
  expect(file2.name).toBe('pptr.png');
  expect(file2.type).toBe('image/png');
  expect(fs.readFileSync(file2.path).toString()).toBe(
      fs.readFileSync(asset('pptr.png')).toString());
});

// @see https://github.com/microsoft/playwright/issues/4704
it('should not trim big uploaded files', async ({ page, server, asset, isAndroid }) => {
  it.fixme(isAndroid);
  let files;
  server.setRoute('/upload', async (req, res) => {
    const form = new formidable.IncomingForm();
    form.parse(req, function(err, fields, f) {
      files = f;
      res.end();
    });
  });
  await page.goto(server.EMPTY_PAGE);
  const DATA_SIZE = Math.pow(2, 20); // 1 MiB payload
  await Promise.all([
    page.evaluate(async size => {
      const body = new FormData();
      body.set('file', new Blob([new Uint8Array(size)]));
      await fetch('/upload', { method: 'POST', body });
    }, DATA_SIZE),
    server.waitForRequest('/upload'),
  ]);
  // The server must receive every byte, not a truncated body.
  expect(files.file.size).toBe(DATA_SIZE);
});
it('should be able to read selected file', async ({ page, asset }) => {
  await page.setContent(`<input type=file>`);
  const [, content] = await Promise.all([
    page.waitForEvent('filechooser').then(fileChooser => fileChooser.setFiles(asset('file-to-upload.txt'))),
    page.$eval('input', async picker => {
      picker.click();
      await new Promise(x => picker.oninput = x);
      const reader = new FileReader();
      const promise = new Promise(fulfill => reader.onload = fulfill);
      reader.readAsText(picker.files[0]);
      return promise.then(() => reader.result);
    }),
  ]);
  expect(content).toBe('contents of the file');
});

it('should be able to reset selected files with empty file list', async ({ page, asset }) => {
  await page.setContent(`<input type=file>`);
  // First selection populates the input...
  const [, fileLength1] = await Promise.all([
    page.waitForEvent('filechooser').then(fileChooser => fileChooser.setFiles(asset('file-to-upload.txt'))),
    page.$eval('input', async picker => {
      picker.click();
      await new Promise(x => picker.oninput = x);
      return picker.files.length;
    }),
  ]);
  expect(fileLength1).toBe(1);
  // ...then setting an empty list clears it again.
  const [, fileLength2] = await Promise.all([
    page.waitForEvent('filechooser').then(fileChooser => fileChooser.setFiles([])),
    page.$eval('input', async picker => {
      picker.click();
      await new Promise(x => picker.oninput = x);
      return picker.files.length;
    }),
  ]);
  expect(fileLength2).toBe(0);
});

it('should not accept multiple files for single-file input', async ({ page, asset }) => {
  await page.setContent(`<input type=file>`);
  const [fileChooser] = await Promise.all([
    page.waitForEvent('filechooser'),
    page.click('input'),
  ]);
  let error = null;
  // An input without the `multiple` attribute must reject a two-file list.
  await fileChooser.setFiles([
    asset('file-to-upload.txt'),
    asset('pptr.png')
  ]).catch(e => error = e);
  expect(error).not.toBe(null);
});

it('should emit input and change events', async ({ page, asset }) => {
  const events = [];
  await page.exposeFunction('eventHandled', e => events.push(e));
  await page.setContent(`
    <input id=input type=file></input>
    <script>
      input.addEventListener('input', e => eventHandled({ type: e.type }));
      input.addEventListener('change', e => eventHandled({ type: e.type }));
    </script>`);
  await (await page.$('input')).setInputFiles(asset('file-to-upload.txt'));
  // 'input' must fire before 'change', mirroring a real user selection.
  expect(events.length).toBe(2);
  expect(events[0].type).toBe('input');
  expect(events[1].type).toBe('change');
});

it('should work for single file pick', async ({ page, server }) => {
  await page.setContent(`<input type=file>`);
  const [fileChooser] = await Promise.all([
    page.waitForEvent('filechooser'),
    page.click('input'),
  ]);
  expect(fileChooser.isMultiple()).toBe(false);
});

it('should work for "multiple"', async ({ page, server }) => {
  await page.setContent(`<input multiple type=file>`);
  const [fileChooser] = await Promise.all([
    page.waitForEvent('filechooser'),
    page.click('input'),
  ]);
  expect(fileChooser.isMultiple()).toBe(true);
});

it('should work for "webkitdirectory"', async ({ page, server }) => {
  await page.setContent(`<input multiple webkitdirectory type=file>`);
  const [fileChooser] = await Promise.all([
    page.waitForEvent('filechooser'),
    page.click('input'),
  ]);
  expect(fileChooser.isMultiple()).toBe(true);
});
import Prando from "prando";
import { BigNumber } from "bignumber.js";
import type { Account, Operation, OperationType } from "../../types";
import type {
CosmosResources,
CosmosDelegation,
CosmosUnbonding,
CosmosRedelegation,
} from "./types";
import preloadedData from "./preloadedData.mock";
import { genHex, genAddress } from "../../mock/helpers";
// NOTE(review): `validators` is not referenced in this part of the file —
// presumably consumed further down (e.g. when picking delegation targets); verify.
const { validators } = preloadedData;
/**
 * Rebuild `account.cosmosResources` from the given delegations (mutates and
 * returns the same account).
 *
 * - delegated/pendingRewards balances are re-derived from `delegations`;
 * - `unbondingBalance` is added onto any previously accumulated value;
 * - `unbondings`/`redelegations` fall back to the previous lists, then `[]`.
 */
function setCosmosResources(
  account: Account,
  delegations: CosmosDelegation[],
  unbondingBalance: BigNumber = new BigNumber(0),
  unbondings: CosmosUnbonding[] | null | undefined,
  redelegations: CosmosRedelegation[] | null | undefined
): Account {
  // Capture the previous resources before overwriting them.
  const previous = account.cosmosResources;
  // Single pass over the delegations for both totals.
  let delegatedBalance = new BigNumber(0);
  let pendingRewardsBalance = new BigNumber(0);
  for (const delegation of delegations) {
    delegatedBalance = delegatedBalance.plus(delegation.amount);
    pendingRewardsBalance = pendingRewardsBalance.plus(
      delegation.pendingRewards
    );
  }
  account.cosmosResources = {
    delegations,
    delegatedBalance,
    pendingRewardsBalance,
    unbondingBalance: previous
      ? previous.unbondingBalance.plus(unbondingBalance)
      : unbondingBalance,
    // Mock shortcut: the account id doubles as the withdraw address.
    withdrawAddress: account.id,
    unbondings: unbondings ?? previous?.unbondings ?? [],
    redelegations: redelegations ?? previous?.redelegations ?? [],
  };
  return account;
}
function setOperationFeeValue(
  operation: Operation,
  base: BigNumber
): Operation {
  /** Derive a mock fee as 0.1% of `base` (rounded) and mirror it into the operation value. */
  const fee = new BigNumber(Math.round(base.toNumber() * 0.001));
  operation.fee = fee;
  operation.value = fee;
  return operation;
}
function genBaseOperation(
  account: Account,
  rng: Prando,
  type: OperationType,
  index: number
): Operation {
  /**
   * Build a deterministic mock operation of the given type, dated some random
   * span before the operation currently at `index` (or before now).
   * The order of `rng` calls is significant: Prando is deterministic.
   */
  const { operations: ops } = account;
  const address = genAddress(account.currency, rng);
  const neighbourOp = ops[index];
  const anchor = neighbourOp ? neighbourOp.date.valueOf() : Date.now();
  const date = new Date(
    anchor - rng.nextInt(0, 100000000 * rng.next() * rng.next())
  );
  const hash = genHex(64, rng);
  const blockHash = genHex(64, rng);
  // Approximate a block height assuming one block per 15 minutes (900000 ms).
  const blockHeight =
    account.blockHeight - Math.floor((Date.now() - date.valueOf()) / 900000);
  return {
    id: `mock_op_${ops.length}_${type}_${account.id}`,
    hash,
    type,
    value: new BigNumber(0),
    fee: new BigNumber(0),
    senders: [address],
    recipients: [address],
    blockHash,
    blockHeight,
    accountId: account.id,
    date,
    extra: {},
  };
}
/**
* Generates a cosmos delegation operation updating both operations list and account cosmos resources
* @memberof cosmos/mock
* @param {Account} account
* @param {Prando} rng
*/
function addDelegationOperation(account: Account, rng: Prando): Account {
  const { spendableBalance } = account;
  // Start from the existing cosmos resources, or an empty template on first call.
  const cosmosResources: CosmosResources = account.cosmosResources
    ? account.cosmosResources
    : {
        delegations: [],
        delegatedBalance: new BigNumber(0),
        pendingRewardsBalance: new BigNumber(0),
        unbondingBalance: new BigNumber(0),
        withdrawAddress: "",
        unbondings: [],
        redelegations: [],
      };
  // Nothing left to delegate.
  if (spendableBalance.isZero()) return account;
  /** select position on the operation stack where we will insert the new delegation */
  // NOTE(review): rng.next(0, 10) yields a float; splice tolerates it, but
  // rng.nextInt may have been intended — confirm.
  const opIndex = rng.next(0, 10);
  const delegationOp = genBaseOperation(account, rng, "DELEGATE", opIndex);
  const feeOp = genBaseOperation(account, rng, "FEES", opIndex);
  // Total amount available to spread across validators (spendable + already delegated).
  const value = spendableBalance.plus(cosmosResources.delegatedBalance);
  /** select between 3 to 5 validators and split the amount evenly */
  const delegatedValidators = Array.from({
    length: rng.nextInt(3, 5),
  })
    .map(() => rng.nextArrayItem(validators))
    // Drop duplicate picks so each validator appears at most once.
    .filter(
      (validator, index, arr) =>
        arr.findIndex(
          (v) => v.validatorAddress === validator.validatorAddress
        ) === index
    )
    // Each share is a random fraction of the total; the shares need not sum
    // exactly to `value` — acceptable for mock data.
    .map(({ validatorAddress }, i, arr) => ({
      address: validatorAddress,
      amount: new BigNumber(
        Math.round(value.toNumber() * rng.next(0.1, 1 / arr.length))
      ),
    }));
  delegationOp.extra = {
    validators: delegatedValidators,
  };
  /** format delegations and randomize rewards and status */
  const delegations: CosmosDelegation[] = delegatedValidators.map(
    ({ address, amount }) => ({
      validatorAddress: address,
      amount,
      // ~1% pending rewards half of the time, otherwise none.
      pendingRewards: rng.nextBoolean()
        ? new BigNumber(Math.round(amount.toNumber() * 0.01))
        : new BigNumber(0),
      status: rng.next() > 0.33 ? "bonded" : "unbonded",
    })
  );
  // Replaces the previous delegation set wholesale.
  setCosmosResources(account, delegations, undefined, undefined, undefined);
  setOperationFeeValue(
    delegationOp,
    account.cosmosResources
      ? account.cosmosResources.delegatedBalance
      : new BigNumber(0)
  );
  setOperationFeeValue(
    feeOp,
    account.cosmosResources
      ? account.cosmosResources.delegatedBalance
      : new BigNumber(0)
  );
  // Refresh the spendable balance before inserting the two new operations.
  postSyncAccount(account);
  account.operations.splice(opIndex, 0, delegationOp, feeOp);
  account.operationsCount += 2;
  return account;
}
/**
* Generates a cosmos redelegation operation updating both operations list and account cosmos resources
* @memberof cosmos/mock
* @param {Account} account
* @param {Prando} rng
*/
function addRedelegationOperation(account: Account, rng: Prando): Account {
  // Start from the existing cosmos resources, or an empty template on first call.
  const cosmosResources: CosmosResources = account.cosmosResources
    ? account.cosmosResources
    : {
        delegations: [],
        delegatedBalance: new BigNumber(0),
        pendingRewardsBalance: new BigNumber(0),
        unbondingBalance: new BigNumber(0),
        withdrawAddress: "",
        unbondings: [],
        redelegations: [],
      };
  // A redelegation needs at least one existing delegation to move funds from.
  if (!cosmosResources.delegations.length) return account;
  /** select position on the operation stack where we will insert the new redelegation */
  const opIndex = rng.next(0, 10);
  const redelegationOp = genBaseOperation(account, rng, "REDELEGATE", opIndex);
  const fromDelegation = rng.nextArrayItem(cosmosResources.delegations);
  // Redelegate a random fraction (10%-100%) of the source delegation.
  const amount = new BigNumber(
    Math.round(fromDelegation.amount.toNumber() * rng.next(0.1, 1))
  );
  const toDelegation = rng.nextArrayItem(validators);
  redelegationOp.extra = {
    validator: {
      address: toDelegation.validatorAddress,
      amount,
    },
    cosmosSourceValidator: fromDelegation.validatorAddress,
  };
  // Move `amount` from the source delegation to the destination validator:
  // decrement the source (dropping it if emptied), keep every other
  // delegation, then append the new destination entry.
  // Fix: the previous code filtered with `===`, which kept ONLY the
  // undecremented source delegation, silently discarding all other
  // delegations and inflating the delegated balance.
  const delegations = cosmosResources.delegations
    .map((delegation) =>
      delegation.validatorAddress === fromDelegation.validatorAddress
        ? { ...delegation, amount: delegation.amount.minus(amount) }
        : delegation
    )
    .filter(({ amount: remaining }) => remaining.gt(0))
    .concat([
      {
        validatorAddress: toDelegation.validatorAddress,
        amount,
        // ~1% pending rewards half of the time, otherwise none.
        pendingRewards: rng.nextBoolean()
          ? new BigNumber(Math.round(amount.toNumber() * 0.01))
          : new BigNumber(0),
        status: rng.next() > 0.33 ? "bonded" : "unbonded",
      },
    ]);
  setCosmosResources(account, delegations, undefined, undefined, [
    {
      validatorSrcAddress: fromDelegation.validatorAddress,
      validatorDstAddress: toDelegation.validatorAddress,
      amount,
      // Mirrors the Cosmos 21-day redelegation completion period.
      completionDate: new Date(Date.now() + 21 * 24 * 60 * 60 * 1000),
    },
  ]);
  setOperationFeeValue(redelegationOp, amount);
  account.operations.splice(opIndex, 0, redelegationOp);
  account.operationsCount++;
  return account;
}
/**
 * Generates a cosmos claim-rewards operation updating both operations list and account cosmos resources
 * @memberof cosmos/mock
 * @param {Account} account
 * @param {Prando} rng
 */
function addClaimRewardsOperation(account: Account, rng: Prando): Account {
  /** Generate a REWARD operation claiming the pending rewards of one random delegation. */
  const cosmosResources: CosmosResources = account.cosmosResources
    ? account.cosmosResources
    : {
        delegations: [],
        delegatedBalance: new BigNumber(0),
        pendingRewardsBalance: new BigNumber(0),
        unbondingBalance: new BigNumber(0),
        withdrawAddress: "",
        unbondings: [],
        redelegations: [],
      };
  // Nothing to claim without at least one delegation.
  if (!cosmosResources.delegations.length) return account;
  // Position in the operation stack where the claim is inserted.
  const opIndex = rng.next(0, 10);
  const claimRewardOp = genBaseOperation(account, rng, "REWARD", opIndex);
  const fromDelegation = rng.nextArrayItem(cosmosResources.delegations);
  // Claim the tracked pending rewards, or fall back to ~1% of the stake.
  const amount = fromDelegation.pendingRewards.gt(0)
    ? fromDelegation.pendingRewards
    : new BigNumber(Math.round(fromDelegation.amount.toNumber() * 0.01));
  claimRewardOp.extra = {
    validator: {
      address: fromDelegation.validatorAddress,
      amount,
    },
  };
  // Zero out the claimed validator's pending rewards, leaving the others untouched.
  const delegations = cosmosResources.delegations.map((delegation) => {
    const claimed =
      delegation.validatorAddress === fromDelegation.validatorAddress;
    return {
      ...delegation,
      pendingRewards: claimed ? new BigNumber(0) : delegation.pendingRewards,
    };
  });
  setCosmosResources(account, delegations, undefined, undefined, undefined);
  claimRewardOp.fee = new BigNumber(Math.round(amount.toNumber() * 0.001));
  claimRewardOp.value = amount;
  account.operations.splice(opIndex, 0, claimRewardOp);
  account.operationsCount++;
  return account;
}
/**
* Generates a cosmos undelegation operation updating both operations list and account cosmos resources
* @memberof cosmos/mock
* @param {Account} account
* @param {Prando} rng
*/
function addUndelegationOperation(account: Account, rng: Prando): Account {
const cosmosResources: CosmosResources = account.cosmosResources
? account.cosmosResources
: {
delegations: [],
delegatedBalance: new BigNumber(0),
pendingRewardsBalance: new BigNumber(0),
unbondingBalance: new BigNumber(0),
withdrawAddress: "",
unbondings: [],
redelegations: [],
};
if (!cosmosResources.delegations.length) return account;
/** select position on the operation stack where we will insert the new claim rewards */
const opIndex = rng.next(0, 10);
const undelegationOp = genBaseOperation(account, rng, "UNDELEGATE", opIndex);
const fromDelegation = rng.nextArrayItem(cosmosResources.delegations);
const amount = new BigNumber(
Math.round(
fromDelegation.amount.toNumber() *
(rng.nextBoolean() ? rng.next(0.1, 1) : 1)
)
);
const claimedReward = fromDelegation.pendingRewards;
undelegationOp.extra = {
validator: {
address: fromDelegation.validatorAddress,
amount,
},
};
const delegations = cosmosResources.delegations
.map((delegation) => ({
...delegation,
amount:
delegation.validatorAddress === fromDelegation.validatorAddress
? delegation.amount.minus(amount)
: delegation.amount,
pendingRewards: new BigNumber(0),
}))
.filter(({ amount }) => amount.gt(0));
setCosmosResources(
account,
delegations,
amount,
[
{
validatorAddress: fromDelegation.validatorAddress,
amount,
completionDate: new Date(Date.now() + 21 * 24 * 60 * 60 * 1000),
},
],
undefined
);
undelegationOp.fee = new BigNumber(Math.round(amount.toNumber() * 0.001));
undelegationOp.value = undelegationOp.fee.minus(claimedReward);
account.operations.splice(opIndex, 0, undelegationOp);
account.operationsCount++;
return account;
}
/**
* add in specific cosmos operations
* @memberof cosmos/mock
* @param {Account} account
* @param {Prando} rng
*/
function genAccountEnhanceOperations(account: Account, rng: Prando): Account {
  /** Run the cosmos-specific mock generators against the account, in a fixed order. */
  const steps = [
    addDelegationOperation,
    addRedelegationOperation,
    addClaimRewardsOperation,
    addUndelegationOperation,
    addDelegationOperation,
  ];
  for (const step of steps) {
    step(account, rng);
  }
  return account;
}
/**
* Update spendable balance for the account based on delegation data
* @memberof cosmos/mock
* @param {Account} account
*/
function postSyncAccount(account: Account): Account {
  /** Recompute spendable balance = balance − delegated − unbonding. */
  const resources = account?.cosmosResources;
  const zero = new BigNumber(0);
  account.spendableBalance = account.balance
    .minus(resources?.delegatedBalance ?? zero)
    .minus(resources?.unbondingBalance ?? zero);
  return account;
}
/**
* post account scan data logic
* clears account cosmos resources if supposed to be empty
* @memberof cosmos/mock
* @param {Account} account
*/
function postScanAccount(
  account: Account,
  {
    isEmpty,
  }: {
    isEmpty: boolean;
  }
): Account {
  /** On an empty scanned account, reset cosmos resources and wipe operations. */
  if (!isEmpty) {
    return account;
  }
  account.cosmosResources = {
    delegations: [],
    delegatedBalance: new BigNumber(0),
    pendingRewardsBalance: new BigNumber(0),
    unbondingBalance: new BigNumber(0),
    // NOTE(review): the account id stands in for a withdraw address in this mock.
    withdrawAddress: account.id,
    unbondings: [],
    redelegations: [],
  };
  account.operations = [];
  return account;
}
// Mock implementation of the cosmos family hooks consumed by the generic
// account mock generator.
export default {
  genAccountEnhanceOperations,
  postSyncAccount,
  postScanAccount,
};
import * as assert from 'assert';
// Unit tests for the AnnotatedText utilities; imports the module under test
// directly from the extension sources.
import * as text from '../src/util/AnnotatedText';
// Defines a Mocha test suite to group tests of similar kind together
describe("AnnotatedText", () => {
it("textToString", (() => {
assert.equal(text.textToString("foo"), "foo");
assert.equal(text.textToString(["foo","bar"]), "foobar");
assert.equal(text.textToString([{scope:"aa",text:"foo"},"bar"]), "foobar");
assert.equal(text.textToString([{scope:"aa",text:["foo","!!"]},"bar"]), "foo!!bar");
assert.equal(text.textToString([{substitution:"FOO",diff:"added",text:"foo"},"bar"]), "foobar");
assert.equal(text.textToString([{substitution:"∀", text:"forall"}," x : nat, x = x ",{substitution:"∨", text:"\\/"}," ",{substitution:"⊥", text:"False"}]), "forall x : nat, x = x \\/ False");
assert.equal(text.textToString(["0 = 0 ",{substitution:"∨",text:"\\/"}," ",{substitution:"⊥",text:"False"}]), "0 = 0 \\/ False");
}));
it("textToDisplayString", (() => {
assert.equal(text.textToDisplayString("foo"), "foo");
assert.equal(text.textToDisplayString(["foo","bar"]), "foobar");
assert.equal(text.textToDisplayString([{scope:"aa",text:"foo"},"bar"]), "foobar");
assert.equal(text.textToDisplayString([{scope:"aa",text:["foo","!!"]},"bar"]), "foo!!bar");
assert.equal(text.textToDisplayString([{substitution:"FOO!!",diff:"added",text:"foo"},"bar"]), "FOO!!bar");
assert.equal(text.textToDisplayString([{substitution:"∀", text:"forall"}," x : nat, x = x ",{substitution:"∨", text:"\\/"}," ",{substitution:"⊥", text:"False"}]), "∀ x : nat, x = x ∨ ⊥");
assert.equal(text.textToDisplayString(["0 = 0 ",{substitution:"∨",text:"\\/"}," ",{substitution:"⊥",text:"False"}]), "0 = 0 ∨ ⊥");
assert.equal(text.textToDisplayString([{scope:"aa", text: [{substitution:"FOO!!",diff:"added",text:"foo"},"bar"]}, "dee"]), "FOO!!bardee");
}));
it("textLength", (() => {
assert.equal(text.textLength("foo"), 3);
assert.equal(text.textLength(["foo","bar"]), 6);
assert.equal(text.textLength([{scope:"aa",text:"foo"},"bar"]), 6);
assert.equal(text.textLength([{scope:"aa",text:["foo","!!"]},"bar"]), 8);
assert.equal(text.textLength([{substitution:"FOO",diff:"added",text:"foo"},"bar"]), 6);
}));
it("textDisplayLength", (() => {
assert.equal(text.textDisplayLength("foo"), 3);
assert.equal(text.textDisplayLength(["foo","bar"]), 6);
assert.equal(text.textDisplayLength([{scope:"aa",text:"foo"},"bar"]), 6);
assert.equal(text.textDisplayLength([{scope:"aa",text:["foo","!!"]},"bar"]), 8);
assert.equal(text.textDisplayLength([{substitution:"FOO!!",diff:"added",text:"foo"},"bar"]), 8);
assert.equal(text.textDisplayLength([{scope:"aa", text: [{substitution:"FOO!!",diff:"added",text:"foo"},"bar"]}, "dee"]), 11);
}));
it("isScopedText", (() => {
assert(!text.isScopedText({text:'aa',diff:'added'}));
assert(!text.isScopedText({text:'bb',diff:'added'}));
}));
it("isTextAnnotation", (() => {
assert(text.isTextAnnotation({text:'aa',diff:'added'}));
assert(text.isTextAnnotation({text:'bb',diff:'added'}));
}));
it("compatibleAnnotations", (() => {
assert(text.compatibleAnnotations({text:'aa',diff:'added'},{text:'bb',diff:'added'}));
}));
it("tryCombineText", (() => {
assert.deepStrictEqual(text.tryCombineText({text:'aa',diff:'added'},{text:'bb',diff:'added'}), { diff: 'added', text: 'aabb' });
assert.deepStrictEqual(text.tryCombineText({text:'aa',diff:'added'},""), { diff: 'added', text: 'aa' });
assert.deepStrictEqual(text.tryCombineText("",{text:'aa',diff:'added'}), { diff: 'added', text: 'aa' });
}));
it("normalizeText", (() => {
assert.equal(text.normalizeText("foo"), "foo");
assert.equal(text.normalizeText(["foo","bar"]), "foobar");
assert.equal(text.normalizeText(["foo","\n","bar"]), "foo\nbar");
assert.deepStrictEqual(text.normalizeText([{scope:"aa",text:"foo"}, ""]), {scope:"aa", text:"foo"});
assert.deepStrictEqual(text.normalizeText({scope:"aa",attributes: {}, text:"foo"}), {scope:"aa", text:"foo"});
assert.deepStrictEqual(text.normalizeText([{scope:"aa",text:"foo"},"bar"]), [{scope:"aa",text:"foo"},"bar"]);
assert.deepStrictEqual(text.normalizeText([{scope:"aa",text:["foo","!!"]},"bar"]), [{scope:"aa",text:"foo!!"},"bar"]);
assert.deepStrictEqual(text.normalizeText([{substitution:"FOO!!",diff:"added",text:"foo"},"bar"]), [{substitution:"FOO!!",diff:"added",text:"foo"},"bar"]);
assert.deepStrictEqual(text.normalizeText([{scope:"aa",text:["foo","!!"]},{scope:"aa",text:["bar"]}]), {scope:"aa",text:"foo!!bar"});
assert.deepStrictEqual(text.normalizeText([{scope:"aa",text:["foo","!!"]},"\n",{scope:"aa",text:["bar"]}]), [{scope:"aa",text:"foo!!"},"\n",{scope:"aa",text:"bar"}]);
assert.deepStrictEqual(text.normalizeText([{scope:"aa",text:{scope:"",text:["foo","!!"]}},"bar"]), [{scope:"aa",text:"foo!!"},"bar"]);
assert.deepStrictEqual(text.normalizeText([{scope:"aa",text:{scope:"",text:["foo","!!"]}},{scope:"aa",text:["bar"]}]), {scope:"aa",text:"foo!!bar"});
assert.deepStrictEqual(text.normalizeText({diff:"added",text:"aabbaa"}),{diff:"added",text:"aabbaa"});
assert.deepStrictEqual(text.normalizeText([{text: 'aa',diff: 'added'},{text: 'bb',diff: 'added'},{text: 'aa',diff: 'added'}]), { diff: 'added', text: 'aabbaa' });
function notation(s: text.AnnotatedText) : (string | text.TextAnnotation | text.ScopedText) {
return {scope: "constr.notation", text: s}
}
function variable(s: text.AnnotatedText) : (string | text.TextAnnotation | text.ScopedText) {
return {scope: "constr.variable", text: s}
}
const x1 = [notation("["),variable("d")];
assert.deepStrictEqual(text.normalizeText(x1), x1);
const x2 = [notation("["),variable("d"),notation("]")," ",notation("=")," ",notation("[]")];
assert.deepStrictEqual(text.normalizeText(x2), x2);
}));
it("textSplit", (() => {
assert.deepStrictEqual(text.textSplit("foo bar", " "), {splits: ["foo", "bar"], rest: []});
assert.deepStrictEqual(text.textSplit("foo bar", " "), {splits: ["foo", "bar"], rest: []});
assert.deepStrictEqual(text.textSplit(["foo bar", " dee doo "], " "), {splits: ["foo", "bar", "dee", "doo"], rest: []});
assert.deepStrictEqual(text.textSplit([{scope:"aa",text:"foo"}," bar"], " "), {splits: [{scope:"aa",text:"foo"}, "bar"], rest: []});
assert.deepStrictEqual(text.textSplit([{scope:"aa",text:"foo buh "}," bar"], " "), {splits: [{scope:"aa",text:"foo"},{scope:"aa",text:"buh"},"bar"], rest: []});
assert.deepStrictEqual(text.textSplit("H1 : nat := 1=1", /(:=|:)([^]*)/), {splits: ["H1 ", ":", " nat := 1=1"], rest: []});
assert.deepStrictEqual(text.textSplit(["H1 ",{diff: "added", text: ": nat := 1=1"}], /(:=|:)([^]*)/), {splits: ["H1 ", {diff:"added",text:":"}, {diff:"added",text:" nat := 1=1"}], rest: []});
assert.deepStrictEqual(text.textSplit(["H1 ",{diff: "added", text: ": nat := 1=1"}], /(:=|:)([^]*)/,2), {splits: ["H1 ", {diff:"added",text:":"}], rest: [{diff:"added",text:" nat := 1=1"}]});
assert.deepStrictEqual(text.textSplit(["H1 ",{diff: "added", text: ": nat := 1=1"}], /(:=|:)([^]*)/,3), {splits: ["H1 ", {diff:"added",text:":"}, {diff:"added",text:" nat := 1=1"}], rest: []});
}));
it("mapAnnotation", (() => {
let hist : [string,text.Annotation,number,number][] = [];
let x : text.AnnotatedText = "foo";
assert.deepStrictEqual(text.mapAnnotation(x,(plainText,annotation,start, startD) => {
hist.push([plainText,annotation,start, startD])
return Object.assign(text.copyAnnotation(annotation),{text:plainText})
}),{ text: 'foo' })
assert.deepStrictEqual(hist, [["foo",{},0,0]]);
//////
hist = [];
x = [{substitution: "bar!!", text: "foo"}, "def"]
assert.deepStrictEqual(text.mapAnnotation(x,(plainText,annotation,start, startD) => {
hist.push([plainText,annotation,start, startD])
return Object.assign(text.copyAnnotation(annotation),{text:plainText})
}),[{substitution: "bar!!", text: "foo"}, {text: "def"}])
assert.deepStrictEqual(hist, [
["foo",{substitution: 'bar!!'},0,0],
["def",{},3,5]
]);
}));
it("diffText", (() => {
assert.deepStrictEqual(text.diffText("aaaa","aabbaa").text, {diff:"added",text:"aabbaa"});
assert.deepStrictEqual(text.diffText("aa aa","aa bb aa").text, ["aa ",{diff:"added",text:"bb "},"aa"]);
assert.deepStrictEqual(text.diffText("aa bb aa","aa aa",false).text, ["aa ","aa"]);
assert.deepStrictEqual(text.diffText("aa bb aa","aa aa").text, "aa aa");
assert.deepStrictEqual(text.diffText(["aa","aa"],["aa","bb","aa"],false).text, [{diff:"added",text:"aa"},{diff:"added",text:"bb"},{diff:"added",text:"aa"}]);
assert.deepStrictEqual(text.diffText(["aa","aa"],["aa","bb","aa"]).text, {diff:"added",text:"aabbaa"});
assert.deepStrictEqual(text.diffText({scope: "foo", text:"aa bb aa"},{scope: "bar", text: "aa aa"},false).text, {scope: "bar", text: ["aa ","aa"]});
assert.deepStrictEqual(text.diffText({scope: "foo", text:"aa bb aa"},{scope: "bar", text: "aa aa"}).text, {scope: "bar", text: "aa aa"});
assert.deepStrictEqual(text.diffText({diff: "added", text:"aa bb aa"},{scope: "bar", text: "aa aa"},false).text, {scope: "bar", text: ["aa ","aa"]});
assert.deepStrictEqual(text.diffText({diff: "added", text:"aa bb aa"},{scope: "bar", text: "aa aa"}).text, {scope: "bar", text: "aa aa"});
assert.deepStrictEqual(text.diffText("aa bb aa",{scope: "bar", text: "aa aa"},false).text, {scope: "bar", text: ["aa ","aa"]});
assert.deepStrictEqual(text.diffText("aa bb aa",{scope: "bar", text: "aa aa"}).text, {scope: "bar", text: "aa aa"});
assert.deepStrictEqual(text.diffText({substitution: "AA", text: "aa"},{substitution: "BBB", text: "aa"}).text, {diff: "added", substitution: "BBB", text: "aa"});
assert.deepStrictEqual(text.diffText(
{substitution: "AA AA", text:"aa aa"},
{substitution: "AA BB AA", text: "aa bb aa"}).text,
[{substitution:"AA ",text:"aa bb aa"},{diff:"added",substitution:"BB ",text:""},{substitution:"AA", text:""}]);
assert.deepStrictEqual(text.diffText(
[{substitution: "AA", text:"aa"},{substitution: "AA", text:"aa"}],
[{substitution: "AA", text:"aa"},{substitution: "BB", text:"bb"},{substitution: "AA", text:"aa"}]).text,
[{diff:"added", substitution: "AA", text:"aa"},{diff:"added", substitution: "BB", text:"bb"},{diff:"added", substitution: "AA", text:"aa"}]);
/////
let x = [{substitution:"∀", text:"forall"}," x : nat, x = x ",{substitution:"∨", text:"\\/"}," ",{substitution:"⊥", text:"False"}]; // "∀ x : nat, x = x ∨ ⊥"
let y = ["0 = 0 ",{substitution:"∨",text:"\\/"}," ",{substitution:"⊥",text:"False"}]; // "0 = 0 \\/ False" ~~ "0 = 0 ∨ ⊥"
// "[∀]<0> [x : nat, x ]= [x]<0> ∨ ⊥" --> "<0> = <0> ∨ ⊥"
assert.deepStrictEqual(text.diffText(x,y).text,[{diff: "added", text: "0"}, " = ", {diff: "added", text: "0"}, " ", {substitution:"∨", text:"\\/"}, " ", {substitution:"⊥", text:"False"}]);
}));
it("subtext", function() {
assert.deepStrictEqual(text.subtext("abcdefghij", 0), "abcdefghij")
assert.deepStrictEqual(text.subtext("abcdefghij", 5), "fghij")
assert.deepStrictEqual(text.subtext("abcdefghij", 3, 8), "defgh")
assert.deepStrictEqual(text.subtext(["a","bcd","ef","ghij"], 3), "defghij")
assert.deepStrictEqual(text.subtext(["a","bcd","ef","ghij"], 3, 8), "defgh")
assert.deepStrictEqual(text.subtext({scope: "foo", text:"aa bb aa"},3,5), {scope: "foo", text: "bb"});
assert.deepStrictEqual(text.subtext([{scope: "a", text:"aa"},{scope: "b", text:"bb"}],1,3), [{scope: "a", text:"a"},{scope: "b", text:"b"}]);
})
}); | the_stack |
import path from 'path';
import {
CodeAction,
CodeActionParams,
ColorInformation,
ColorPresentation,
ColorPresentationParams,
CompletionItem,
CompletionList,
CompletionParams,
CompletionTriggerKind,
Definition,
Diagnostic,
DocumentColorParams,
DocumentFormattingParams,
DocumentHighlight,
DocumentLink,
DocumentLinkParams,
DocumentSymbolParams,
FileRename,
FoldingRange,
FoldingRangeParams,
Hover,
Location,
SemanticTokens,
SemanticTokensBuilder,
SemanticTokensParams,
SemanticTokensRangeParams,
SignatureHelp,
SymbolInformation,
TextDocumentEdit,
TextDocumentPositionParams,
TextEdit
} from 'vscode-languageserver';
import { TextDocument } from 'vscode-languageserver-textdocument';
import { URI } from 'vscode-uri';
import { LanguageId } from '../embeddedSupport/embeddedSupport';
import { LanguageMode, LanguageModes } from '../embeddedSupport/languageModes';
import { NULL_COMPLETION, NULL_HOVER, NULL_SIGNATURE } from '../modes/nullMode';
import { DocumentContext, CodeActionData, SemanticTokenData } from '../types';
import { VCancellationToken } from '../utils/cancellationToken';
import { getFileFsPath } from '../utils/paths';
import { DependencyService } from './dependencyService';
import { DocumentService } from './documentService';
import { EnvironmentService } from './EnvironmentService';
import { RefTokensService } from './RefTokenService';
import { VueInfoService } from './vueInfoService';
/**
 * Per-project language service facade. One instance exists per Vetur project
 * root; each method implements an LSP request by dispatching to the language
 * mode that covers the relevant document region.
 */
export interface ProjectService {
  env: EnvironmentService;
  languageModes: LanguageModes;
  onDocumentFormatting(params: DocumentFormattingParams): Promise<TextEdit[]>;
  onCompletion(params: CompletionParams): Promise<CompletionList>;
  onCompletionResolve(item: CompletionItem): Promise<CompletionItem>;
  onHover(params: TextDocumentPositionParams): Promise<Hover>;
  onDocumentHighlight(params: TextDocumentPositionParams): Promise<DocumentHighlight[]>;
  onDefinition(params: TextDocumentPositionParams): Promise<Definition>;
  onReferences(params: TextDocumentPositionParams): Promise<Location[]>;
  onDocumentLinks(params: DocumentLinkParams): Promise<DocumentLink[]>;
  onDocumentSymbol(params: DocumentSymbolParams): Promise<SymbolInformation[]>;
  onDocumentColors(params: DocumentColorParams): Promise<ColorInformation[]>;
  onColorPresentations(params: ColorPresentationParams): Promise<ColorPresentation[]>;
  onSignatureHelp(params: TextDocumentPositionParams): Promise<SignatureHelp | null>;
  onFoldingRanges(params: FoldingRangeParams): Promise<FoldingRange[]>;
  onCodeAction(params: CodeActionParams): Promise<CodeAction[]>;
  onCodeActionResolve(action: CodeAction): Promise<CodeAction>;
  onWillRenameFile(fileRename: FileRename): Promise<TextDocumentEdit[]>;
  onSemanticTokens(params: SemanticTokensParams | SemanticTokensRangeParams): Promise<SemanticTokens>;
  // Returns null when validation was cancelled before completing.
  doValidate(doc: TextDocument, cancellationToken?: VCancellationToken): Promise<Diagnostic[] | null>;
  dispose(): Promise<void>;
}
export async function createProjectService(
env: EnvironmentService,
documentService: DocumentService,
globalSnippetDir: string | undefined,
dependencyService: DependencyService,
refTokensService: RefTokensService
): Promise<ProjectService> {
const vueInfoService = new VueInfoService();
const languageModes = new LanguageModes();
function getValidationFlags(): Record<string, boolean> {
const config = env.getConfig();
return {
'vue-html': config.vetur.validation.template || config.vetur.validation.templateProps,
css: config.vetur.validation.style,
postcss: config.vetur.validation.style,
scss: config.vetur.validation.style,
less: config.vetur.validation.style,
javascript: config.vetur.validation.script
};
}
vueInfoService.init(languageModes);
await languageModes.init(
env,
{
infoService: vueInfoService,
dependencyService,
refTokensService
},
globalSnippetDir
);
return {
env,
languageModes,
async onDocumentFormatting({ textDocument, options }) {
if (!env.getConfig().vetur.format.enable) {
return [];
}
const doc = documentService.getDocument(textDocument.uri)!;
const modeRanges = languageModes.getAllLanguageModeRangesInDocument(doc);
const allEdits: TextEdit[] = [];
const errMessages: string[] = [];
modeRanges.forEach(modeRange => {
if (modeRange.mode && modeRange.mode.format) {
try {
const edits = modeRange.mode.format(doc, { start: modeRange.start, end: modeRange.end }, options);
for (const edit of edits) {
allEdits.push(edit);
}
} catch (err) {
errMessages.push((err as Error).toString());
}
}
});
if (errMessages.length !== 0) {
console.error('Formatting failed: "' + errMessages.join('\n') + '"');
return [];
}
return allEdits;
},
async onCompletion({ textDocument, position, context }) {
const doc = documentService.getDocument(textDocument.uri)!;
const mode = languageModes.getModeAtPosition(doc, position);
if (mode && mode.doComplete) {
/**
* Only use space as trigger character in `vue-html` mode
*/
if (
mode.getId() !== 'vue-html' &&
context &&
context?.triggerKind === CompletionTriggerKind.TriggerCharacter &&
context.triggerCharacter === ' '
) {
return NULL_COMPLETION;
}
return mode.doComplete(doc, position);
}
return NULL_COMPLETION;
},
async onCompletionResolve(item) {
if (item.data) {
const uri: string = item.data.uri;
const languageId: LanguageId = item.data.languageId;
/**
* Template files need to go through HTML-template service
*/
if (uri.endsWith('.template')) {
const doc = documentService.getDocument(uri.slice(0, -'.template'.length));
const mode = languageModes.getMode(languageId);
if (doc && mode && mode.doResolve) {
return mode.doResolve(doc, item);
}
}
if (uri && languageId) {
const doc = documentService.getDocument(uri);
const mode = languageModes.getMode(languageId);
if (doc && mode && mode.doResolve) {
return mode.doResolve(doc, item);
}
}
}
return item;
},
async onHover({ textDocument, position }) {
const doc = documentService.getDocument(textDocument.uri)!;
const mode = languageModes.getModeAtPosition(doc, position);
if (mode && mode.doHover) {
return mode.doHover(doc, position);
}
return NULL_HOVER;
},
async onDocumentHighlight({ textDocument, position }) {
const doc = documentService.getDocument(textDocument.uri)!;
const mode = languageModes.getModeAtPosition(doc, position);
if (mode && mode.findDocumentHighlight) {
return mode.findDocumentHighlight(doc, position);
}
return [];
},
async onDefinition({ textDocument, position }) {
const doc = documentService.getDocument(textDocument.uri)!;
const mode = languageModes.getModeAtPosition(doc, position);
if (mode && mode.findDefinition) {
return mode.findDefinition(doc, position);
}
return [];
},
async onReferences({ textDocument, position }) {
const doc = documentService.getDocument(textDocument.uri)!;
const mode = languageModes.getModeAtPosition(doc, position);
if (mode && mode.findReferences) {
return mode.findReferences(doc, position);
}
return [];
},
async onDocumentLinks({ textDocument }) {
const doc = documentService.getDocument(textDocument.uri)!;
const documentContext: DocumentContext = {
resolveReference: ref => {
if (ref[0] === '/') {
return URI.file(path.resolve(env.getProjectRoot(), ref)).toString();
}
const fsPath = getFileFsPath(doc.uri);
return URI.file(path.resolve(fsPath, '..', ref)).toString();
}
};
const links: DocumentLink[] = [];
languageModes.getAllLanguageModeRangesInDocument(doc).forEach(m => {
if (m.mode.findDocumentLinks) {
links.push.apply(links, m.mode.findDocumentLinks(doc, documentContext));
}
});
return links;
},
async onDocumentSymbol({ textDocument }) {
const doc = documentService.getDocument(textDocument.uri)!;
const symbols: SymbolInformation[] = [];
languageModes.getAllLanguageModeRangesInDocument(doc).forEach(m => {
if (m.mode.findDocumentSymbols) {
symbols.push.apply(symbols, m.mode.findDocumentSymbols(doc));
}
});
return symbols;
},
async onDocumentColors({ textDocument }) {
const doc = documentService.getDocument(textDocument.uri)!;
const colors: ColorInformation[] = [];
const distinctModes: Set<LanguageMode> = new Set();
languageModes.getAllLanguageModeRangesInDocument(doc).forEach(m => {
distinctModes.add(m.mode);
});
for (const mode of distinctModes) {
if (mode.findDocumentColors) {
colors.push.apply(colors, mode.findDocumentColors(doc));
}
}
return colors;
},
async onColorPresentations({ textDocument, color, range }) {
const doc = documentService.getDocument(textDocument.uri)!;
const mode = languageModes.getModeAtPosition(doc, range.start);
if (mode && mode.getColorPresentations) {
return mode.getColorPresentations(doc, color, range);
}
return [];
},
async onSignatureHelp({ textDocument, position }) {
const doc = documentService.getDocument(textDocument.uri)!;
const mode = languageModes.getModeAtPosition(doc, position);
if (mode && mode.doSignatureHelp) {
return mode.doSignatureHelp(doc, position);
}
return NULL_SIGNATURE;
},
async onFoldingRanges({ textDocument }) {
const doc = documentService.getDocument(textDocument.uri)!;
const lmrs = languageModes.getAllLanguageModeRangesInDocument(doc);
const result: FoldingRange[] = [];
lmrs.forEach(lmr => {
if (lmr.mode.getFoldingRanges) {
lmr.mode.getFoldingRanges(doc).forEach(r => result.push(r));
}
result.push({
startLine: lmr.start.line,
startCharacter: lmr.start.character,
endLine: lmr.end.line,
endCharacter: lmr.end.character
});
});
return result;
},
async onCodeAction({ textDocument, range, context }: CodeActionParams) {
if (!env.getConfig().vetur.languageFeatures.codeActions) {
return [];
}
const doc = documentService.getDocument(textDocument.uri)!;
const mode = languageModes.getModeAtPosition(doc, range.start);
if (languageModes.getModeAtPosition(doc, range.end) !== mode) {
return [];
}
if (mode && mode.getCodeActions) {
return mode.getCodeActions(doc, range, /*formatParams*/ {} as any, context);
}
return [];
},
async onCodeActionResolve(action) {
const data = action.data as CodeActionData | undefined;
if (data) {
const uri: string = data.uri;
const languageId: LanguageId = data.languageId;
if (uri && languageId) {
const doc = documentService.getDocument(uri);
const mode = languageModes.getMode(languageId);
if (doc && mode && mode.doCodeActionResolve) {
return mode.doCodeActionResolve(doc, action);
}
}
}
return action;
},
async onWillRenameFile(fileRename: FileRename) {
if (!env.getConfig().vetur.languageFeatures.updateImportOnFileMove) {
return [];
}
const textDocumentEdit = languageModes.getMode('typescript')?.getRenameFileEdit?.(fileRename);
return textDocumentEdit ?? [];
},
async onSemanticTokens(params: SemanticTokensParams | SemanticTokensRangeParams) {
if (!env.getConfig().vetur.languageFeatures.semanticTokens) {
return {
data: []
};
}
const { textDocument } = params;
const range = 'range' in params ? params.range : undefined;
const doc = documentService.getDocument(textDocument.uri)!;
const modes = languageModes.getAllLanguageModeRangesInDocument(doc);
const data: SemanticTokenData[] = [];
for (const mode of modes) {
const tokenData = mode.mode.getSemanticTokens?.(doc, range);
data.push(...(tokenData ?? []));
}
const builder = new SemanticTokensBuilder();
const sorted = data.sort((a, b) => {
return a.line - b.line || a.character - b.character;
});
sorted.forEach(token =>
builder.push(token.line, token.character, token.length, token.classificationType, token.modifierSet)
);
return builder.build();
},
async doValidate(doc: TextDocument, cancellationToken?: VCancellationToken) {
const diagnostics: Diagnostic[] = [];
if (doc.languageId === 'vue') {
const validationFlags = getValidationFlags();
for (const lmr of languageModes.getAllLanguageModeRangesInDocument(doc)) {
if (lmr.mode.doValidation) {
if (validationFlags[lmr.mode.getId()]) {
diagnostics.push.apply(diagnostics, await lmr.mode.doValidation(doc, cancellationToken));
}
// Special case for template type checking
else if (
lmr.mode.getId() === 'vue-html' &&
env.getConfig().vetur.experimental.templateInterpolationService
) {
diagnostics.push.apply(diagnostics, await lmr.mode.doValidation(doc, cancellationToken));
}
}
}
}
if (cancellationToken?.isCancellationRequested) {
return null;
}
return diagnostics;
},
    async dispose() {
      // Tears down all embedded language services held by this server instance.
      languageModes.dispose();
    }
};
} | the_stack |
import * as CS from "../Cause"
import type * as CL from "../Clock"
import * as HS from "../Collections/Immutable/HashSet"
import * as Tp from "../Collections/Immutable/Tuple"
import * as T from "../Effect"
import * as Ex from "../Exit"
import * as F from "../Fiber"
import { pipe } from "../Function"
import * as M from "../Managed"
import * as Q from "../Queue"
import * as Ref from "../Ref"
import * as AT from "./Attempted"
import * as STR from "./Strategy"
/**
 * A pool of items of type `Item`, each of which may fail to be acquired with
 * an error of type `Error`. The type parameters exist only at the type level,
 * carried by the phantom variance fields below; the runtime representation is
 * `PoolInternal`.
 */
abstract class Pool<Error, Item> {
  readonly [T._E]: () => Error;
  readonly [T._A]: () => Item
}
/**
 * Internal representation of a `Pool`, exposing the operational surface
 * (`get` / `invalidate`) that the public module-level functions delegate to
 * after a `concrete` assertion.
 */
abstract class PoolInternal<Error, Item> extends Pool<Error, Item> {
  readonly [T._E]: () => Error;
  readonly [T._A]: () => Item
  abstract get(): M.IO<Error, Item>
  abstract invalidate(item: Item): T.UIO<void>
}
/**
 * Type-level-only assertion that a `Pool` is backed by the internal
 * representation. The body is intentionally empty: every `Pool` constructed
 * in this module is a `PoolInternal`, so no runtime check is needed.
 */
function concrete<Error, Item>(
  pool: Pool<Error, Item>
): asserts pool is PoolInternal<Error, Item> {
  //
}
/**
 * Acquires an item from the pool inside a `Managed` scope; the item is
 * returned to the pool when the `Managed` is released.
 */
export function get<Error, Item>(self: Pool<Error, Item>): M.IO<Error, Item> {
  concrete(self)
  return self.get()
}
/**
 * Marks the specified item as invalidated. Data-first variant of
 * `invalidate`.
 */
export function invalidate_<Error, Item>(
  self: Pool<Error, Item>,
  item: Item
): T.UIO<void> {
  concrete(self)
  return self.invalidate(item)
}
/**
 * Marks the specified item as invalidated. Pipeable (data-last) variant of
 * `invalidate_`.
 */
export function invalidate<Item>(item: Item) {
  return <Error>(self: Pool<Error, Item>) => invalidate_(self, item)
}
/** Pool size bounds as a `[begin, end]` tuple (minimum and maximum size). */
export type Range = Tp.Tuple<[begin: number, end: number]>
interface State {
  // Number of items the pool currently accounts for (allocated or pending).
  readonly size: number
  // Number of items currently available to be taken from the queue.
  readonly free: number
}
/**
 * Default `Pool` implementation. All bookkeeping flows through the `state`
 * ref (via `Ref.modify_`), the `items` queue of attempted acquisitions, and
 * the `invalidated` set.
 *
 * NOTE(review): type parameters `R` and `S` appear unused in this class —
 * confirm whether they are kept for signature compatibility.
 */
export class DefaultPool<R, E, A, S> extends PoolInternal<E, A> {
  constructor(
    readonly creator: M.IO<E, A>,
    readonly range: Range,
    readonly isShuttingDown: Ref.Ref<boolean>,
    readonly state: Ref.Ref<State>,
    readonly items: Q.Queue<AT.Attempted<E, A>>,
    readonly invalidated: Ref.Ref<HS.HashSet<A>>,
    readonly track: (exit: Ex.Exit<E, A>) => T.UIO<void>
  ) {
    super()
    // Bind every method so they can be passed around as bare functions
    // (e.g. `pool.excess` / `pool.shrink` handed to a strategy in `makeWith`).
    this.excess = this.excess.bind(this)
    this.get = this.get.bind(this)
    this.initialize = this.initialize.bind(this)
    this.invalidate = this.invalidate.bind(this)
    this.shrink = this.shrink.bind(this)
    this.allocate = this.allocate.bind(this)
    this.getAndShutdown = this.getAndShutdown.bind(this)
    this.shutdown = this.shutdown.bind(this)
  }
  /**
   * Returns the number of items in the pool in excess of the minimum size.
   */
  excess(): T.UIO<number> {
    return T.map_(
      this.state.get,
      ({ free, size }) => size - Math.min(Tp.get_(this.range, 0), free)
    )
  }
  /**
   * Acquires an item in a `Managed` scope. Releasing the scope returns the
   * item to the pool (or, for failed acquisitions, replaces/shrinks it).
   */
  get(): M.IO<E, A> {
    const acquire: T.UIO<AT.Attempted<E, A>> = T.chain_(
      this.isShuttingDown.get,
      (down) => {
        if (down) {
          // Pool is shutting down: refuse new acquisitions.
          return T.interrupt
        } else {
          return T.flatten(
            Ref.modify_(this.state, ({ free, size }) => {
              if (free > 0 || size >= Tp.get_(this.range, 1)) {
                // An item is free, or the pool is at maximum size:
                // take from the queue (blocking until one is offered).
                return Tp.tuple(
                  T.chain_(Q.take(this.items), (acquired) => {
                    if (acquired.result._tag === "Success") {
                      const item = acquired.result.value
                      return T.chain_(this.invalidated.get, (set) => {
                        if (HS.has_(set, item)) {
                          // The item was invalidated while pooled: put the
                          // free slot back, allocate a replacement, retry.
                          return pipe(
                            Ref.update_(this.state, (state) => ({
                              ...state,
                              free: state.free + 1
                            })),
                            T.zipRight(this.allocate()),
                            T.zipRight(acquire)
                          )
                        } else {
                          return T.succeed(acquired)
                        }
                      })
                    } else {
                      // Failed acquisition attempts are handed out too; the
                      // failure surfaces when the attempt is converted to a
                      // Managed below.
                      return T.succeed(acquired)
                    }
                  }),
                  { size, free: free - 1 }
                )
              } else if (size >= 0) {
                // Room to grow: allocate a new item, then retry the take.
                return Tp.tuple(T.zipRight_(this.allocate(), acquire), {
                  size: size + 1,
                  free: free + 1
                })
              } else {
                return Tp.tuple(T.interrupt, { size, free })
              }
            })
          )
        }
      }
    )
    const release = (attempted: AT.Attempted<E, A>): T.UIO<void> => {
      if (AT.isFailure(attempted)) {
        return T.flatten(
          Ref.modify_(this.state, ({ free, size }) => {
            if (size <= Tp.get_(this.range, 0)) {
              // At or below the minimum: replace the failed item.
              return Tp.tuple(this.allocate(), { size, free: free + 1 })
            } else {
              // Above the minimum: just drop it from the count.
              return Tp.tuple(T.unit, { size: size - 1, free })
            }
          })
        )
      } else {
        // Successful item: return it to the queue, report the result to the
        // tracking strategy, and continue shutdown if one is in progress.
        return pipe(
          Ref.update_(this.state, (state) => ({ ...state, free: state.free + 1 })),
          T.zipRight(Q.offer_(this.items, attempted)),
          T.zipRight(this.track(attempted.result)),
          T.zipRight(T.whenM_(this.getAndShutdown(), this.isShuttingDown.get))
        )
      }
    }
    return M.chain_(M.make_(acquire, release), AT.toManaged)
  }
  /**
   * Begins pre-allocating pool entries based on minimum pool size.
   */
  initialize(): T.UIO<void> {
    return T.replicateMUnit_(
      T.uninterruptibleMask(({ restore }) =>
        T.flatten(
          Ref.modify_(this.state, ({ free, size }) => {
            if (size < Tp.get_(this.range, 0) && size >= 0) {
              return Tp.tuple(
                pipe(
                  T.do,
                  T.bind("reservation", () => M.managedReserve(this.creator)),
                  T.bind("exit", ({ reservation }) =>
                    // Only the acquisition itself is interruptible.
                    T.result(restore(reservation.acquire))
                  ),
                  T.bind("attempted", ({ exit, reservation }) =>
                    T.succeed(
                      new AT.Attempted(exit, reservation.release(Ex.succeed(undefined)))
                    )
                  ),
                  T.tap(({ attempted }) => Q.offer_(this.items, attempted)),
                  T.tap(({ attempted }) => this.track(attempted.result)),
                  T.tap(() => T.whenM_(this.getAndShutdown(), this.isShuttingDown.get)),
                  T.map(({ attempted }) => attempted)
                ),
                { size: size + 1, free: free + 1 }
              )
            } else {
              // Minimum size reached: nothing to pre-allocate.
              return Tp.tuple(T.unit, { size, free })
            }
          })
        )
      ),
      Tp.get_(this.range, 0)
    )
  }
  /**
   * Marks an item as invalidated; it will be replaced the next time it is
   * taken from the queue in `get`.
   */
  invalidate(item: A): T.UIO<void> {
    return Ref.update_(this.invalidated, (_) => HS.add_(_, item))
  }
  /**
   * Shrinks the pool down, but never to less than the minimum size.
   */
  shrink(): T.UIO<unknown> {
    return T.uninterruptible(
      T.flatten(
        Ref.modify_(this.state, ({ free, size }) => {
          if (size > Tp.get_(this.range, 0) && free > 0) {
            // Take one free item, run its finalizer, and drop it from the
            // invalidated set and the size count.
            return Tp.tuple(
              T.chain_(Q.take(this.items), (attempted) =>
                pipe(
                  attempted,
                  AT.forEachUnit((a) =>
                    Ref.update_(this.invalidated, (_) => HS.remove_(_, a))
                  ),
                  T.zipRight(attempted.finalizer),
                  T.zipRight(
                    Ref.update_(this.state, (state) => ({
                      ...state,
                      size: state.size - 1
                    }))
                  )
                )
              ),
              { size, free: free - 1 }
            )
          } else {
            return Tp.tuple(T.unit, { size, free })
          }
        })
      )
    )
  }
  /**
   * Allocates one new item via `creator` and offers the attempt (success or
   * failure) to the queue, reporting the result to the tracking strategy.
   */
  allocate(): T.UIO<unknown> {
    return T.uninterruptibleMask(({ restore }) =>
      pipe(
        T.do,
        T.bind("reservation", () => M.managedReserve(this.creator)),
        T.bind("exit", ({ reservation }) => T.result(restore(reservation.acquire))),
        T.bind("attempted", ({ exit, reservation }) =>
          T.succeed(new AT.Attempted(exit, reservation.release(Ex.succeed(undefined))))
        ),
        T.tap(({ attempted }) => Q.offer_(this.items, attempted)),
        T.tap(({ attempted }) => this.track(attempted.result)),
        T.tap(() => T.whenM_(this.getAndShutdown(), this.isShuttingDown.get)),
        T.map(({ attempted }) => attempted)
      )
    )
  }
  /**
   * Gets items from the pool and shuts them down as long as there are items
   * free, signalling shutdown of the pool if the pool is empty.
   */
  getAndShutdown(): T.UIO<void> {
    return T.flatten(
      Ref.modify_(this.state, ({ free, size }) => {
        if (free > 0) {
          return Tp.tuple(
            T.foldCauseM_(
              Q.take(this.items),
              (_) => T.unit,
              (attempted) =>
                pipe(
                  attempted,
                  AT.forEachUnit((a) =>
                    Ref.update_(this.invalidated, (_) => HS.remove_(_, a))
                  ),
                  T.zipRight(attempted.finalizer),
                  T.zipRight(
                    Ref.update_(this.state, (state) => ({
                      ...state,
                      size: state.size - 1
                    }))
                  ),
                  // Recurse until no free items remain.
                  T.zipRight(this.getAndShutdown())
                )
            ),
            { size, free: free - 1 }
          )
        } else if (size > 0) {
          // Items still checked out: their release will resume shutdown.
          return Tp.tuple(T.unit, { size, free })
        } else {
          // Pool is empty: shut the queue down to signal completion.
          return Tp.tuple(Q.shutdown(this.items), { size: size - 1, free })
        }
      })
    )
  }
  /**
   * Initiates shutdown of the pool (idempotent) and awaits the queue's
   * shutdown as the completion signal.
   */
  shutdown(): T.UIO<void> {
    return T.flatten(
      Ref.modify_(this.isShuttingDown, (down) => {
        if (down) {
          // Shutdown already in progress: just await completion.
          return Tp.tuple(Q.awaitShutdown(this.items), true)
        } else {
          return Tp.tuple(
            T.zipRight_(this.getAndShutdown(), Q.awaitShutdown(this.items)),
            true
          )
        }
      })
    )
  }
}
/**
 * Creates a pool from a fixed number of pre-allocated items. This method
 * should only be used when there is no cleanup or release operation
 * associated with items in the pool. If cleanup or release is required,
 * then the `make` constructor should be used instead.
 */
export function fromIterable<A>(
  iterable0: Iterable<A>
): M.UIO<Pool<never, NonNullable<A>>> {
  return pipe(
    M.do,
    M.bind("iterable", () => M.succeed(Array.from(iterable0))),
    M.bind("source", ({ iterable }) => T.toManaged(Ref.makeRef(iterable))),
    // Each acquisition pops the next remaining item from `source`; an empty
    // input iterable yields a pool whose acquisitions never complete.
    M.let("get", ({ iterable, source }) => {
      if (!iterable.length) {
        return T.never
      } else {
        return Ref.modify_(source, (a) => {
          if (a.length > 0) {
            return Tp.tuple(a[0]!, a.slice(1))
          }
          // Presumably unreachable while the fixed pool size equals the
          // iterable length — TODO confirm.
          throw new CS.IllegalArgumentException("No item in array")
        })
      }
    }),
    M.bind("pool", ({ get, iterable }) =>
      makeFixed(M.fromEffect(get), iterable.length)
    ),
    M.map(({ pool }) => pool)
  )
}
/**
 * Makes a new pool of the specified fixed size. The pool is returned in a
 * `Managed`, which governs the lifetime of the pool. When the pool is
 * shutdown because the `Managed` is used, the individual items allocated by
 * the pool will be released in some unspecified order.
 */
export function makeFixed<R, E, A>(
  get: M.Managed<R, E, A>,
  min: number
): M.RIO<R, Pool<E, A>> {
  // A fixed pool never grows or shrinks: minimum and maximum coincide, and no
  // eviction strategy is installed.
  const fixedRange = Tp.tuple(min, min)
  return makeWith(get, fixedRange, new STR.None())
}
/**
 * Makes a new pool with the specified minimum and maximum sizes and time to
 * live before a pool whose excess items are not being used will be shrunk
 * down to the minimum size. The pool is returned in a `Managed`, which
 * governs the lifetime of the pool. When the pool is shutdown because the
 * `Managed` is used, the individual items allocated by the pool will be
 * released in some unspecified order.
 */
export function make<R, E, A>(
  get: M.IO<E, A>,
  range: Range,
  timeToLive: number
): M.RIO<R & CL.HasClock, Pool<E, A>> {
  // Excess items idle for longer than `timeToLive` are evicted by the
  // time-to-live strategy.
  const strategy = new STR.TimeToLive(timeToLive)
  return makeWith(get, range, strategy)
}
/**
 * A more powerful variant of `make` that allows specifying a `Strategy` that
 * describes how a pool whose excess items are not being used will be shrunk
 * down to the minimum size.
 */
export function makeWith<R, R1, E, A>(
  get: M.Managed<R, E, A>,
  range: Range,
  strategy: STR.Strategy<R1, E, A>
): M.RIO<R & R1, Pool<E, A>> {
  return pipe(
    M.do,
    M.bind("env", () => M.environment<R>()),
    // Shared mutable state: shutdown flag, size/free counters, the queue of
    // attempted acquisitions (bounded by the maximum size), and the
    // invalidated-item set.
    M.bind("down", () => T.toManaged(Ref.makeRef(false))),
    M.bind("state", () => T.toManaged(Ref.makeRef<State>({ size: 0, free: 0 }))),
    M.bind("items", () =>
      T.toManaged(Q.makeBounded<AT.Attempted<E, A>>(Tp.get_(range, 1)))
    ),
    M.bind("inv", () => T.toManaged(Ref.makeRef(HS.make<A>()))),
    M.bind("initial", () => T.toManaged(strategy.initial())),
    M.let(
      "pool",
      ({ down, env, initial, inv, items, state }) =>
        new DefaultPool(
          // Bake the environment in so the pool's creator is environment-free.
          M.provideAll_(get, env),
          range,
          down,
          state,
          items,
          inv,
          strategy.track(initial)
        )
    ),
    // Pre-allocation and the shrink strategy run on daemon fibers for the
    // lifetime of the Managed scope.
    M.bind("fiber", ({ pool }) => T.toManaged(T.forkDaemon(pool.initialize()))),
    M.bind("shrink", ({ initial, pool }) =>
      T.toManaged(T.forkDaemon(strategy.run(initial, pool.excess(), pool.shrink())))
    ),
    // On scope exit: stop both fibers, then shut the pool down.
    M.tap(({ fiber, pool, shrink }) =>
      M.finalizer(
        pipe(
          F.interrupt(fiber),
          T.zipRight(F.interrupt(shrink)),
          T.zipRight(pool.shutdown())
        )
      )
    ),
    M.map(({ pool }) => pool)
  )
}
// IMPORTANT
// This file was generated by https://github.com/Bolisov/google-api-typings-generator. Please do not edit it manually.
// In case of any problems, please post an issue to https://github.com/Bolisov/google-api-typings-generator
// Generated from: https://www.googleapis.com/discovery/v1/apis/siteVerification/v1/rest
/// <reference types="gapi.client" />
declare namespace gapi.client {
    /** Load Google Site Verification API v1 */
    function load(name: "siteverification", version: "v1"): PromiseLike<void>;
    /** Load Google Site Verification API v1 (callback form). */
    function load(name: "siteverification", version: "v1", callback: () => any): void;
    /** Entry point for all web-resource verification operations. */
    const webResource: siteverification.WebResourceResource;
    namespace siteverification {
        /** Request body for `webResource.getToken`. */
        interface SiteVerificationWebResourceGettokenRequest {
            /** The site for which a verification token will be generated. */
            site?: {
                /** The site identifier. If the type is set to SITE, the identifier is a URL. If the type is set to INET_DOMAIN, the site identifier is a domain name. */
                identifier?: string;
                /** The type of resource to be verified. Can be SITE or INET_DOMAIN (domain name). */
                type?: string;
            };
            /** The verification method that will be used to verify this site. For sites, 'FILE' or 'META' methods may be used. For domains, only 'DNS' may be used. */
            verificationMethod?: string;
        }
        /** Response body for `webResource.getToken`. */
        interface SiteVerificationWebResourceGettokenResponse {
            /**
             * The verification method to use in conjunction with this token. For FILE, the token should be placed in the top-level directory of the site, stored
             * inside a file of the same name. For META, the token should be placed in the HEAD tag of the default page that is loaded for the site. For DNS, the
             * token should be placed in a TXT record of the domain.
             */
            method?: string;
            /** The verification token. The token must be placed appropriately in order for verification to succeed. */
            token?: string;
        }
        /** Response body for `webResource.list`. */
        interface SiteVerificationWebResourceListResponse {
            /** The list of sites that are owned by the authenticated user. */
            items?: SiteVerificationWebResourceResource[];
        }
        /** A verified (or to-be-verified) site or domain owned by the user. */
        interface SiteVerificationWebResourceResource {
            /** The string used to identify this site. This value should be used in the "id" portion of the REST URL for the Get, Update, and Delete operations. */
            id?: string;
            /** The email addresses of all verified owners. */
            owners?: string[];
            /** The address and type of a site that is verified or will be verified. */
            site?: {
                /** The site identifier. If the type is set to SITE, the identifier is a URL. If the type is set to INET_DOMAIN, the site identifier is a domain name. */
                identifier?: string;
                /** The site type. Can be SITE or INET_DOMAIN (domain name). */
                type?: string;
            };
        }
        /** Methods available on `gapi.client.webResource`. */
        interface WebResourceResource {
            /** Relinquish ownership of a website or domain. */
            delete(request: {
                /** Data format for the response. */
                alt?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** The id of a verified site or domain. */
                id: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /**
                 * Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
                 * Overrides userIp if both are provided.
                 */
                quotaUser?: string;
                /** IP address of the site where the request originates. Use this if you want to enforce per-user limits. */
                userIp?: string;
            }): Request<void>;
            /** Get the most current data for a website or domain. */
            get(request: {
                /** Data format for the response. */
                alt?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** The id of a verified site or domain. */
                id: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /**
                 * Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
                 * Overrides userIp if both are provided.
                 */
                quotaUser?: string;
                /** IP address of the site where the request originates. Use this if you want to enforce per-user limits. */
                userIp?: string;
            }): Request<SiteVerificationWebResourceResource>;
            /** Get a verification token for placing on a website or domain. */
            getToken(request: {
                /** Data format for the response. */
                alt?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /**
                 * Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
                 * Overrides userIp if both are provided.
                 */
                quotaUser?: string;
                /** IP address of the site where the request originates. Use this if you want to enforce per-user limits. */
                userIp?: string;
            }): Request<SiteVerificationWebResourceGettokenResponse>;
            /** Attempt verification of a website or domain. */
            insert(request: {
                /** Data format for the response. */
                alt?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /**
                 * Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
                 * Overrides userIp if both are provided.
                 */
                quotaUser?: string;
                /** IP address of the site where the request originates. Use this if you want to enforce per-user limits. */
                userIp?: string;
                /** The method to use for verifying a site or domain. */
                verificationMethod: string;
            }): Request<SiteVerificationWebResourceResource>;
            /** Get the list of your verified websites and domains. */
            list(request: {
                /** Data format for the response. */
                alt?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /**
                 * Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
                 * Overrides userIp if both are provided.
                 */
                quotaUser?: string;
                /** IP address of the site where the request originates. Use this if you want to enforce per-user limits. */
                userIp?: string;
            }): Request<SiteVerificationWebResourceListResponse>;
            /** Modify the list of owners for your website or domain. This method supports patch semantics. */
            patch(request: {
                /** Data format for the response. */
                alt?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** The id of a verified site or domain. */
                id: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /**
                 * Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
                 * Overrides userIp if both are provided.
                 */
                quotaUser?: string;
                /** IP address of the site where the request originates. Use this if you want to enforce per-user limits. */
                userIp?: string;
            }): Request<SiteVerificationWebResourceResource>;
            /** Modify the list of owners for your website or domain. */
            update(request: {
                /** Data format for the response. */
                alt?: string;
                /** Selector specifying which fields to include in a partial response. */
                fields?: string;
                /** The id of a verified site or domain. */
                id: string;
                /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
                key?: string;
                /** OAuth 2.0 token for the current user. */
                oauth_token?: string;
                /** Returns response with indentations and line breaks. */
                prettyPrint?: boolean;
                /**
                 * Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
                 * Overrides userIp if both are provided.
                 */
                quotaUser?: string;
                /** IP address of the site where the request originates. Use this if you want to enforce per-user limits. */
                userIp?: string;
            }): Request<SiteVerificationWebResourceResource>;
        }
    }
}
import { Workbook, CellModel, getCell } from '../base/index';
import { executeTaskAsync } from '../common/worker';
import { pdfLayoutSettings, SaveOptions, checkIsFormula, workbookFormulaOperation } from '../common/index';
import * as events from '../common/event';
import { SaveWorker } from '../workers/save-worker';
import { SaveCompleteEventArgs } from '../common/index';
import { checkUniqueRange } from '../../workbook/index';
import { detach } from '@syncfusion/ej2-base';
/**
* @hidden
* The `WorkbookSave` module is used to handle the save action in Workbook library.
*/
export class WorkbookSave extends SaveWorker {
    // Set when every sheet has been serialized and the final save can run.
    private isProcessCompleted: boolean = false;
    // Options captured from the `beginSave` event for the current save.
    private saveSettings: SaveOptions;
    // Accumulated serialized workbook (settings + per-sheet JSON).
    private saveJSON: { [key: string]: object } = {};
    // True when the save is performed via a full-page form POST.
    private isFullPost: boolean = false;
    // True when the caller wants the Blob back instead of a file download.
    private needBlobData: boolean = false;
    // Extra key/value pairs posted alongside the workbook JSON.
    private customParams: Object = null;
    private pdfLayoutSettings: pdfLayoutSettings = {fitSheetOnOnePage: false};
    /**
     * Constructor for WorkbookSave module in Workbook library.
     *
     * @private
     * @param {Workbook} parent - Specifies the workbook.
     */
    constructor(parent: Workbook) {
        super(parent);
        this.addEventListener();
    }
    /**
     * Get the module name.
     *
     * @returns {string} - To Get the module name.
     * @private
     */
    public getModuleName(): string {
        return 'workbookSave';
    }
    /**
     * To destroy the WorkbookSave module.
     *
     * @returns {void} - To destroy the WorkbookSave module.
     * @hidden
     */
    public destroy(): void {
        this.removeEventListener();
        this.parent = null;
    }
    /**
     * @hidden
     * @returns {void} - add Event Listener
     */
    private addEventListener(): void {
        this.parent.on(events.beginSave, this.initiateSave, this);
    }
    /**
     * @hidden
     * @returns {void} - remove Event Listener.
     */
    private removeEventListener(): void {
        if (!this.parent.isDestroyed) {
            this.parent.off(events.beginSave, this.initiateSave);
        }
    }
    /**
     * Initiate save process.
     *
     * @hidden
     * @param {Object} args - Specify the args.
     * @returns {void} - Initiate save process.
     */
    private initiateSave(args: { [key: string]: Object }): void {
        const saveSettings: SaveOptions = args.saveSettings;
        this.parent.notify(events.getFilteredCollection, null);
        this.saveSettings = {
            saveType: saveSettings.saveType,
            url: saveSettings.url,
            fileName: saveSettings.fileName || 'Sample'
            //passWord: saveSettings.passWord
        };
        this.isFullPost = args.isFullPost as boolean;
        this.needBlobData = args.needBlobData as boolean;
        // Blob output and a full form POST are mutually exclusive.
        if (this.needBlobData) { this.isFullPost = false; }
        this.customParams = args.customParams;
        // NOTE(review): no fallback if args.pdfLayoutSettings is undefined —
        // confirm callers always supply it.
        this.pdfLayoutSettings = args.pdfLayoutSettings;
        this.updateBasicSettings();
        this.processSheets();
    }
    /**
     * Update save JSON with basic settings.
     *
     * @hidden
     * @returns {void} - Update save JSON with basic settings.
     */
    private updateBasicSettings(): void {
        // Strip the sheets (serialized separately, per-sheet in a worker) and
        // all event handlers / observable internals from the workbook JSON.
        const jsonStr: string = this.getStringifyObject(this.parent, ['sheets', '_isScalar', 'observers', 'closed', 'isStopped', 'hasError',
            '__isAsync', 'beforeCellFormat', 'beforeCellRender', 'beforeDataBound', 'beforeOpen', 'beforeSave', 'beforeSelect',
            'beforeSort', 'cellEdit', 'cellEditing', 'cellSave', 'beforeCellSave', 'contextMenuItemSelect', 'contextMenuBeforeClose',
            'contextMenuBeforeOpen', 'created', 'dataBound', 'fileMenuItemSelect', 'fileMenuBeforeClose', 'fileMenuBeforeOpen',
            'saveComplete', 'sortComplete', 'select', 'actionBegin', 'actionComplete', 'afterHyperlinkClick', 'afterHyperlinkCreate',
            'beforeHyperlinkClick', 'beforeHyperlinkCreate', 'openComplete', 'openFailure', 'queryCellInfo', 'dialogBeforeOpen',
            'dataSourceChanged', 'beforeConditionalFormat', 'beforeCellUpdate']);
        const basicSettings: { [key: string]: Object } = JSON.parse(jsonStr);
        const sheetCount: number = this.parent.sheets.length;
        if (sheetCount) {
            basicSettings.sheets = [];
        }
        this.saveJSON = basicSettings;
    }
    /**
     * Process sheets properties.
     *
     * @hidden
     * @returns {void} - Process sheets properties.
     */
    private processSheets(): void {
        const skipProps: string[] = ['dataSource', 'startCell', 'query', 'showFieldAsHeader'];
        // eslint-disable-next-line
        if ((this.parent as any).isAngular) {
            skipProps.push('template');
        }
        // Serialize each sheet asynchronously; updateSheet collects results.
        for (let i: number = 0, sheetCount: number = this.parent.sheets.length; i < sheetCount; i++) {
            executeTaskAsync(this, this.processSheet, this.updateSheet, [this.getStringifyObject(this.parent.sheets[i], skipProps, i), i]);
        }
    }
    /**
     * Update processed sheet data.
     *
     * @hidden
     * @param {Object[]} data - Specifies the data.
     * @returns {void} - Update processed sheet data.
     */
    private updateSheet(data: Object[]): void {
        (this.saveJSON.sheets as { [key: string]: Object })[data[0] as string] = data[1];
        // Sheets complete out of order; save only when every slot is filled.
        this.isProcessCompleted = this.getSheetLength(this.saveJSON.sheets as []) === this.parent.sheets.length;
        if (this.isProcessCompleted) {
            this.save(this.saveSettings);
        }
    }
    /**
     * Counts the non-empty entries in the (possibly sparse) sheets array.
     *
     * @param {string[]} sheets - Collected per-sheet JSON entries.
     * @returns {number} - Number of sheets processed so far.
     */
    private getSheetLength(sheets: string[]): number {
        let len: number = 0;
        sheets.forEach((sheet: string) => {
            if (sheet) { len++; }
        });
        return len;
    }
    /**
     * Save process.
     *
     * @hidden
     * @param {SaveOptions} saveSettings - Specifies the save settings props.
     * @returns {void} - Save process.
     */
    private save(saveSettings: SaveOptions): void {
        const args: { cancel: boolean, jsonObject: object } = { cancel: false, jsonObject: this.saveJSON };
        this.parent.notify(events.onSave, args);
        if (!args.cancel) {
            if (this.isFullPost) {
                this.initiateFullPostSave();
            } else {
                executeTaskAsync(
                    this, { 'workerTask': this.processSave },
                    this.updateSaveResult, [this.saveJSON, saveSettings, this.customParams, this.pdfLayoutSettings], true);
            }
        }
        // Release the serialized workbook regardless of outcome.
        this.saveJSON = {};
    }
    /**
     * Update final save data.
     *
     * @hidden
     * @param {Object | Blob} result - specify the sve result.
     * @returns {void} - Update final save data.
     */
    private updateSaveResult(result: { [key: string]: Object } | Blob): void {
        const args: SaveCompleteEventArgs = {
            status: 'Success',
            message: '',
            url: this.saveSettings.url,
            fileName: this.saveSettings.fileName,
            saveType: this.saveSettings.saveType,
            blobData: null
        };
        if (typeof (result) === 'object' && (<{ [key: string]: Object }>result).error) {
            args.status = 'Failure';
            args.message = (<{ [key: string]: Object }>result).error.toString();
        } else if (typeof (result) === 'object' && (<{ [key: string]: Object }>result).dialog) {
            // Server asked for a dialog (e.g. an error message) instead of a file.
            this.parent.notify(events.saveError, { content: (<{ [key: string]: Object }>result).dialog });
        } else {
            if (this.needBlobData) {
                args.blobData = result as Blob;
            } else {
                this.ClientFileDownload(result as Blob);
            }
        }
        this.parent.trigger('saveComplete', args);
        this.parent.notify(events.saveCompleted, args);
    }
    /**
     * Downloads the saved Blob on the client via a temporary anchor element.
     *
     * @param {Blob} blobData - The saved workbook content.
     * @returns {void}
     */
    private ClientFileDownload(blobData: Blob): void {
        const anchor: HTMLAnchorElement = this.parent.createElement(
            'a', { attrs: { download: this.getFileNameWithExtension() } }) as HTMLAnchorElement;
        const url: string = URL.createObjectURL(blobData);
        anchor.href = url;
        document.body.appendChild(anchor);
        anchor.click();
        URL.revokeObjectURL(url);
        document.body.removeChild(anchor);
    }
    /**
     * Saves by POSTing the workbook JSON, save settings, custom params and
     * PDF layout settings to `saveSettings.url` via a hidden form.
     *
     * @returns {void}
     */
    private initiateFullPostSave(): void {
        let keys: string[] = Object.keys(this.saveSettings);
        let i: number;
        const formElem: HTMLFormElement = this.parent.createElement(
            'form', { attrs: { method: 'POST', action: this.saveSettings.url } }
        ) as HTMLFormElement;
        let inputElem: HTMLInputElement = this.parent.createElement(
            'input', { attrs: { type: 'hidden', name: 'JSONData' } }) as HTMLInputElement;
        inputElem.value = JSON.stringify(this.saveJSON);
        formElem.appendChild(inputElem);
        // One hidden input per save-settings entry.
        for (i = 0; i < keys.length; i++) {
            inputElem = this.parent.createElement(
                'input', { attrs: { type: 'hidden', name: keys[i] } }) as HTMLInputElement;
            inputElem.value = this.saveSettings[keys[i]];
            formElem.appendChild(inputElem);
        }
        // One hidden input per custom parameter.
        keys = Object.keys(this.customParams);
        for (i = 0; i < keys.length; i++) {
            inputElem = this.parent.createElement(
                'input', { attrs: { type: 'hidden', name: keys[i] } }) as HTMLInputElement;
            inputElem.value = this.customParams[keys[i]];
            formElem.appendChild(inputElem);
        }
        inputElem = this.parent.createElement(
            'input', { attrs: { type: 'hidden', name: 'pdfLayoutSettings' } }) as HTMLInputElement;
        inputElem.value = JSON.stringify(this.pdfLayoutSettings);
        formElem.appendChild(inputElem);
        document.body.appendChild(formElem);
        formElem.submit();
        detach(formElem);
        this.parent.notify(events.saveCompleted, {});
    }
    /**
     * Get stringified workbook object.
     *
     * @hidden
     * @param {object} model - Specifies the workbook or sheet model.
     * @param {string[]} skipProp - specifies the skipprop.
     * @param {number} sheetIdx - Index of the sheet being serialized, used to
     * refresh formula cells whose values are missing.
     * @returns {string} - Get stringified workbook object.
     */
    private getStringifyObject(model: object, skipProp: string[] = [], sheetIdx?: number): string {
        return JSON.stringify(model, (key: string, value: { [key: string]: object }) => {
            if (skipProp.indexOf(key) > -1) {
                return undefined;
            } else {
                if (value && value.cells) {
                    for (let i: number = 0, len: number = (value.cells as CellModel[]).length; i < len; i++) {
                        const cell: CellModel = value.cells[i];
                        const args: { cellIdx: number[], isUnique: boolean, uniqueRange: string } = {
                            cellIdx: [Number(key), i], isUnique: false, uniqueRange: ''
                        };
                        this.parent.notify(checkUniqueRange, args);
                        if (cell) {
                            // UNIQUE() results are recomputed on load; drop the value.
                            if ((cell.formula && (cell.formula.indexOf('=UNIQUE(') > -1)) || args.isUnique) {
                                delete cell.value;
                                continue;
                            }
                            // Formula cells without a cached value: recalculate
                            // and pick up the freshly computed value.
                            if (!cell.value && cell.formula && cell.formula.indexOf('=UNIQUE(') < 0) {
                                this.parent.notify(
                                    workbookFormulaOperation, { action: 'refreshCalculate', value: cell.formula, rowIndex: args.cellIdx[0],
                                        colIndex: i, isFormula: checkIsFormula(cell.formula), sheetIndex: sheetIdx, isRefreshing: true });
                                cell.value = getCell(args.cellIdx[0], i, model).value;
                            }
                        }
                    }
                }
                // eslint-disable-next-line no-prototype-builtins
                if (value && typeof value === 'object' && value.hasOwnProperty('properties')) {
                    return value.properties;
                } else if (value !== null) {
                    return value;
                } else {
                    return undefined;
                }
            }
        });
    }
    /**
     * Builds the download file name, replacing any existing extension with
     * the one implied by the configured save type.
     *
     * @param {string} filename - Optional base name; defaults to saveSettings.fileName.
     * @returns {string} - File name including extension.
     */
    private getFileNameWithExtension(filename?: string): string {
        if (!filename) { filename = this.saveSettings.fileName; }
        const fileExt: string = this.getFileExtension();
        const idx: number = filename.lastIndexOf('.');
        if (idx > -1) {
            filename = filename.substr(0, idx);
        }
        return (filename + fileExt);
    }
    /**
     * Maps the save type (e.g. 'Xlsx') to a lowercase file extension.
     *
     * @returns {string} - Extension including the leading dot.
     */
    private getFileExtension(): string {
        return ('.' + this.saveSettings.saveType.toLowerCase());
    }
}
import { Disposable } from "event-kit";
import { File } from "pathwatcher";
declare global {
namespace TextBuffer {
/**
* The event objects that are passed into the callbacks which the user provides to
* specific API calls.
*/
        namespace Events {
            /** Event object received when an error occurs while watching the buffer's file. */
            interface BufferWatchError {
                /** The error object. */
                error: Error;
                /**
                 * Call this function to indicate you have handled the error.
                 * The error will not be thrown if this function is called.
                 */
                handle(): void;
            }
            /** Event object received after the buffer has been saved to disk. */
            interface FileSaved {
                /** The path to which the buffer was saved. */
                path: string;
            }
            /** Event object describing how a Marker's state changed. */
            interface MarkerChanged {
                /** Point representing the former head position. */
                oldHeadPosition: Point;
                /** Point representing the new head position. */
                newHeadPosition: Point;
                /** Point representing the former tail position. */
                oldTailPosition: Point;
                /** Point representing the new tail position. */
                newTailPosition: Point;
                /** Boolean indicating whether the marker was valid before the change. */
                wasValid: boolean;
                /** Boolean indicating whether the marker is now valid. */
                isValid: boolean;
                /** Boolean indicating whether the marker had a tail before the change. */
                hadTail: boolean;
                /** Boolean indicating whether the marker now has a tail. */
                hasTail: boolean;
                /**
                 * Object containing the marker's custom properties before the change.
                 * @deprecated
                 */
                oldProperties: object;
                /**
                 * Object containing the marker's custom properties after the change.
                 * @deprecated
                 */
                newProperties: object;
                /**
                 * Boolean indicating whether this change was caused by a textual
                 * change to the buffer or whether the marker was manipulated directly
                 * via its public API.
                 */
                textChanged: boolean;
            }
            /** Event object describing how a DisplayMarker's state changed. */
            interface DisplayMarkerChanged {
                /** Point representing the former head buffer position. */
                oldHeadBufferPosition: Point;
                /** Point representing the new head buffer position. */
                newHeadBufferPosition: Point;
                /** Point representing the former tail buffer position. */
                oldTailBufferPosition: Point;
                /** Point representing the new tail buffer position. */
                newTailBufferPosition: Point;
                /** Point representing the former head screen position. */
                oldHeadScreenPosition: Point;
                /** Point representing the new head screen position. */
                newHeadScreenPosition: Point;
                /** Point representing the former tail screen position. */
                oldTailScreenPosition: Point;
                /** Point representing the new tail screen position. */
                newTailScreenPosition: Point;
                /** Boolean indicating whether the marker was valid before the change. */
                wasValid: boolean;
                /** Boolean indicating whether the marker is now valid. */
                isValid: boolean;
                /** Boolean indicating whether the marker had a tail before the change. */
                hadTail: boolean;
                /** Boolean indicating whether the marker now has a tail. */
                hasTail: boolean;
                /**
                 * Object containing the marker's custom properties before the change.
                 * @deprecated
                 */
                oldProperties: object;
                /**
                 * Object containing the marker's custom properties after the change.
                 * @deprecated
                 */
                newProperties: object;
                /**
                 * Boolean indicating whether this change was caused by a textual change to the
                 * buffer or whether the marker was manipulated directly via its public API.
                 */
                textChanged: boolean;
            }
            /** Event object received before the buffer's content changes. */
            interface BufferChanging {
                /** Range of the old text. */
                oldRange: Range;
            }
            /** Event object describing a change to the buffer's content. */
            interface BufferChanged {
                /** Range of the old text. */
                oldRange: Range;
                /** Range of the new text. */
                newRange: Range;
                /** String containing the text that was replaced. */
                oldText: string;
                /** String containing the text that was inserted. */
                newText: string;
            }
            /** Event object received once the buffer has stopped changing. */
            interface BufferStoppedChanging {
                /** The text changes delivered with this event. */
                changes: Structures.TextChange[];
            }
        }
/**
* The option objects that the user is expected to fill out and provide to
* specific API calls.
*/
        namespace Options {
            /** Options used when loading a buffer's content from a file. */
            interface BufferLoad {
                /** The file's encoding. */
                encoding?: string | undefined;
                /**
                 * A function that returns a boolean indicating whether the buffer should
                 * be destroyed if its file is deleted.
                 */
                shouldDestroyOnFileDelete?(): boolean;
            }
            /** Criteria for filtering markers in a MarkerLayer query. */
            interface FindMarker {
                /** Only include markers that start at the given Point. */
                startPosition?: PointCompatible | undefined;
                /** Only include markers that end at the given Point. */
                endPosition?: PointCompatible | undefined;
                /** Only include markers that start inside the given Range. */
                startsInRange?: RangeCompatible | undefined;
                /** Only include markers that end inside the given Range. */
                endsInRange?: RangeCompatible | undefined;
                /** Only include markers that contain the given Point, inclusive. */
                containsPoint?: PointCompatible | undefined;
                /** Only include markers that contain the given Range, inclusive. */
                containsRange?: RangeCompatible | undefined;
                /** Only include markers that start at the given row number. */
                startRow?: number | undefined;
                /** Only include markers that end at the given row number. */
                endRow?: number | undefined;
                /** Only include markers that intersect the given row number. */
                intersectsRow?: number | undefined;
            }
            /** Criteria for filtering markers in a DisplayMarkerLayer query. */
            interface FindDisplayMarker {
                /** Only include markers starting at this Point in buffer coordinates. */
                startBufferPosition?: PointCompatible | undefined;
                /** Only include markers ending at this Point in buffer coordinates. */
                endBufferPosition?: PointCompatible | undefined;
                /** Only include markers starting at this Point in screen coordinates. */
                startScreenPosition?: PointCompatible | undefined;
                /** Only include markers ending at this Point in screen coordinates. */
                endScreenPosition?: PointCompatible | undefined;
                /** Only include markers starting inside this Range in buffer coordinates. */
                startsInBufferRange?: RangeCompatible | undefined;
                /** Only include markers ending inside this Range in buffer coordinates. */
                endsInBufferRange?: RangeCompatible | undefined;
                /** Only include markers starting inside this Range in screen coordinates. */
                startsInScreenRange?: RangeCompatible | undefined;
                /** Only include markers ending inside this Range in screen coordinates. */
                endsInScreenRange?: RangeCompatible | undefined;
                /** Only include markers starting at this row in buffer coordinates. */
                startBufferRow?: number | undefined;
                /** Only include markers ending at this row in buffer coordinates. */
                endBufferRow?: number | undefined;
                /** Only include markers starting at this row in screen coordinates. */
                startScreenRow?: number | undefined;
                /** Only include markers ending at this row in screen coordinates. */
                endScreenRow?: number | undefined;
                /**
                 * Only include markers intersecting this Array of [startRow, endRow] in
                 * buffer coordinates.
                 */
                intersectsBufferRowRange?: [number, number] | undefined;
                /**
                 * Only include markers intersecting this Array of [startRow, endRow] in
                 * screen coordinates.
                 */
                intersectsScreenRowRange?: [number, number] | undefined;
                /** Only include markers containing this Range in buffer coordinates. */
                containsBufferRange?: RangeCompatible | undefined;
                /** Only include markers containing this Point in buffer coordinates. */
                containsBufferPosition?: PointCompatible | undefined;
                /** Only include markers contained in this Range in buffer coordinates. */
                containedInBufferRange?: RangeCompatible | undefined;
                /** Only include markers contained in this Range in screen coordinates. */
                containedInScreenRange?: RangeCompatible | undefined;
                /** Only include markers intersecting this Range in buffer coordinates. */
                intersectsBufferRange?: RangeCompatible | undefined;
                /** Only include markers intersecting this Range in screen coordinates. */
                intersectsScreenRange?: RangeCompatible | undefined;
            }
            /** Options used when copying a marker. */
            interface CopyMarker {
                /** Whether or not the marker should be tailed. */
                tailed?: boolean | undefined;
                /** Creates the marker in a reversed orientation. */
                reversed?: boolean | undefined;
                /** Determines the rules by which changes to the buffer invalidate the marker. */
                invalidate?: "never"|"surround"|"overlap"|"inside"|"touch" | undefined;
                /**
                 * Indicates whether insertions at the start or end of the marked range should
                 * be interpreted as happening outside the marker.
                 */
                exclusive?: boolean | undefined;
                /**
                 * Custom properties to be associated with the marker.
                 * @deprecated
                 */
                properties?: object | undefined;
            }
            /** Options controlling how many lines of context a scan result includes. */
            interface ScanContext {
                /** The number of lines before the matched line to include in the results object. */
                leadingContextLineCount?: number | undefined;
                /** The number of lines after the matched line to include in the results object. */
                trailingContextLineCount?: number | undefined;
            }
        }
/** The structures that are passed to the user by Atom following specific API calls. */
        namespace Structures {
            /** Describes a single textual change to the buffer. */
            interface TextChange {
                /** Point representing the extent of the new text. */
                newExtent: Point;
                /** Point representing the extent of the replaced text. */
                oldExtent: Point;
                /** Range of the new text. */
                newRange: Range;
                /** Range of the old text. */
                oldRange: Range;
                /** String containing the text that was inserted. */
                newText: string;
                /** String containing the text that was replaced. */
                oldText: string;
                /** Point at which the change begins. */
                start: Point;
            }
            /** The result object produced for each match during a buffer scan. */
            interface BufferScanResult {
                /** The buffer being scanned. */
                buffer: TextBuffer;
                /** The text of the line containing the match. */
                lineText: string;
                /** The regular expression match data. */
                match: RegExpExecArray;
                /** The matched text. */
                matchText: string;
                /** Range of the match within the buffer. */
                range: Range;
                /** Replaces the matched text with the given replacement text. */
                replace(replacementText: string): void;
                /** Stops the scan. */
                stop(): void;
                /** Whether the scan has been stopped. */
                stopped: boolean;
            }
            /** A scan result that additionally carries surrounding context lines. */
            interface ContextualBufferScanResult extends BufferScanResult {
                /** Lines preceding the matched line. */
                leadingContextLines: string[];
                /** Lines following the matched line. */
                trailingContextLines: string[];
            }
        }
/**
* Represents a buffer annotation that remains logically stationary even as
* the buffer changes.
*/
        interface Marker {
            // Properties
            /** The marker's unique identifier. */
            id: number;
            /** Whether the marker has a tail. */
            tailed: boolean;
            /** Whether the head precedes the tail. */
            reversed: boolean;
            /** Whether the marker is valid. */
            valid: boolean;
            /** The marker's invalidation strategy. */
            invalidate: string;
            /**
             * Custom properties associated with the marker.
             * @deprecated
             */
            properties: object;
            // Lifecycle
            /**
             * Creates and returns a new Marker with the same properties as this
             * marker.
             */
            copy(options?: Options.CopyMarker): Marker;
            /** Destroys the marker, causing it to emit the "destroyed" event. */
            destroy(): void;
            // Event Subscription
            /** Invoke the given callback when the marker is destroyed. */
            onDidDestroy(callback: () => void): Disposable;
            /** Invoke the given callback when the state of the marker changes. */
            onDidChange(callback: (event: Events.MarkerChanged) => void): Disposable;
            // Marker Details
            /** Returns the current range of the marker. The range is immutable. */
            getRange(): Range;
            /** Returns a point representing the marker's current head position. */
            getHeadPosition(): Point;
            /** Returns a point representing the marker's current tail position. */
            getTailPosition(): Point;
            /**
             * Returns a point representing the start position of the marker, which
             * could be the head or tail position, depending on its orientation.
             */
            getStartPosition(): Point;
            /**
             * Returns a point representing the end position of the marker, which
             * could be the head or tail position, depending on its orientation.
             */
            getEndPosition(): Point;
            /** Returns a boolean indicating whether the head precedes the tail. */
            isReversed(): boolean;
            /** Returns a boolean indicating whether the marker has a tail. */
            hasTail(): boolean;
            /** Is the marker valid? */
            isValid(): boolean;
            /** Is the marker destroyed? */
            isDestroyed(): boolean;
            /**
             * Returns a boolean indicating whether changes that occur exactly at
             * the marker's head or tail cause it to move.
             */
            isExclusive(): boolean;
            /** Get the invalidation strategy for this marker. */
            getInvalidationStrategy(): string;
            // Mutating Markers
            /**
             * Sets the range of the marker.
             * Returns a boolean indicating whether or not the marker was updated.
             */
            setRange(range: RangeCompatible, params?: { reversed?: boolean | undefined, exclusive?:
                boolean | undefined }): boolean;
            /**
             * Sets the head position of the marker.
             * Returns a boolean indicating whether or not the marker was updated.
             */
            setHeadPosition(position: PointCompatible): boolean;
            /**
             * Sets the tail position of the marker.
             * Returns a boolean indicating whether or not the marker was updated.
             */
            setTailPosition(position: PointCompatible): boolean;
            /**
             * Removes the marker's tail.
             * Returns a boolean indicating whether or not the marker was updated.
             */
            clearTail(): boolean;
            /**
             * Plants the marker's tail at the current head position.
             * Returns a boolean indicating whether or not the marker was updated.
             */
            plantTail(): boolean;
            // Comparison
            /**
             * Returns a boolean indicating whether this marker is equivalent to
             * another marker, meaning they have the same range and options.
             */
            isEqual(other: Marker): boolean;
            /**
             * Compares this marker to another based on their ranges.
             * Returns "-1" if this marker precedes the argument.
             * Returns "0" if this marker is equivalent to the argument.
             * Returns "1" if this marker follows the argument.
             */
            compare(other: Marker): number;
        }
/** Experimental: A container for a related set of markers. */
        interface MarkerLayer {
            // Lifecycle
            /** Create a copy of this layer with markers in the same state and locations. */
            copy(): MarkerLayer;
            /** Destroy this layer. */
            destroy(): boolean;
            /** Remove all markers from this layer. */
            clear(): void;
            /** Determine whether this layer has been destroyed. */
            isDestroyed(): boolean;
            // Querying
            /** Get an existing marker by its id. */
            getMarker(id: number): Marker|undefined;
            /** Get all existing markers on the marker layer. */
            getMarkers(): Marker[];
            /** Get the number of markers in the marker layer. */
            getMarkerCount(): number;
            /** Find markers in the layer conforming to the given parameters. */
            findMarkers(params: Options.FindMarker): Marker[];
            // Marker Creation
            /** Create a marker with the given range. */
            markRange(range: RangeCompatible, options?: {
                reversed?: boolean | undefined,
                invalidate?: "never"|"surround"|"overlap"|"inside"|"touch" | undefined,
                exclusive?: boolean | undefined,
            }): Marker;
            /** Create a marker with its head at the given position and no tail. */
            markPosition(position: PointCompatible, options?: {
                invalidate?: "never"|"surround"|"overlap"|"inside"|"touch" | undefined,
                exclusive?: boolean | undefined,
            }): Marker;
            // Event Subscription
            /**
             * Subscribe to be notified asynchronously whenever markers are created,
             * updated, or destroyed on this layer.
             */
            onDidUpdate(callback: () => void): Disposable;
            /**
             * Subscribe to be notified synchronously whenever markers are created on
             * this layer.
             */
            onDidCreateMarker(callback: (marker: Marker) => void): Disposable;
            /** Subscribe to be notified synchronously when this layer is destroyed. */
            onDidDestroy(callback: () => void): Disposable;
        }
/**
* Represents a buffer annotation that remains logically stationary even as the
* buffer changes. This is used to represent cursors, folds, snippet targets,
* misspelled words, and anything else that needs to track a logical location
* in the buffer over time.
*/
        interface DisplayMarker {
            // Construction and Destruction
            /**
             * Destroys the marker, causing it to emit the 'destroyed' event. Once destroyed,
             * a marker cannot be restored by undo/redo operations.
             */
            destroy(): void;
            /** Creates and returns a new DisplayMarker with the same properties as this marker. */
            copy(options?: Options.CopyMarker): DisplayMarker;
            // Event Subscription
            /** Invoke the given callback when the state of the marker changes. */
            onDidChange(callback: (event: Events.DisplayMarkerChanged) => void):
                Disposable;
            /** Invoke the given callback when the marker is destroyed. */
            onDidDestroy(callback: () => void): Disposable;
            // DisplayMarker Details
            /**
             * Returns a boolean indicating whether the marker is valid. Markers can be
             * invalidated when a region surrounding them in the buffer is changed.
             */
            isValid(): boolean;
            /**
             * Returns a boolean indicating whether the marker has been destroyed. A marker
             * can be invalid without being destroyed, in which case undoing the invalidating
             * operation would restore the marker.
             */
            isDestroyed(): boolean;
            /** Returns a boolean indicating whether the head precedes the tail. */
            isReversed(): boolean;
            /**
             * Returns a boolean indicating whether changes that occur exactly at the marker's
             * head or tail cause it to move.
             */
            isExclusive(): boolean;
            /**
             * Get the invalidation strategy for this marker.
             * Valid values include: never, surround, overlap, inside, and touch.
             */
            getInvalidationStrategy(): string;
            /** Returns an Object containing any custom properties associated with the marker. */
            getProperties(): object;
            /** Merges an Object containing new properties into the marker's existing properties. */
            setProperties(properties: object): void;
            /** Returns whether this marker matches the given parameters. */
            matchesProperties(attributes: Options.FindDisplayMarker): boolean;
            // Comparing to other markers
            /** Compares this marker to another based on their ranges. */
            compare(other: DisplayMarker): number;
            /**
             * Returns a boolean indicating whether this marker is equivalent to another
             * marker, meaning they have the same range and options.
             */
            isEqual(other: DisplayMarker): boolean;
            // Managing the marker's range
            /** Gets the buffer range of this marker. */
            getBufferRange(): Range;
            /** Gets the screen range of this marker. */
            getScreenRange(): Range;
            /** Modifies the buffer range of this marker. */
            setBufferRange(bufferRange: RangeCompatible, properties?: { reversed: boolean }):
                void;
            /** Modifies the screen range of this marker. */
            setScreenRange(screenRange: RangeCompatible, options?: { reversed?: boolean | undefined,
                clipDirection?: "backward"|"forward"|"closest" | undefined }): void;
            /**
             * Retrieves the screen position of the marker's start. This will always be
             * less than or equal to the result of DisplayMarker::getEndScreenPosition.
             */
            getStartScreenPosition(options?: { clipDirection: "backward"|"forward"|"closest" }):
                Point;
            /**
             * Retrieves the screen position of the marker's end. This will always be
             * greater than or equal to the result of DisplayMarker::getStartScreenPosition.
             */
            getEndScreenPosition(options?: { clipDirection: "backward"|"forward"|"closest" }):
                Point;
            /** Retrieves the buffer position of the marker's head. */
            getHeadBufferPosition(): Point;
            /** Sets the buffer position of the marker's head. */
            setHeadBufferPosition(bufferPosition: PointCompatible): void;
            /** Retrieves the screen position of the marker's head. */
            getHeadScreenPosition(options?: { clipDirection: "backward"|"forward"|"closest" }):
                Point;
            /** Sets the screen position of the marker's head. */
            setHeadScreenPosition(screenPosition: PointCompatible,
                options?: { clipDirection: "backward"|"forward"|"closest" }): void;
            /** Retrieves the buffer position of the marker's tail. */
            getTailBufferPosition(): Point;
            /** Sets the buffer position of the marker's tail. */
            setTailBufferPosition(bufferPosition: PointCompatible): void;
            /** Retrieves the screen position of the marker's tail. */
            getTailScreenPosition(options?: { clipDirection: "backward"|"forward"|"closest" }):
                Point;
            /** Sets the screen position of the marker's tail. */
            setTailScreenPosition(screenPosition: PointCompatible,
                options?: { clipDirection: "backward"|"forward"|"closest" }): void;
            /**
             * Retrieves the buffer position of the marker's start. This will always be less
             * than or equal to the result of DisplayMarker::getEndBufferPosition.
             */
            getStartBufferPosition(): Point;
            /**
             * Retrieves the buffer position of the marker's end. This will always be greater
             * than or equal to the result of DisplayMarker::getStartBufferPosition.
             */
            getEndBufferPosition(): Point;
            /** Returns a boolean indicating whether the marker has a tail. */
            hasTail(): boolean;
            /**
             * Plants the marker's tail at the current head position. After calling, the
             * marker's tail position will be its head position at the time of the call,
             * regardless of where the marker's head is moved.
             */
            plantTail(): void;
            /**
             * Removes the marker's tail. After calling, the marker's head position will be
             * reported as its current tail position until the tail is planted again.
             */
            clearTail(): void;
        }
/**
* Experimental: A container for a related set of markers at the DisplayLayer level.
* Wraps an underlying MarkerLayer on the TextBuffer.
*
* This API is experimental and subject to change on any release.
*/
        interface DisplayMarkerLayer {
            // Lifecycle
            /** Destroy this layer. */
            destroy(): void;
            /** Destroy all markers in this layer. */
            clear(): void;
            /** Determine whether this layer has been destroyed. */
            isDestroyed(): boolean;
            // Event Subscription
            /** Subscribe to be notified synchronously when this layer is destroyed. */
            onDidDestroy(callback: () => void): Disposable;
            /**
             * Subscribe to be notified asynchronously whenever markers are created, updated,
             * or destroyed on this layer. Prefer this method for optimal performance when
             * interacting with layers that could contain large numbers of markers.
             */
            onDidUpdate(callback: () => void): Disposable;
            /**
             * Subscribe to be notified synchronously whenever markers are created on this
             * layer. Avoid this method for optimal performance when interacting with layers
             * that could contain large numbers of markers.
             */
            onDidCreateMarker(callback: (marker: DisplayMarker|Marker) => void): Disposable;
            // Marker creation
            /** Create a marker with the given screen range. */
            markScreenRange(range: RangeCompatible, options?: {
                reversed?: boolean | undefined,
                invalidate?: "never"|"surround"|"overlap"|"inside"|"touch" | undefined,
                exclusive?: boolean | undefined,
                clipDirection?: "backward"|"forward"|"closest" | undefined
            }): DisplayMarker;
            /**
             * Create a marker on this layer with its head at the given screen position
             * and no tail.
             */
            markScreenPosition(screenPosition: PointCompatible, options?: {
                invalidate?: "never"|"surround"|"overlap"|"inside"|"touch" | undefined,
                exclusive?: boolean | undefined,
                clipDirection?: "backward"|"forward"|"closest" | undefined
            }): DisplayMarker;
            /** Create a marker with the given buffer range. */
            markBufferRange(range: RangeCompatible, options?: {
                reversed?: boolean | undefined,
                invalidate?: "never"|"surround"|"overlap"|"inside"|"touch" | undefined,
                exclusive?: boolean | undefined
            }): DisplayMarker;
            /**
             * Create a marker on this layer with its head at the given buffer position
             * and no tail.
             */
            markBufferPosition(bufferPosition: PointCompatible, options?: {
                invalidate?: "never"|"surround"|"overlap"|"inside"|"touch" | undefined,
                exclusive?: boolean | undefined
            }): DisplayMarker;
            // Querying
            /** Get an existing marker by its id. */
            // NOTE(review): MarkerLayer.getMarker is typed Marker|undefined; this one
            // presumably may also yield undefined for an unknown id — confirm upstream.
            getMarker(id: number): DisplayMarker;
            /** Get all markers in the layer. */
            getMarkers(): DisplayMarker[];
            /** Get the number of markers in the marker layer. */
            getMarkerCount(): number;
            /**
             * Find markers in the layer conforming to the given parameters.
             *
             * This method finds markers based on the given properties. Markers can be associated
             * with custom properties that will be compared with basic equality. In addition,
             * there are several special properties that will be compared with the range of the
             * markers rather than their properties.
             */
            findMarkers(properties: Options.FindDisplayMarker): DisplayMarker[];
        }
/** Represents a point in a buffer in row/column coordinates. */
        interface Point {
            // Properties
            /** A zero-indexed number representing the row of the Point. */
            row: number;
            /** A zero-indexed number representing the column of the Point. */
            column: number;
            // Construction
            /** Returns a new Point with the same row and column. */
            copy(): Point;
            /** Returns a new Point with the row and column negated. */
            negate(): Point;
            // Comparison
            /**
             * Compare another Point to this Point instance.
             * Returns -1 if this point precedes the argument.
             * Returns 0 if this point is equivalent to the argument.
             * Returns 1 if this point follows the argument.
             */
            compare(other: PointCompatible): number;
            /**
             * Returns a boolean indicating whether this point has the same row and
             * column as the given Point.
             */
            isEqual(other: PointCompatible): boolean;
            /** Returns a boolean indicating whether this point precedes the given Point. */
            isLessThan(other: PointCompatible): boolean;
            /**
             * Returns a boolean indicating whether this point precedes or is equal to
             * the given Point.
             */
            isLessThanOrEqual(other: PointCompatible): boolean;
            /** Returns a boolean indicating whether this point follows the given Point. */
            isGreaterThan(other: PointCompatible): boolean;
            /**
             * Returns a boolean indicating whether this point follows or is equal to
             * the given Point.
             */
            isGreaterThanOrEqual(other: PointCompatible): boolean;
            // Operations
            /** Makes this point immutable and returns itself. */
            freeze(): Readonly<Point>;
            /**
             * Build and return a new point by adding the rows and columns of the
             * given point.
             */
            translate(other: PointCompatible): Point;
            /**
             * Build and return a new Point by traversing the rows and columns
             * specified by the given point.
             */
            traverse(other: PointCompatible): Point;
            /** Returns an array of this point's row and column. */
            toArray(): [number, number];
            /** Returns an array of this point's row and column. */
            serialize(): [number, number];
            /** Returns a string representation of the point. */
            toString(): string;
        }
/** The static side to the Point class. */
        interface PointStatic {
            /**
             * Create a Point from a point-compatible value: an existing point-like
             * object, or an array containing two numbers representing the row and
             * column.
             */
            fromObject(object: PointCompatible, copy?: boolean): Point;
            /** Construct a Point object. */
            new (row?: number, column?: number): Point;
            /** Returns the given Point that is earlier in the buffer. */
            min(point1: PointCompatible, point2: PointCompatible): Point;
        }
        /** The types usable when constructing a point via the Point::fromObject method. */
        type PointCompatible = PointLike|[number, number];
        /** The interface that should be implemented for all "point-compatible" objects. */
        interface PointLike {
            /** A zero-indexed number representing the row of the Point. */
            row: number;
            /** A zero-indexed number representing the column of the Point. */
            column: number;
        }
/** Represents a region in a buffer in row/column coordinates. */
        interface Range {
            // Properties
            /** A Point representing the start of the Range. */
            start: PointLike;
            /** A Point representing the end of the Range. */
            end: PointLike;
            // Construction
            /** Returns a new range with the same start and end positions. */
            copy(): Range;
            /** Returns a new range with the start and end positions negated. */
            negate(): Range;
            // Serialization and Deserialization
            /**
             * Returns a plain javascript representation of the range: an array of
             * [row, column] number pairs.
             */
            serialize(): number[][];
            // Range Details
            /** Is the start position of this range equal to the end position? */
            isEmpty(): boolean;
            /**
             * Returns a boolean indicating whether this range starts and ends on the
             * same row.
             */
            isSingleLine(): boolean;
            /** Get the number of rows in this range. */
            getRowCount(): number;
            /** Returns an array of all rows in the range. */
            getRows(): number[];
            // Operations
            /**
             * Freezes the range and its start and end point so it becomes immutable
             * and returns itself.
             */
            freeze(): Readonly<Range>;
            // NOTE: this function doesn't actually take a range-compatible parameter.
            /** Returns a new range that contains this range and the given range. */
            union(other: RangeLike): Range;
            /**
             * Build and return a new range by translating this range's start and end
             * points by the given delta(s).
             */
            translate(startDelta: PointCompatible, endDelta?: PointCompatible): Range;
            /**
             * Build and return a new range by traversing this range's start and end
             * points by the given delta.
             */
            traverse(delta: PointCompatible): Range;
            // Comparison
            /**
             * Compare two Ranges.
             * Returns -1 if this range starts before the argument or contains it.
             * Returns 0 if this range is equivalent to the argument.
             * Returns 1 if this range starts after the argument or is contained by it.
             */
            compare(otherRange: RangeCompatible): number;
            /**
             * Returns a boolean indicating whether this range has the same start and
             * end points as the given Range.
             */
            isEqual(otherRange: RangeCompatible): boolean;
            // NOTE: this function doesn't actually take a range-compatible parameter.
            /**
             * Returns a boolean indicating whether this range starts and ends on the
             * same row as the argument.
             */
            coversSameRows(otherRange: RangeLike): boolean;
            // NOTE: this function doesn't actually take a range-compatible parameter.
            /** Determines whether this range intersects with the argument. */
            intersectsWith(otherRange: RangeLike, exclusive?: boolean): boolean;
            /** Returns a boolean indicating whether this range contains the given range. */
            containsRange(otherRange: RangeCompatible, exclusive?: boolean): boolean;
            /** Returns a boolean indicating whether this range contains the given point. */
            containsPoint(point: PointCompatible, exclusive?: boolean): boolean;
            /**
             * Returns a boolean indicating whether this range intersects the given
             * row number.
             */
            intersectsRow(row: number): boolean;
            /**
             * Returns a boolean indicating whether this range intersects the row range
             * indicated by the given startRow and endRow numbers.
             */
            intersectsRowRange(startRow: number, endRow: number): boolean;
            // Conversion
            /** Returns a string representation of the range. */
            toString(): string;
        }
/** The static side to the Range class. */
        interface RangeStatic {
            /** Convert any range-compatible object to a Range, optionally copying it. */
            fromObject(object: RangeCompatible, copy?: boolean): Range;
            /** Construct a Range object. */
            new (pointA?: PointCompatible, pointB?: PointCompatible): Range;
            /** Call this with the result of Range::serialize to construct a new Range. */
            deserialize(array: object): Range;
        }
        /** The types usable when constructing a range via the Range::fromObject method. */
        type RangeCompatible =
            | RangeLike
            | [PointLike, PointLike]
            | [PointLike, [number, number]]
            | [[number, number], PointLike]
            | [[number, number], [number, number]];
        /** The interface that should be implemented for all "range-compatible" objects. */
        interface RangeLike {
            /** A Point representing the start of the Range. */
            start: PointLike;
            /** A Point representing the end of the Range. */
            end: PointLike;
        }
/**
* A mutable text container with undo/redo support and the ability to
* annotate logical regions in the text.
*/
interface TextBuffer {
// Properties
file: File;
lines: string[];
lineEndings: string[];
stoppedChangingDelay: number;
conflict: boolean;
loaded: boolean;
destroyed: boolean;
refcount: number;
id: string;
/**
* Schedules a 'did-stop-changing' emission. The event will be emitted between
* now and TextBuffer::stoppedChangingDelay milliseconds in the future.
*/
debouncedEmitDidStopChangingEvent(): void;
// Lifecycle
/** Destroys the buffer, emitting the 'did-destroy' prior to doing so. */
destroy(): void;
/** Returns whether or not the given buffer is alive. */
isAlive(): boolean;
/** Returns whether or not the given buffer has been destroyed. */
isDestroyed(): boolean;
/** Returns whether or not this text buffer is currently retained. */
isRetained(): boolean;
/** Retains the text buffer, preventing its destruction via TextBuffer::release. */
retain(): TextBuffer;
/** Release the text buffer, destroying it if there are no other retainers. */
release(): TextBuffer;
// Event Subscription
/**
* Invoke the given callback synchronously before the content of the buffer
* changes.
*/
onWillChange(callback: (event: Events.BufferChanging) => void): Disposable;
/**
* Invoke the given callback synchronously when the content of the buffer
* changes. You should probably not be using this in packages.
*/
onDidChange(callback: (event: Events.BufferChanged) => void): Disposable;
/**
* Invoke the given callback synchronously when a transaction finishes with
* a list of all the changes in the transaction.
*/
onDidChangeText(callback: (event: Events.BufferStoppedChanging) => void):
Disposable;
/**
* Invoke the given callback asynchronously following one or more changes after
* ::getStoppedChangingDelay milliseconds elapse without an additional change.
*/
onDidStopChanging(callback: (event: Events.BufferStoppedChanging) => void):
Disposable;
/**
* Invoke the given callback when the in-memory contents of the buffer become
* in conflict with the contents of the file on disk.
*/
onDidConflict(callback: () => void): Disposable;
/** Invoke the given callback if the value of ::isModified changes. */
onDidChangeModified(callback: (modified: boolean) => void): Disposable;
/**
* Invoke the given callback when all marker ::onDidChange observers have been
* notified following a change to the buffer.
*/
onDidUpdateMarkers(callback: () => void): Disposable;
onDidCreateMarker(callback: (marker: Marker) => void): Disposable;
/** Invoke the given callback when the value of ::getPath changes. */
onDidChangePath(callback: (path: string) => void): Disposable;
/** Invoke the given callback when the value of ::getEncoding changes. */
onDidChangeEncoding(callback: (encoding: string) => void): Disposable;
/**
* Invoke the given callback before the buffer is saved to disk. If the
* given callback returns a promise, then the buffer will not be saved until
* the promise resolves.
*/
onWillSave(callback: () => Promise<void>|void): Disposable;
/** Invoke the given callback after the buffer is saved to disk. */
onDidSave(callback: (event: Events.FileSaved) => void): Disposable;
/** Invoke the given callback after the file backing the buffer is deleted. */
onDidDelete(callback: () => void): Disposable;
/**
* Invoke the given callback before the buffer is reloaded from the contents
* of its file on disk.
*/
onWillReload(callback: () => void): Disposable;
/**
* Invoke the given callback after the buffer is reloaded from the contents
* of its file on disk.
*/
onDidReload(callback: () => void): Disposable;
/** Invoke the given callback when the buffer is destroyed. */
onDidDestroy(callback: () => void): Disposable;
/** Invoke the given callback when there is an error in watching the file. */
onWillThrowWatchError(callback: (errorObject: Events.BufferWatchError) =>
void): Disposable;
/**
* Get the number of milliseconds that will elapse without a change before
* ::onDidStopChanging observers are invoked following a change.
*/
getStoppedChangingDelay(): number;
/** Performs the necessary work, then emits the 'did-stop-changing' event. */
emitDidStopChangingEvent(): void;
// File Details
/**
* Determine if the in-memory contents of the buffer differ from its contents
* on disk.
* If the buffer is unsaved, always returns true unless the buffer is empty.
*/
isModified(): boolean;
/**
* Determine if the in-memory contents of the buffer conflict with the on-disk
* contents of its associated file.
*/
isInConflict(): boolean;
/** Get the path of the associated file, or undefined if there is none. */
getPath(): string|undefined;
/** Set the path for the buffer's associated file. */
setPath(filePath: string): void;
/** Sets the character set encoding for this buffer. */
setEncoding(encoding: string): void;
/** Returns the string encoding of this buffer. */
getEncoding(): string;
/** Get a URI identifying the buffer (typically the path of the associated file). */
getUri(): string;
/** Identifies if the buffer belongs to multiple editors. */
hasMultipleEditors(): boolean;
// Reading Text
/** Determine whether the buffer is empty. */
isEmpty(): boolean;
/** Get the entire text of the buffer. */
getText(): string;
/** Get the text in a range. */
getTextInRange(range: RangeCompatible): string;
/** Get the text of all lines in the buffer, without their line endings. */
getLines(): string[];
/** Get the text of the last line of the buffer, without its line ending. */
getLastLine(): string;
/**
* Get the text of the line at the given row, without its line ending.
* Returns undefined if the row is out of range (per the declared return type).
*/
lineForRow(row: number): string|undefined;
/**
* Get the line ending for the given 0-indexed row.
* Returns undefined if the row is out of range (per the declared return type).
*/
lineEndingForRow(row: number): string|undefined;
/**
* Get the length of the line for the given 0-indexed row, without its line
* ending.
*/
lineLengthForRow(row: number): number;
/** Determine if the given row contains only whitespace. */
isRowBlank(row: number): boolean;
/**
* Given a row, find the first preceding row that's not blank.
* Returns a number or null if there's no preceding non-blank row.
*/
previousNonBlankRow(startRow: number): number|null;
/**
* Given a row, find the next row that's not blank.
* Returns a number or null if there's no next non-blank row.
*/
nextNonBlankRow(startRow: number): number|null;
// Mutating Text
/**
* Replace the entire contents of the buffer with the given text.
* Returns the Range of the new text.
*/
setText(text: string): Range;
/**
* Replace the current buffer contents by applying a diff based on the
* given text.
*/
setTextViaDiff(text: string): void;
/** Set the text in the given range. Returns the Range of the new text. */
setTextInRange(range: RangeCompatible, text: string, options?:
{ normalizeLineEndings?: boolean | undefined, undo?: "skip" | undefined }): Range;
/** Insert text at the given position. Returns the Range of the inserted text. */
insert(position: PointCompatible, text: string, options?:
{ normalizeLineEndings?: boolean | undefined, undo?: "skip" | undefined }): Range;
/** Append text to the end of the buffer. Returns the Range of the inserted text. */
append(text: string, options?: { normalizeLineEndings?: boolean | undefined, undo?:
"skip" | undefined }): Range;
/** Delete the text in the given range. Returns the Range of the deleted text. */
delete(range: RangeCompatible): Range;
/** Delete the line associated with a specified row. Returns the Range of the deleted text. */
deleteRow(row: number): Range;
/** Delete the lines associated with the specified row range. Returns the Range of the deleted text. */
deleteRows(startRow: number, endRow: number): Range;
// Markers
/** Create a layer to contain a set of related markers. */
addMarkerLayer(options?: {
maintainHistory?: boolean | undefined,
persistent?: boolean | undefined
}): MarkerLayer;
/**
* Get a MarkerLayer by id.
* Returns a MarkerLayer or `undefined` if no layer exists with the given id.
*/
getMarkerLayer(id: string): MarkerLayer|undefined;
/** Get the default MarkerLayer. */
getDefaultMarkerLayer(): MarkerLayer;
/** Create a marker with the given range in the default marker layer. */
markRange(range: RangeCompatible, properties?: { reversed?: boolean | undefined,
invalidate?: "never"|"surround"|"overlap"|"inside"|"touch" | undefined,
exclusive?: boolean | undefined }): Marker;
/** Create a marker at the given position with no tail in the default marker layer. */
markPosition(position: PointCompatible, options?: { invalidate?: "never"|"surround"
|"overlap"|"inside"|"touch" | undefined, exclusive?: boolean | undefined }): Marker;
/** Get all existing markers on the default marker layer. */
getMarkers(): Marker[];
/** Get an existing marker by its id from the default marker layer. */
getMarker(id: number): Marker;
/** Find markers conforming to the given parameters in the default marker layer. */
findMarkers(params: Options.FindMarker): Marker[];
/** Get the number of markers in the default marker layer. */
getMarkerCount(): number;
// History
/**
* Undo the last operation. If a transaction is in progress, aborts it.
* Returns a boolean indicating whether a change was made.
*/
undo(): boolean;
/**
* Redo the last operation.
* Returns a boolean indicating whether a change was made.
*/
redo(): boolean;
/** Batch multiple operations as a single undo/redo step. */
transact<T>(groupingInterval: number, fn: () => T): T;
transact<T>(fn: () => T): T;
/**
* Call within a transaction to terminate the function's execution and
* revert any changes performed up to the abortion.
*/
abortTransaction(): void;
/**
* Clear the undo stack. When calling this method within a transaction,
* the ::onDidChangeText event will not be triggered because the information
* describing the changes is lost.
*/
clearUndoStack(): void;
/**
* Create a pointer to the current state of the buffer for use with
* ::revertToCheckpoint and ::groupChangesSinceCheckpoint.
*/
createCheckpoint(): number;
/**
* Revert the buffer to the state it was in when the given checkpoint was created.
* Returns a boolean indicating whether the operation succeeded.
*/
revertToCheckpoint(checkpoint: number): boolean;
/**
* Group all changes since the given checkpoint into a single transaction for
* purposes of undo/redo.
* Returns a boolean indicating whether the operation succeeded.
*/
groupChangesSinceCheckpoint(checkpoint: number): boolean;
/**
* Returns a list of changes since the given checkpoint.
* If the given checkpoint is no longer present in the undo history, this method
* will return an empty Array.
*/
getChangesSinceCheckpoint(checkpoint: number): Array<{
/** A Point representing where the change started. */
start: Point,
/** A Point representing the replaced extent. */
oldExtent: Point,
/** A Point representing the replacement extent. */
newExtent: Point,
/** A String representing the replacement text. */
newText: string
}>;
// Search and Replace
/**
* Scan regular expression matches in the entire buffer, calling the given
* iterator function on each match.
*/
scan(regex: RegExp, iterator: (params: Structures.BufferScanResult) => void): void;
/**
* Scan regular expression matches in the entire buffer, calling the given
* iterator function on each match.
*/
scan(regex: RegExp, options: Options.ScanContext, iterator: (params:
Structures.ContextualBufferScanResult) => void): void;
/**
* Scan regular expression matches in the entire buffer in reverse order,
* calling the given iterator function on each match.
*/
backwardsScan(regex: RegExp, iterator: (params: Structures.BufferScanResult) => void):
void;
/**
* Scan regular expression matches in the entire buffer in reverse order,
* calling the given iterator function on each match.
*/
backwardsScan(regex: RegExp, options: Options.ScanContext, iterator: (params:
Structures.ContextualBufferScanResult) => void): void;
/**
* Scan regular expression matches in a given range, calling the given
* iterator function on each match.
*/
scanInRange(regex: RegExp, range: RangeCompatible, iterator:
(params: Structures.BufferScanResult) => void): void;
/**
* Scan regular expression matches in a given range, calling the given
* iterator function on each match.
*/
scanInRange(regex: RegExp, range: RangeCompatible, options: Options.ScanContext,
iterator: (params: Structures.ContextualBufferScanResult) => void): void;
/**
* Scan regular expression matches in a given range in reverse order,
* calling the given iterator function on each match.
*/
backwardsScanInRange(regex: RegExp, range: RangeCompatible, iterator:
(params: Structures.BufferScanResult) => void): void;
/**
* Scan regular expression matches in a given range in reverse order,
* calling the given iterator function on each match.
*/
backwardsScanInRange(regex: RegExp, range: RangeCompatible, options:
Options.ScanContext, iterator:
(params: Structures.ContextualBufferScanResult) => void): void;
/**
* Replace all regular expression matches in the entire buffer.
* Returns the number of replacements made.
*/
replace(regex: RegExp, replacementText: string): number;
// Buffer Range Details
/** Get the range spanning from [0, 0] to ::getEndPosition. */
getRange(): Range;
/** Get the number of lines in the buffer. */
getLineCount(): number;
/** Get the last 0-indexed row in the buffer. */
getLastRow(): number;
/** Get the first position in the buffer, which is always [0, 0]. */
getFirstPosition(): Point;
/** Get the maximal position in the buffer, where new text would be appended. */
getEndPosition(): Point;
/** Get the length of the buffer in characters. */
getMaxCharacterIndex(): number;
/**
* Get the range for the given row.
* When includeNewline is true, the range extends to include the row's line ending.
*/
rangeForRow(row: number, includeNewline: boolean): Range;
/**
* Convert a position in the buffer in row/column coordinates to an absolute
* character offset, inclusive of line ending characters.
*/
characterIndexForPosition(position: Point|[number, number]): number;
/**
* Convert an absolute character offset, inclusive of newlines, to a position
* in the buffer in row/column coordinates.
*/
positionForCharacterIndex(offset: number): Point;
/** Clip the given range so it starts and ends at valid positions. */
clipRange(range: RangeCompatible): Range;
/** Clip the given point so it is at a valid position in the buffer. */
clipPosition(position: PointCompatible): Point;
// Buffer Operations
/** Save the buffer. The returned promise resolves when the disk write completes. */
save(): Promise<void>;
/** Save the buffer at a specific path. The returned promise resolves when the disk write completes. */
saveAs(filePath: string): Promise<void>;
/** Reload the buffer's contents from disk. */
reload(): void;
}
/** The static side to the TextBuffer class. */
interface TextBufferStatic {
Point: PointStatic;
Range: RangeStatic;
/**
* Create a new buffer backed by the given file path.
* The returned promise resolves to the buffer once it has been loaded.
*/
load(source: string, params?: Options.BufferLoad): Promise<TextBuffer>;
/**
* Create a new buffer backed by the given file path, loading its contents
* synchronously. For better performance, use TextBuffer.load instead.
*/
loadSync(filePath: string, params?: Options.BufferLoad): TextBuffer;
/**
* Restore a TextBuffer based on an earlier state created using the
* TextBuffer::serialize method.
*/
deserialize(params: object): Promise<TextBuffer>;
/** Create a new buffer with the given starting text. */
new (text: string): TextBuffer;
/** Create a new buffer with the given params. */
new (params?: {
/** The initial string text of the buffer. */
text?: string | undefined
/**
* A function that returns a Boolean indicating whether the buffer should
* be destroyed if its file is deleted.
*/
shouldDestroyOnFileDelete?(): boolean
}): TextBuffer;
}
}
}
declare const tb: TextBuffer.TextBufferStatic;
export = tb;
/**
* Legacy Node.js "constants" module. Most values here are deprecated and are
* mirrored under the namespaced os.constants, fs.constants and
* crypto.constants objects (see the per-member deprecation notes below).
*/
declare module "constants" {
/** @deprecated since v6.3.0 - use `os.constants.errno.E2BIG` instead. */
const E2BIG: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EACCES` instead. */
const EACCES: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EADDRINUSE` instead. */
const EADDRINUSE: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EADDRNOTAVAIL` instead. */
const EADDRNOTAVAIL: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EAFNOSUPPORT` instead. */
const EAFNOSUPPORT: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EAGAIN` instead. */
const EAGAIN: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EALREADY` instead. */
const EALREADY: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EBADF` instead. */
const EBADF: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EBADMSG` instead. */
const EBADMSG: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EBUSY` instead. */
const EBUSY: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ECANCELED` instead. */
const ECANCELED: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ECHILD` instead. */
const ECHILD: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ECONNABORTED` instead. */
const ECONNABORTED: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ECONNREFUSED` instead. */
const ECONNREFUSED: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ECONNRESET` instead. */
const ECONNRESET: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EDEADLK` instead. */
const EDEADLK: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EDESTADDRREQ` instead. */
const EDESTADDRREQ: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EDOM` instead. */
const EDOM: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EEXIST` instead. */
const EEXIST: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EFAULT` instead. */
const EFAULT: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EFBIG` instead. */
const EFBIG: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EHOSTUNREACH` instead. */
const EHOSTUNREACH: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EIDRM` instead. */
const EIDRM: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EILSEQ` instead. */
const EILSEQ: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EINPROGRESS` instead. */
const EINPROGRESS: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EINTR` instead. */
const EINTR: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EINVAL` instead. */
const EINVAL: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EIO` instead. */
const EIO: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EISCONN` instead. */
const EISCONN: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EISDIR` instead. */
const EISDIR: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ELOOP` instead. */
const ELOOP: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EMFILE` instead. */
const EMFILE: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EMLINK` instead. */
const EMLINK: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EMSGSIZE` instead. */
const EMSGSIZE: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENAMETOOLONG` instead. */
const ENAMETOOLONG: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENETDOWN` instead. */
const ENETDOWN: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENETRESET` instead. */
const ENETRESET: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENETUNREACH` instead. */
const ENETUNREACH: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENFILE` instead. */
const ENFILE: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOBUFS` instead. */
const ENOBUFS: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENODATA` instead. */
const ENODATA: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENODEV` instead. */
const ENODEV: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOENT` instead. */
const ENOENT: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOEXEC` instead. */
const ENOEXEC: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOLCK` instead. */
const ENOLCK: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOLINK` instead. */
const ENOLINK: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOMEM` instead. */
const ENOMEM: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOMSG` instead. */
const ENOMSG: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOPROTOOPT` instead. */
const ENOPROTOOPT: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOSPC` instead. */
const ENOSPC: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOSR` instead. */
const ENOSR: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOSTR` instead. */
const ENOSTR: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOSYS` instead. */
const ENOSYS: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOTCONN` instead. */
const ENOTCONN: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOTDIR` instead. */
const ENOTDIR: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOTEMPTY` instead. */
const ENOTEMPTY: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOTSOCK` instead. */
const ENOTSOCK: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOTSUP` instead. */
const ENOTSUP: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOTTY` instead. */
const ENOTTY: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ENXIO` instead. */
const ENXIO: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EOPNOTSUPP` instead. */
const EOPNOTSUPP: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EOVERFLOW` instead. */
const EOVERFLOW: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EPERM` instead. */
const EPERM: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EPIPE` instead. */
const EPIPE: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EPROTO` instead. */
const EPROTO: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EPROTONOSUPPORT` instead. */
const EPROTONOSUPPORT: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EPROTOTYPE` instead. */
const EPROTOTYPE: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ERANGE` instead. */
const ERANGE: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EROFS` instead. */
const EROFS: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ESPIPE` instead. */
const ESPIPE: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ESRCH` instead. */
const ESRCH: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ETIME` instead. */
const ETIME: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ETIMEDOUT` instead. */
const ETIMEDOUT: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.ETXTBSY` instead. */
const ETXTBSY: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EWOULDBLOCK` instead. */
const EWOULDBLOCK: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.EXDEV` instead. */
const EXDEV: number;
// Windows Sockets (Winsock) error codes; only meaningful on Windows.
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEINTR` instead. */
const WSAEINTR: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEBADF` instead. */
const WSAEBADF: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEACCES` instead. */
const WSAEACCES: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEFAULT` instead. */
const WSAEFAULT: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEINVAL` instead. */
const WSAEINVAL: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEMFILE` instead. */
const WSAEMFILE: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEWOULDBLOCK` instead. */
const WSAEWOULDBLOCK: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEINPROGRESS` instead. */
const WSAEINPROGRESS: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEALREADY` instead. */
const WSAEALREADY: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAENOTSOCK` instead. */
const WSAENOTSOCK: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEDESTADDRREQ` instead. */
const WSAEDESTADDRREQ: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEMSGSIZE` instead. */
const WSAEMSGSIZE: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEPROTOTYPE` instead. */
const WSAEPROTOTYPE: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAENOPROTOOPT` instead. */
const WSAENOPROTOOPT: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEPROTONOSUPPORT` instead. */
const WSAEPROTONOSUPPORT: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAESOCKTNOSUPPORT` instead. */
const WSAESOCKTNOSUPPORT: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEOPNOTSUPP` instead. */
const WSAEOPNOTSUPP: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEPFNOSUPPORT` instead. */
const WSAEPFNOSUPPORT: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEAFNOSUPPORT` instead. */
const WSAEAFNOSUPPORT: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEADDRINUSE` instead. */
const WSAEADDRINUSE: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEADDRNOTAVAIL` instead. */
const WSAEADDRNOTAVAIL: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAENETDOWN` instead. */
const WSAENETDOWN: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAENETUNREACH` instead. */
const WSAENETUNREACH: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAENETRESET` instead. */
const WSAENETRESET: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAECONNABORTED` instead. */
const WSAECONNABORTED: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAECONNRESET` instead. */
const WSAECONNRESET: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAENOBUFS` instead. */
const WSAENOBUFS: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEISCONN` instead. */
const WSAEISCONN: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAENOTCONN` instead. */
const WSAENOTCONN: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAESHUTDOWN` instead. */
const WSAESHUTDOWN: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAETOOMANYREFS` instead. */
const WSAETOOMANYREFS: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAETIMEDOUT` instead. */
const WSAETIMEDOUT: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAECONNREFUSED` instead. */
const WSAECONNREFUSED: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAELOOP` instead. */
const WSAELOOP: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAENAMETOOLONG` instead. */
const WSAENAMETOOLONG: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEHOSTDOWN` instead. */
const WSAEHOSTDOWN: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEHOSTUNREACH` instead. */
const WSAEHOSTUNREACH: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAENOTEMPTY` instead. */
const WSAENOTEMPTY: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEPROCLIM` instead. */
const WSAEPROCLIM: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEUSERS` instead. */
const WSAEUSERS: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEDQUOT` instead. */
const WSAEDQUOT: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAESTALE` instead. */
const WSAESTALE: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEREMOTE` instead. */
const WSAEREMOTE: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSASYSNOTREADY` instead. */
const WSASYSNOTREADY: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAVERNOTSUPPORTED` instead. */
const WSAVERNOTSUPPORTED: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSANOTINITIALISED` instead. */
const WSANOTINITIALISED: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEDISCON` instead. */
const WSAEDISCON: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAENOMORE` instead. */
const WSAENOMORE: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAECANCELLED` instead. */
const WSAECANCELLED: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEINVALIDPROCTABLE` instead. */
const WSAEINVALIDPROCTABLE: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEINVALIDPROVIDER` instead. */
const WSAEINVALIDPROVIDER: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEPROVIDERFAILEDINIT` instead. */
const WSAEPROVIDERFAILEDINIT: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSASYSCALLFAILURE` instead. */
const WSASYSCALLFAILURE: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSASERVICE_NOT_FOUND` instead. */
const WSASERVICE_NOT_FOUND: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSATYPE_NOT_FOUND` instead. */
const WSATYPE_NOT_FOUND: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSA_E_NO_MORE` instead. */
const WSA_E_NO_MORE: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSA_E_CANCELLED` instead. */
const WSA_E_CANCELLED: number;
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEREFUSED` instead. */
const WSAEREFUSED: number;
// Process signal numbers.
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGHUP` instead. */
const SIGHUP: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGINT` instead. */
const SIGINT: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGILL` instead. */
const SIGILL: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGABRT` instead. */
const SIGABRT: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGFPE` instead. */
const SIGFPE: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGKILL` instead. */
const SIGKILL: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGSEGV` instead. */
const SIGSEGV: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGTERM` instead. */
const SIGTERM: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGBREAK` instead. */
const SIGBREAK: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGWINCH` instead. */
const SIGWINCH: number;
// OpenSSL SSL_OP_* protocol option flags.
const SSL_OP_ALL: number;
const SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION: number;
const SSL_OP_CIPHER_SERVER_PREFERENCE: number;
const SSL_OP_CISCO_ANYCONNECT: number;
const SSL_OP_COOKIE_EXCHANGE: number;
const SSL_OP_CRYPTOPRO_TLSEXT_BUG: number;
const SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS: number;
const SSL_OP_EPHEMERAL_RSA: number;
const SSL_OP_LEGACY_SERVER_CONNECT: number;
const SSL_OP_MICROSOFT_BIG_SSLV3_BUFFER: number;
const SSL_OP_MICROSOFT_SESS_ID_BUG: number;
const SSL_OP_MSIE_SSLV2_RSA_PADDING: number;
const SSL_OP_NETSCAPE_CA_DN_BUG: number;
const SSL_OP_NETSCAPE_CHALLENGE_BUG: number;
const SSL_OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG: number;
const SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG: number;
const SSL_OP_NO_COMPRESSION: number;
const SSL_OP_NO_QUERY_MTU: number;
const SSL_OP_NO_SESSION_RESUMPTION_ON_RENEGOTIATION: number;
const SSL_OP_NO_SSLv2: number;
const SSL_OP_NO_SSLv3: number;
const SSL_OP_NO_TICKET: number;
const SSL_OP_NO_TLSv1: number;
const SSL_OP_NO_TLSv1_1: number;
const SSL_OP_NO_TLSv1_2: number;
const SSL_OP_PKCS1_CHECK_1: number;
const SSL_OP_PKCS1_CHECK_2: number;
const SSL_OP_SINGLE_DH_USE: number;
const SSL_OP_SINGLE_ECDH_USE: number;
const SSL_OP_SSLEAY_080_CLIENT_DH_BUG: number;
const SSL_OP_SSLREF2_REUSE_CERT_TYPE_BUG: number;
const SSL_OP_TLS_BLOCK_PADDING_BUG: number;
const SSL_OP_TLS_D5_BUG: number;
const SSL_OP_TLS_ROLLBACK_BUG: number;
// OpenSSL engine method flags.
const ENGINE_METHOD_DSA: number;
const ENGINE_METHOD_DH: number;
const ENGINE_METHOD_RAND: number;
const ENGINE_METHOD_ECDH: number;
const ENGINE_METHOD_ECDSA: number;
const ENGINE_METHOD_CIPHERS: number;
const ENGINE_METHOD_DIGESTS: number;
const ENGINE_METHOD_STORE: number;
const ENGINE_METHOD_PKEY_METHS: number;
const ENGINE_METHOD_PKEY_ASN1_METHS: number;
const ENGINE_METHOD_ALL: number;
const ENGINE_METHOD_NONE: number;
// Diffie-Hellman parameter check result flags.
const DH_CHECK_P_NOT_SAFE_PRIME: number;
const DH_CHECK_P_NOT_PRIME: number;
const DH_UNABLE_TO_CHECK_GENERATOR: number;
const DH_NOT_SUITABLE_GENERATOR: number;
// RSA padding modes.
const RSA_PKCS1_PADDING: number;
const RSA_SSLV23_PADDING: number;
const RSA_NO_PADDING: number;
const RSA_PKCS1_OAEP_PADDING: number;
const RSA_X931_PADDING: number;
const RSA_PKCS1_PSS_PADDING: number;
// Elliptic-curve point conversion forms.
const POINT_CONVERSION_COMPRESSED: number;
const POINT_CONVERSION_UNCOMPRESSED: number;
const POINT_CONVERSION_HYBRID: number;
// File open flags and file type/permission mode bits.
const O_RDONLY: number;
const O_WRONLY: number;
const O_RDWR: number;
const S_IFMT: number;
const S_IFREG: number;
const S_IFDIR: number;
const S_IFCHR: number;
const S_IFBLK: number;
const S_IFIFO: number;
const S_IFSOCK: number;
const S_IRWXU: number;
const S_IRUSR: number;
const S_IWUSR: number;
const S_IXUSR: number;
const S_IRWXG: number;
const S_IRGRP: number;
const S_IWGRP: number;
const S_IXGRP: number;
const S_IRWXO: number;
const S_IROTH: number;
const S_IWOTH: number;
const S_IXOTH: number;
const S_IFLNK: number;
const O_CREAT: number;
const O_EXCL: number;
const O_NOCTTY: number;
const O_DIRECTORY: number;
const O_NOATIME: number;
const O_NOFOLLOW: number;
const O_SYNC: number;
const O_DSYNC: number;
const O_SYMLINK: number;
const O_DIRECT: number;
const O_NONBLOCK: number;
const O_TRUNC: number;
const O_APPEND: number;
// File accessibility check modes.
const F_OK: number;
const R_OK: number;
const W_OK: number;
const X_OK: number;
// Copy-file behavior flags.
const COPYFILE_EXCL: number;
const COPYFILE_FICLONE: number;
const COPYFILE_FICLONE_FORCE: number;
// libuv UDP socket flag.
const UV_UDP_REUSEADDR: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGQUIT` instead. */
const SIGQUIT: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGTRAP` instead. */
const SIGTRAP: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGIOT` instead. */
const SIGIOT: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGBUS` instead. */
const SIGBUS: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGUSR1` instead. */
const SIGUSR1: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGUSR2` instead. */
const SIGUSR2: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGPIPE` instead. */
const SIGPIPE: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGALRM` instead. */
const SIGALRM: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGCHLD` instead. */
const SIGCHLD: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGSTKFLT` instead. */
const SIGSTKFLT: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGCONT` instead. */
const SIGCONT: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGSTOP` instead. */
const SIGSTOP: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGTSTP` instead. */
const SIGTSTP: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGTTIN` instead. */
const SIGTTIN: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGTTOU` instead. */
const SIGTTOU: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGURG` instead. */
const SIGURG: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGXCPU` instead. */
const SIGXCPU: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGXFSZ` instead. */
const SIGXFSZ: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGVTALRM` instead. */
const SIGVTALRM: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGPROF` instead. */
const SIGPROF: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGIO` instead. */
const SIGIO: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGPOLL` instead. */
const SIGPOLL: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGPWR` instead. */
const SIGPWR: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGSYS` instead. */
const SIGSYS: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGUNUSED` instead. */
const SIGUNUSED: number;
// TLS default cipher lists and miscellaneous crypto/TLS flags.
const defaultCoreCipherList: string;
const defaultCipherList: string;
const ENGINE_METHOD_RSA: number;
const ALPN_ENABLED: number;
}
import { apply, chain, mergeWith, move, Rule, Tree, url, MergeStrategy, SchematicContext, Source } from '@angular-devkit/schematics';
import {
applyAndLog, addOrReplaceScriptInPackageJson, addOpenCollective, updateGitIgnore, addDependencyInjection,
createOrOverwriteFile, addEntryToEnvironment, getMethodBody, updateMethod, addMethod, addImportStatement, getDistFolder,
isUniversal, getBrowserDistFolder, getServerDistFolder, implementInterface, getNgToolkitInfo, updateNgToolkitInfo, addDependencyToPackageJson, parseYML2JS, parseJS2YML
} from '@ng-toolkit/_utils';
import { getFileContent } from '@schematics/angular/utility/test';
import { NodePackageInstallTask } from '@angular-devkit/schematics/tasks';
import { Path } from '@angular-devkit/core';
import { NodeDependencyType } from '@schematics/angular/utility/dependencies';
import { IServerlessSchema } from './schema';
import outdent from 'outdent';
import bugsnag from '@bugsnag/js';
const bugsnagClient = bugsnag('0b326fddc255310e516875c9874fed91');
/**
 * Schematic entry point: assembles the rule chain that adds serverless
 * deployment support (AWS / Google Cloud / Firebase) to an Angular project.
 * Rules are pushed in dependency order and executed as one `chain`.
 * @param options serverless options schema collected from the CLI
 */
export default function addServerless(options: IServerlessSchema): Rule {
    if (!options.clientProject) {
        options.clientProject = options.project;
    }
    // Register bugsnag in order to catch and notify any rule error.
    bugsnagClient.config.beforeSend = (report) => {
        report.metaData = {
            subsystem: {
                package: 'serverless',
                options: options
            }
        }
    }
    // Initialize Serverless property with empty object values.
    options.serverless = {
        aws: {},
        gcloud: {}
    };
    // Move source files to resolved path in the virtual tree.
    const templateSource = apply(url('files/common'), [
        move(options.directory),
    ]);
    // Create an empty array to push our rules.
    const rules: Rule[] = [];
    // Check if Universal and Serverless Rules
    rules.push(checkIfUniversal(options, templateSource));
    rules.push(checkIfServerless(options));
    // Add Dependencies to package json by using custom function instead of Angular built-in.
    // The Angular way do not let us customize the project path to check for package.json.
    rules.push((tree: Tree) => {
        addDependencyToPackageJson(tree, options, {
            type: NodeDependencyType.Dev,
            name: 'ts-loader',
            version: '^6.2.1',
        });
        addDependencyToPackageJson(tree, options, {
            type: NodeDependencyType.Dev,
            name: 'webpack-cli',
            version: '^3.3.10'
        });
        addDependencyToPackageJson(tree, options, {
            type: NodeDependencyType.Default,
            name: 'cors',
            version: '^2.8.5'
        });
        // TODO: Remove the note below.
        // cp-cli got deprecated by the author in favour of cpy-cli.
        if (options.provider === 'firebase') {
            addDependencyToPackageJson(tree, options, {
                type: NodeDependencyType.Default,
                name: 'cpy-cli',
                version: '^3.0.0'
            });
        }
        return tree;
    });
    // Add Open Collective postinstall script
    rules.push(addOpenCollective(options));
    // Check passed providers and generate proper files along with further package json changes.
    if (options.provider === 'firebase') {
        rules.push(updateGitIgnore(options, '/functions/node_modules/'));
        const source = apply(url('./files/firebase'), [
            move(options.directory)
        ]);
        rules.push(setFirebaseFunctions(options));
        rules.push(addOrReplaceScriptInPackageJson(options, 'build:prod:deploy', 'npm run build:prod && cd functions && npm install && cd .. && firebase deploy'));
        rules.push(mergeWith(source, MergeStrategy.Overwrite));
    }
    if (options.provider === 'gcloud' || options.provider === 'aws') {
        // We need serverless dependency to properly set Google Cloud or AWS Dependencies.
        rules.push((tree: Tree) => {
            addDependencyToPackageJson(tree, options, {
                type: NodeDependencyType.Dev,
                name: 'serverless',
                version: '^1.60.0'
            });
            return tree;
        });
        if (options.provider === 'gcloud') {
            rules.push(addServerlessGcloud(options));
        } else if (options.provider === 'aws') {
            rules.push(addServerlessAWS(options));
        }
    }
    if (options.provider === 'aws' && options.offline) {
        rules.push(addServerlessOffline(options));
    }
    // Generate files in order to run local server on development mode.
    // TODO: Check if I shall include all the typescript lambda logic plus webpack in a single function or
    // keep the splitted like now.
    rules.push(addLocalFile(options));
    rules.push(editTSConfigFile(options));
    // TODO: Add webpack typescript config file option.
    // Fill in the project's dist folder in the generated webpack server config.
    rules.push((tree: Tree) => {
        let webpack = getFileContent(tree, `${options.directory}/webpack.server.config.js`);
        tree.overwrite(`${options.directory}/webpack.server.config.js`, webpack.replace("__distFolder__", getDistFolder(tree, options)));
    });
    if (options.provider !== 'firebase') {
        rules.push(updateEnvironment(options));
        rules.push(updateAppEntryFile(options));
    }
    // Modify package scripts according to provider
    rules.push(addBuildScriptsAndFiles(options));
    if (!options.skipInstall) {
        rules.push((tree: Tree, context: SchematicContext) => {
            tree.exists('.'); // noop
            context.addTask(new NodePackageInstallTask(options.directory));
        })
    }
    // Persist the applied options so later runs can detect a previous install.
    rules.push((tree: Tree) => {
        const ngToolkitSettings = getNgToolkitInfo(tree, options);
        ngToolkitSettings.serverless = options;
        updateNgToolkitInfo(tree, ngToolkitSettings, options);
    });
    if (!options.disableBugsnag) {
        return applyAndLog(chain(rules), bugsnagClient);
    } else {
        return chain(rules);
    }
}
/**
 * Adds the common server/webpack template files unless @ng-toolkit/universal
 * was already applied (tracked in the ng-toolkit info file). Picks
 * `server_universal.ts` or `server_static.ts` depending on whether the project
 * is universal, renames it to `server.ts` and fills in the dist folder tokens.
 */
function checkIfUniversal(options: IServerlessSchema, templateSource: Source): Rule {
    return (tree: Tree) => {
        const ngToolkitSettings = getNgToolkitInfo(tree, options);
        if (!ngToolkitSettings.universal) {
            const subRules: Rule[] = [];
            subRules.push(mergeWith(templateSource, MergeStrategy.Overwrite));
            subRules.push((subTree: Tree) => {
                if (isUniversal(subTree, options)) {
                    subTree.rename(`${options.directory}/server_universal.ts`, `${options.directory}/server.ts`);
                    subTree.delete(`${options.directory}/server_static.ts`);
                } else {
                    subTree.delete(`${options.directory}/server_universal.ts`);
                    subTree.rename(`${options.directory}/server_static.ts`, `${options.directory}/server.ts`);
                }
                // Fix: read and write through `subTree` (the tree that just received the
                // merged/renamed files) instead of the outer `tree` captured by the closure.
                const serverFileContent = getFileContent(subTree, `${options.directory}/server.ts`);
                subTree.overwrite(`${options.directory}/server.ts`, serverFileContent
                    .replace('__distBrowserFolder__', getBrowserDistFolder(subTree, options))
                    .replace('__distServerFolder__', getServerDistFolder(subTree, options)));
                return subTree;
            });
            return chain(subRules);
        } else {
            return tree;
        }
    }
}
/**
 * If a previous serverless install is recorded, removes the generated
 * provider entry files so the template merge can recreate them cleanly.
 * Deletions are guarded with `exists` because a prior run may already have
 * removed one of the files (e.g. addServerlessAWS deletes one of
 * lambda.js/lambda.ts), and `Tree.delete` throws on a missing path.
 */
function checkIfServerless(options: IServerlessSchema): Rule {
    return (tree: Tree) => {
        const ngToolkitSettings = getNgToolkitInfo(tree, options);
        if (ngToolkitSettings.serverless) {
            // Delete only when present to keep re-runs idempotent.
            const remove = (filePath: string) => {
                if (tree.exists(filePath)) {
                    tree.delete(filePath);
                }
            };
            switch (options.provider) {
                case 'aws': {
                    remove(`${options.directory}/lambda.js`);
                    remove(`${options.directory}/lambda.ts`);
                    remove(`${options.directory}/serverless.yml`);
                    break;
                }
                case 'gcloud': {
                    remove(`${options.directory}/index.js`);
                    remove(`${options.directory}/serverless.yml`);
                    break;
                }
                case 'firebase': {
                    remove(`${options.directory}/functions/index.js`);
                    break;
                }
            }
        }
        return tree;
    }
}
/**
 * Scaffolds the Firebase Functions side of the project: writes the functions
 * package.json, creates `.firebaserc` (optionally pre-wired to a project) and
 * points firebase.json hosting at the functions dist with an SPA rewrite.
 */
function setFirebaseFunctions(options: IServerlessSchema): Rule {
    return (tree: Tree) => {
        createOrOverwriteFile(tree, `${options.directory}/functions/package.json`, outdent`
        {
            "name": "functions",
            "description": "Cloud Functions for Firebase",
            "scripts": {
                "serve": "firebase serve --only functions",
                "shell": "firebase functions:shell",
                "start": "npm run shell",
                "deploy": "firebase deploy --only functions",
                "logs": "firebase functions:log"
            },
            "dependencies": {
                "firebase-admin": "^8.9.0",
                "firebase-functions": "^3.3.0"
            },
            "private": true
        }`);
        // .firebaserc: only created when absent, optionally preselecting a project.
        let firebaseProjectSettings = {};
        if (options.firebaseProject) {
            firebaseProjectSettings = {
                projects: {
                    default: options.firebaseProject
                }
            };
        }
        const firebasercPath = `${options.directory}/.firebaserc`;
        if (!tree.exists(firebasercPath)) {
            tree.create(firebasercPath, JSON.stringify(firebaseProjectSettings, null, 2));
        }
        // firebase.json: keep any existing settings, (re)write the hosting block.
        const firebaseJsonPath = `${options.directory}/firebase.json`;
        const firebaseJson: any = tree.exists(firebaseJsonPath)
            ? JSON.parse(getFileContent(tree, firebaseJsonPath))
            : {};
        firebaseJson.hosting = {
            "public": "functions/dist",
            "rewrites": [
                {
                    "source": "**",
                    "function": "http"
                }
            ]
        };
        createOrOverwriteFile(tree, firebaseJsonPath, JSON.stringify(firebaseJson, null, 2));
        return tree;
    }
}
/**
 * Rewrites the project's npm scripts for building, serving locally and
 * deploying with the chosen provider. The `*:serverless` variants build with
 * the provider's base href so assets resolve under the deployed stage prefix
 * ('/production/' for AWS API Gateway, '/http/' for GCloud Functions).
 */
function addBuildScriptsAndFiles(options: IServerlessSchema): Rule {
    return (tree: Tree) => {
        const packageJsonSource = JSON.parse(getFileContent(tree, `${options.directory}/package.json`));
        const universal: boolean = isUniversal(tree, options);
        let serverlessBasePath;
        switch (options.provider) {
            default: serverlessBasePath = '/'; break;
            case 'aws': serverlessBasePath = '/production/'; break;
            case 'gcloud': serverlessBasePath = '/http/'; break;
        }
        packageJsonSource.scripts['build:browser:prod'] = `ng build --prod`;
        packageJsonSource.scripts['build:browser:serverless'] = `ng build --prod --base-href ${serverlessBasePath}`;
        packageJsonSource.scripts['build:serverless'] = `npm run build:browser:serverless && npm run build:server:serverless`;
        packageJsonSource.scripts['build:prod'] = `npm run build:browser:prod && npm run build:server:prod`;
        // Local dev server runs through ts-node when the lambda is TypeScript.
        packageJsonSource.scripts['server'] = `${options.lambdaTS ? 'ts-' : ''}node local.${options.lambdaTS ? 'ts' : 'js'}`;
        packageJsonSource.scripts['build:prod:deploy'] = `npm run build:prod && npm run deploy`;
        packageJsonSource.scripts['build:serverless:deploy'] = `npm run build:serverless && npm run deploy`;
        if (options.provider === 'firebase') {
            packageJsonSource.scripts['deploy'] = `cpy-cli dist/ functions/dist/ && cd functions && npm install && firebase deploy`;
        } else {
            packageJsonSource.scripts['deploy'] = `serverless deploy`;
        }
        if (universal) {
            packageJsonSource.scripts['build:server:prod'] = `ng run ${options.clientProject}:server && webpack --config webpack.server.config.js --progress --colors`;
            // Fix: use strict inequality (was `!=`) per the rest of this file.
            if (options.provider !== 'firebase') {
                packageJsonSource.scripts['build:server:serverless'] = `ng run ${options.clientProject}:server:serverless && webpack --config webpack.server.config.js --progress --colors`;
            } else {
                packageJsonSource.scripts['build:server:serverless'] = `ng run ${options.clientProject}:server && webpack --config webpack.server.config.js --progress --colors`;
            }
        } else {
            packageJsonSource.scripts['build:server:prod'] = `webpack --config webpack.server.config.js --progress --colors`;
            packageJsonSource.scripts['build:server:serverless'] = `webpack --config webpack.server.config.js --progress --colors`;
        }
        tree.overwrite(`${options.directory}/package.json`, JSON.stringify(packageJsonSource, null, 2));
        return tree;
    }
}
/**
 * Adds AWS Lambda support: merges the aws template files, renames/fills the
 * serverless yml, removes the unused lambda entry variant, and registers the
 * aws-serverless-express runtime plus API Gateway binary plugin dependencies.
 */
function addServerlessAWS(options: IServerlessSchema): Rule {
    let fileName = 'serverless.yml';
    if (options.serverless && options.serverless.aws && options.serverless.aws.filename) {
        fileName = options.serverless.aws.filename;
    }
    const source = apply(url('./files/aws'), [
        move(options.directory)
    ]);
    const configure = (tree: Tree) => {
        const ymlPath = `${options.directory}/${fileName}`;
        tree.rename(`${options.directory}/serverless-aws.yml`, ymlPath);
        tree.overwrite(ymlPath, getFileContent(tree, ymlPath).replace('__appName__', options.clientProject.toLowerCase()));
        // Add `serverless-plugin-typescript` to the serverless yml file.
        if (options.lambdaTS) {
            const data = parseYML2JS(tree, ymlPath);
            data.plugins.push('serverless-plugin-typescript');
            parseJS2YML(tree, data, ymlPath);
        }
        // Remove lambda file based on `lambdaTS` option (keep only the chosen variant).
        tree.delete(`${options.directory}/lambda.${options.lambdaTS ? 'js' : 'ts'}`);
        addDependencyToPackageJson(tree, options, {
            type: NodeDependencyType.Default,
            name: 'aws-serverless-express',
            version: '^3.3.6'
        });
        addDependencyToPackageJson(tree, options, {
            type: NodeDependencyType.Dev,
            name: 'serverless-apigw-binary',
            version: '^0.4.4'
        });
        return tree;
    };
    return chain([mergeWith(source), configure]);
}
/**
 * Adds Google Cloud Functions support: merges the gcloud template files,
 * renames/fills the serverless yml and registers the firebase and
 * serverless-google-cloudfunctions dependencies.
 */
function addServerlessGcloud(options: IServerlessSchema): Rule {
    let fileName = 'serverless.yml';
    if (options.serverless && options.serverless.gcloud && options.serverless.gcloud.filename) {
        fileName = options.serverless.gcloud.filename;
    }
    const source = apply(url('./files/gcloud'), [
        move(options.directory)
    ]);
    const configure = (tree: Tree) => {
        const ymlPath = `${options.directory}/${fileName}`;
        tree.rename(`${options.directory}/serverless-gcloud.yml`, ymlPath);
        tree.overwrite(ymlPath, getFileContent(tree, ymlPath).replace('__appName__', options.clientProject.toLowerCase()));
        addDependencyToPackageJson(tree, options, {
            type: NodeDependencyType.Dev,
            name: 'firebase-admin',
            version: '^8.9.0'
        });
        addDependencyToPackageJson(tree, options, {
            type: NodeDependencyType.Dev,
            name: 'firebase-functions',
            version: '^3.3.0'
        });
        addDependencyToPackageJson(tree, options, {
            type: NodeDependencyType.Default,
            name: 'serverless-google-cloudfunctions',
            version: '^2.3.3'
        });
        return tree;
    };
    return chain([mergeWith(source), configure]);
}
// Generate lambda handles by using Typescript instead of plain javascript.
// This will require extra serverless plugins in order to transpile the files into plain js ones.
function addLocalFile(options: IServerlessSchema): Rule {
    return _ => {
        // Dispatch on the lambda language chosen by the user.
        if (options.lambdaTS) {
            return addLocalTypescript(options);
        }
        return addLocalJavascript(options);
    };
}
/**
 * Writes a plain-JS `local.js` entry that serves the built server bundle on
 * PORT (default 8080) for local development.
 */
function addLocalJavascript(options: IServerlessSchema): Rule {
    return tree => {
        createOrOverwriteFile(tree, `${options.directory}/local.js`, outdent`
        const port = process.env.PORT || 8080;
        const server = require('./${getDistFolder(tree, options)}/server');
        server.app.listen(port, () => {
            console.log("Listening on: http://localhost:" + port);
        });
        `);
    }
}
/**
 * Writes a TypeScript `local.ts` dev-server entry and registers the
 * dependencies needed to build/run the TS lambda handler
 * (aws-serverless-express runtime plus serverless/aws-lambda typings).
 */
function addLocalTypescript(options: IServerlessSchema): Rule {
    return tree => {
        addDependencyToPackageJson(tree, options, {
            type: NodeDependencyType.Default,
            name: 'aws-serverless-express',
            version: '^3.3.6'
        });
        addDependencyToPackageJson(tree, options, {
            type: NodeDependencyType.Dev,
            name: 'serverless-plugin-typescript',
            version: '^1.1.9'
        });
        addDependencyToPackageJson(tree, options, {
            type: NodeDependencyType.Dev,
            name: '@types/aws-lambda',
            version: '^8.10.36'
        });
        addDependencyToPackageJson(tree, options, {
            type: NodeDependencyType.Dev,
            name: '@types/aws-serverless-express',
            version: '^3.3.2'
        });
        createOrOverwriteFile(tree, `${options.directory}/local.ts`, outdent`
        import { run } from './dist/server';
        // Run locally our express server
        run();
        `);
    }
}
/**
 * Adjusts the root tsconfig so the server bundle compiles for webpack/node:
 * enables default-import interop flags and forces CommonJS module output.
 * NOTE(review): this assumes tsconfig.json is strict JSON — a tsconfig with
 * comments (JSONC) would make JSON.parse throw; confirm against generated apps.
 * @param options serverless options schema
 */
function editTSConfigFile(options: IServerlessSchema): Rule {
    return tree => {
        const tsConfigPath = `${options.directory}/tsconfig.json`;
        const tsConfig: any = JSON.parse(getFileContent(tree, tsConfigPath));
        tsConfig.compilerOptions.esModuleInterop = true;
        tsConfig.compilerOptions.allowSyntheticDefaultImports = true;
        tsConfig.compilerOptions.module = 'commonjs';
        tree.overwrite(tsConfigPath, JSON.stringify(tsConfig, null, 2));
        return tree;
    };
}
/**
 * Parse YML file into JS Object to add the properties required for serverless-offline
 * to work with serverless (plugin registration plus packaging the dist folder).
 * Also adds `serverless-offline` as a devDependency.
 */
function addServerlessOffline(options: IServerlessSchema): Rule {
    let fileName = 'serverless.yml';
    if (options.serverless && options.serverless.aws && options.serverless.aws.filename) {
        fileName = options.serverless.aws.filename;
    }
    return tree => {
        const ymlPath = `${options.directory}/${fileName}`;
        const data = parseYML2JS(tree, ymlPath);
        data.plugins.push('serverless-offline');
        data.package['include'] = ['dist/**'];
        parseJS2YML(tree, data, ymlPath);
        // Add serverless-offline as dev dependency.
        addDependencyToPackageJson(tree, options, {
            type: NodeDependencyType.Dev,
            name: 'serverless-offline',
            version: '^6.0.0-alpha.56'
        });
        return tree;
    };
}
/**
 * Creates `environment.serverless.ts` as a copy of the prod environment,
 * injects a `baseHref` entry into every environment file (the serverless copy
 * gets the provider's stage prefix) and registers a `serverless` configuration
 * with a file replacement on the project's server builder in angular.json.
 * No-op for non-universal projects or the firebase provider.
 */
function updateEnvironment(options: IServerlessSchema): Rule {
    return tree => {
        if (!isUniversal(tree, options) || options.provider === 'firebase') {
            return tree;
        }
        const serverlessBasePath: string = options.provider === 'aws' ? '/production/' : '/http/';
        createOrOverwriteFile(tree, `${options.directory}/src/environments/environment.serverless.ts`, getFileContent(tree, `${options.directory}/src/environments/environment.prod.ts`));
        tree.getDir(`${options.directory}/src/environments`).visit((path: Path) => {
            if (path.endsWith('.ts')) {
                addEntryToEnvironment(tree, path, 'baseHref', path.indexOf('serverless') > -1 ? serverlessBasePath : '/');
            }
        });
        //update CLI with new configuration
        const cliConfig: any = JSON.parse(getFileContent(tree, `${options.directory}/angular.json`));
        const architect: any = cliConfig.projects[options.clientProject].architect;
        for (const targetName of Object.keys(architect)) {
            const target = architect[targetName];
            if (target.builder !== '@angular-devkit/build-angular:server') {
                continue;
            }
            if (!target.configurations) {
                target.configurations = {};
            }
            target.configurations.serverless = {
                "fileReplacements": [
                    {
                        "replace": "src/environments/environment.ts",
                        "with": "src/environments/environment.serverless.ts"
                    }
                ]
            };
        }
        tree.overwrite(`${options.directory}/angular.json`, JSON.stringify(cliConfig, null, 2));
        return tree;
    };
}
/**
 * Patches `app.component.ts` so that during server-side rendering the
 * document's <base href> is rewritten to `environment.baseHref` (the
 * serverless stage prefix). Adds the needed imports/injections and either
 * extends an existing ngOnInit or creates one.
 */
function updateAppEntryFile(options: IServerlessSchema): Rule {
    return tree => {
        if (!isUniversal(tree, options)) {
            return tree;
        }
        const appComponentFilePath = `${options.directory}/src/app/app.component.ts`;
        const ngOnInit = getMethodBody(tree, appComponentFilePath, 'ngOnInit');
        addImportStatement(tree, appComponentFilePath, 'environment', '../environments/environment');
        implementInterface(tree, appComponentFilePath, 'OnInit', '@angular\/core');
        addImportStatement(tree, appComponentFilePath, 'Inject', '@angular\/core');
        addImportStatement(tree, appComponentFilePath, 'isPlatformBrowser', '@angular\/common');
        addDependencyInjection(tree, appComponentFilePath, 'document', 'any', '@angular/common', 'DOCUMENT');
        addDependencyInjection(tree, appComponentFilePath, 'platformId', 'any', '@angular/core', 'PLATFORM_ID');
        // Append to an existing ngOnInit body, or add a fresh method otherwise.
        if (ngOnInit) {
            updateMethod(tree, appComponentFilePath, 'ngOnInit', ngOnInit + outdent`
                if (!isPlatformBrowser(this.platformId)) {
                    const bases = this.document.getElementsByTagName('base');
                    if (bases.length > 0) {
                        bases[0].setAttribute('href', environment.baseHref);
                    }
                }`
            );
        } else {
            addMethod(tree, appComponentFilePath, outdent`
                public ngOnInit(): void {
                    if (!isPlatformBrowser(this.platformId)) {
                        const bases = this.document.getElementsByTagName('base');
                        if (bases.length > 0) {
                            bases[0].setAttribute('href', environment.baseHref);
                        }
                    }
                }`
            );
        }
        return tree;
    }
}
import * as vscode from 'vscode';
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import * as Git from './git';
import * as WebRequest from 'web-request';
import * as semver from 'semver';
import * as archiver from 'archiver';
import { jar } from 'request';
import { spawn } from 'child_process';
import { BufferSplitter } from './BufferSplitter';
import { ModManager } from './ModManager';
// User-configurable shell command lines, run at the matching stage of the
// package/publish lifecycle (see the corresponding ModPackage methods).
interface ModPackageScripts {
	compile?: string
	datestamp?: string
	prepackage?: string
	version?: string
	prepublish?: string
	publish?: string
	postpublish?: string
};
// Parsed shape of a mod's info.json, extended with an optional packaging block.
export interface ModInfo {
	// actual Factorio fields:
	name: string
	version: string
	factorio_version: string
	title: string
	author: string
	homepage: string
	contact: string
	description: string
	dependencies: string[]
	// my extensions for packaging:
	// NOTE(review): semantics of the flags are consumed by ModPackage
	// (constructor/Update) and by the publish flow — see those for meaning.
	package?: {
		ignore?: string[]
		no_git_push?: boolean
		no_git_tag?: boolean
		git_publish_branch?: string|null
		no_portal_upload?: boolean
		scripts?: ModPackageScripts
	}
};
// Task definition for the `factorio`/`adjustMods` task: enables/disables mods
// (or pins a version) in the mod list located at `modsPath`.
interface AdjustModsDefinition extends vscode.TaskDefinition {
	type: "factorio"
	command: "adjustMods"
	// mod name -> true (enable) | false (disable) | version string to enable
	adjustMods: {[keys:string]:string|boolean}
	modsPath: string
	// when set, disable every mod before applying the adjustments
	disableExtraMods?:boolean
	// unless set, the `base` mod is forced to stay enabled
	allowDisableBaseMod?:boolean
}
/**
 * Supplies and resolves the `factorio` tasks for the VS Code task system:
 * per-mod lifecycle commands (compile/datestamp/package/version/upload/publish)
 * plus the mod-list `adjustMods` task. Lifecycle tasks are only offered for
 * the latest version of each known mod.
 */
export class ModTaskProvider implements vscode.TaskProvider{
	constructor(private readonly modPackages: Map<string, ModPackage>) {}
	// Advertise one task per lifecycle command for the newest version of every mod.
	provideTasks(token?: vscode.CancellationToken | undefined): vscode.ProviderResult<vscode.Task[]> {
		const tasks:vscode.Task[] = [];
		const latest = ModPackage.latestPackages(this.modPackages.values());
		for (const modpackage of this.modPackages.values()) {
			if (!latest.has(modpackage)){continue;}
			// `compile` is only offered when the mod configures a compile script.
			if (modpackage.scripts?.compile)
			{
				tasks.push(new vscode.Task(
					{label:`${modpackage.label}.compile`,type:"factorio",modname:modpackage.label,command:"compile"},
					vscode.workspace.getWorkspaceFolder(modpackage.resourceUri) || vscode.TaskScope.Workspace,
					`${modpackage.label}.compile`,
					"factorio",
					modpackage.CompileTask(),
					[]
				));
			}
			tasks.push(new vscode.Task(
				{label:`${modpackage.label}.datestamp`,type:"factorio",modname:modpackage.label,command:"datestamp"},
				vscode.workspace.getWorkspaceFolder(modpackage.resourceUri) || vscode.TaskScope.Workspace,
				`${modpackage.label}.datestamp`,
				"factorio",
				modpackage.DateStampTask(),
				[]
			));
			tasks.push(new vscode.Task(
				{label:`${modpackage.label}.package`,type:"factorio",modname:modpackage.label,command:"package"},
				vscode.workspace.getWorkspaceFolder(modpackage.resourceUri) || vscode.TaskScope.Workspace,
				`${modpackage.label}.package`,
				"factorio",
				modpackage.PackageTask(),
				[]
			));
			tasks.push(new vscode.Task(
				{label:`${modpackage.label}.version`,type:"factorio",modname:modpackage.label,command:"version"},
				vscode.workspace.getWorkspaceFolder(modpackage.resourceUri) || vscode.TaskScope.Workspace,
				`${modpackage.label}.version`,
				"factorio",
				modpackage.IncrementTask(),
				[]
			));
			tasks.push(new vscode.Task(
				{label:`${modpackage.label}.upload`,type:"factorio",modname:modpackage.label,command:"upload"},
				vscode.workspace.getWorkspaceFolder(modpackage.resourceUri) || vscode.TaskScope.Workspace,
				`${modpackage.label}.upload`,
				"factorio",
				modpackage.PostToPortalTask(),
				[]
			));
			tasks.push(new vscode.Task(
				{label:`${modpackage.label}.publish`,type:"factorio",modname:modpackage.label,command:"publish"},
				vscode.workspace.getWorkspaceFolder(modpackage.resourceUri) || vscode.TaskScope.Workspace,
				`${modpackage.label}.publish`,
				"factorio",
				modpackage.PublishTask(),
				[]
			));
		};
		return tasks;
	}
	// Attach an execution to a task definition coming from the user's tasks.json.
	// Invalid definitions get a ConfigErrorTask that reports the problem instead.
	resolveTask(task: vscode.Task, token?: vscode.CancellationToken | undefined): vscode.ProviderResult<vscode.Task> {
		if (task.definition.type === "factorio")
		{
			let execution:vscode.CustomExecution|undefined;
			if (task.definition.command === "adjustMods")
			{
				if (!task.definition.adjustMods)
				{
					execution = this.ConfigErrorTask(task.definition,"missing `adjustMods`");
				}
				else if (!task.definition.modsPath)
				{
					execution = this.ConfigErrorTask(task.definition,"missing `modsPath`");
				}
				else
				{
					execution = this.AdjustModsTask(<AdjustModsDefinition>task.definition);
				}
			}
			else
			{
				if (!task.definition.modname)
				{
					execution = this.ConfigErrorTask(task.definition,"missing `modname`");
				}
				else
				{
					// Only the latest version of the named mod can be targeted.
					const latest = ModPackage.latestPackages(this.modPackages.values());
					for (const modpackage of this.modPackages.values()) {
						if (modpackage.label === task.definition.modname && latest.has(modpackage)) {
							const mp = modpackage;
							switch (task.definition.command) {
								case "compile":
									execution = mp.CompileTask();
									break;
								case "datestamp":
									execution = mp.DateStampTask();
									break;
								case "package":
									execution = mp.PackageTask();
									break;
								case "version":
									execution = mp.IncrementTask();
									break;
								case "upload":
									execution = mp.PostToPortalTask();
									break;
								case "publish":
									execution = mp.PublishTask();
									break;
								default:
									execution = this.ConfigErrorTask(task.definition,`unknown \`command\` "${task.definition.command}"`);
							}
							break;
						}
					}
					if (!execution)
					{
						execution = this.ConfigErrorTask(task.definition,`mod "${task.definition.modname}" not found`);
					}
				}
			}
			return new vscode.Task(
				task.definition,
				task.scope || vscode.TaskScope.Workspace,
				task.name,
				task.source,
				execution,
				[]);
		}
		return undefined;
	}
	// Print a configuration error plus the offending definition to the task terminal.
	private async ConfigError(term:ModTaskTerminal,def:vscode.TaskDefinition,error:string): Promise<void>
	{
		term.write(error+"\n");
		term.write(JSON.stringify(def,undefined,2));
	}
	// Wrap ConfigError in a pseudoterminal-backed CustomExecution.
	private ConfigErrorTask(def:vscode.TaskDefinition,error:string): vscode.CustomExecution
	{
		return new vscode.CustomExecution(async ()=>{
			return new ModTaskPseudoterminal(async term =>{
				await this.ConfigError(term,def,error);
				term.close();
			});
		});
	}
	// Apply the enable/disable/version adjustments from the definition to the
	// mod list at modsPath, logging each change to the task terminal.
	private async AdjustMods(term:ModTaskTerminal,def:AdjustModsDefinition): Promise<void>
	{
		def.modsPath = def.modsPath.replace(/\\/g,"/");
		term.write(`Using modsPath ${def.modsPath}\n`);
		const manager = new ModManager(def.modsPath);
		// `base` must stay enabled unless the definition explicitly allows disabling it.
		if (!def.allowDisableBaseMod) {def.adjustMods["base"] = true;}
		if (def.disableExtraMods) {
			term.write(`All Mods disabled\n`);
			manager.disableAll();
		}
		for (const mod in def.adjustMods) {
			if (def.adjustMods.hasOwnProperty(mod))
			{
				const adjust = def.adjustMods[mod];
				manager.set(mod,adjust);
				term.write(`${mod} ${
					adjust === true ? "enabled" :
					adjust === false ? "disabled" :
					"enabled version " + adjust
				}\n`);
			}
		}
		try {
			manager.write();
		} catch (error) {
			term.write(`Failed to save mod list:\n${error}\n`);
		}
	}
	// Wrap AdjustMods in a pseudoterminal-backed CustomExecution.
	private AdjustModsTask(def:AdjustModsDefinition): vscode.CustomExecution
	{
		return new vscode.CustomExecution(async ()=>{
			return new ModTaskPseudoterminal(async term =>{
				await this.AdjustMods(term,def);
				term.close();
			});
		});
	}
}
export class ModPackage extends vscode.TreeItem {
	public label: string; // used as modname
	public description: string; // used as modversion
	public packageIgnore?: string[]; // globs excluded when building the zip
	public noGitPush?: boolean;
	public noGitTag?: boolean;
	public gitPublishBranch?: string|null;
	public noPortalUpload?: boolean;
	public scripts?: ModPackageScripts;
	/**
	 * Tree item representing one mod's info.json; caches the packaging-relevant
	 * fields parsed from the file.
	 * @param resourceUri location of the mod's info.json
	 * @param modscript parsed contents of that file
	 */
	constructor(public readonly resourceUri: vscode.Uri, modscript: ModInfo) {
		super(resourceUri);
		this.label = modscript.name;
		this.description = modscript.version;
		this.tooltip = modscript.title;
		// Clicking the tree item opens the info.json itself.
		this.command = {
			title: 'Open',
			command: 'vscode.open',
			arguments: [resourceUri]
		};
		//this.id = modscript.name;
		this.packageIgnore = modscript.package?.ignore;
		this.noGitPush = modscript.package?.no_git_push;
		this.noGitTag = modscript.package?.no_git_tag;
		this.gitPublishBranch = modscript.package?.git_publish_branch;
		this.noPortalUpload = modscript.package?.no_portal_upload;
		this.scripts = modscript.package?.scripts;
	}
public static sort(a:ModPackage,b:ModPackage)
{
const namecomp = a.label.toLowerCase().localeCompare(b.label.toLowerCase());
if (namecomp !== 0) {return namecomp * 100;}
const vercomp = semver.compare(a.description,b.description,{"loose":true});
if (vercomp !== 0) {return -vercomp * 10;}
if (a.resourceUri<b.resourceUri) {return -1;}
if (a.resourceUri>b.resourceUri) {return 1;}
return 0;
}
public static latestPackages(packages:IterableIterator<ModPackage>)
{
const byModName = new Map<string,ModPackage[]>();
for (const mp of packages) {
if (byModName.has(mp.label))
{
byModName.get(mp.label)!.push(mp);
}
else
{
byModName.set(mp.label,[mp]);
}
}
const latest = new Set<ModPackage>();
for (const mps of byModName.values()) {
latest.add(mps.reduce((a,b)=>(semver.compare(a.description,b.description,{"loose":true}) < 0) ? b : a));
}
return latest;
}
public async Update()
{
const infodoc = await vscode.workspace.openTextDocument(this.resourceUri);
const jsonstr = infodoc.getText();
const modscript: ModInfo = JSON.parse(jsonstr);
this.label = modscript.name;
this.description = modscript.version;
this.tooltip = modscript.title;
this.packageIgnore = modscript.package?.ignore;
this.noGitPush = modscript.package?.no_git_push;
this.gitPublishBranch = modscript.package?.git_publish_branch;
this.noPortalUpload = modscript.package?.no_portal_upload;
this.scripts = modscript.package?.scripts;
}
	/**
	 * Runs the optional user-defined `compile` package script in the mod
	 * directory. No-op when no script is configured; on a non-zero exit code
	 * it simply returns (the script's own output reports the failure).
	 */
	private async Compile(term:ModTaskTerminal): Promise<void>
	{
		const moddir = path.dirname(this.resourceUri.fsPath);
		if(this.scripts?.compile)
		{
			term.write(`Compiling: ${this.resourceUri} ${this.description}\r\n`);
			const code = await runScript(term, "compile", this.scripts.compile, moddir,
				{ FACTORIO_MODNAME:this.label, FACTORIO_MODVERSION:this.description });
			if (code !== 0) {return;}
		}
	}
public CompileTask(): vscode.CustomExecution
{
return new vscode.CustomExecution(async ()=>{
return new ModTaskPseudoterminal(async term =>{
await this.Update();
await this.Compile(term);
term.close();
});
});
}
private async DateStampChangelog(term:ModTaskTerminal): Promise<boolean|number>
{
const moddir = path.dirname(this.resourceUri.fsPath);
const changelogpath = path.join(moddir, "changelog.txt");
if(fs.existsSync(changelogpath))
{
//datestamp current section
const changelogdoc = await vscode.workspace.openTextDocument(changelogpath);
const syms = <vscode.DocumentSymbol[]>await vscode.commands.executeCommand<(vscode.SymbolInformation|vscode.DocumentSymbol)[]>("vscode.executeDocumentSymbolProvider", changelogdoc.uri);
const current = syms?.find(sym=>sym.name.startsWith(this.description))!;
if (current)
{
const date = current.children.find(sym=>sym.name === "Date");
const we = new vscode.WorkspaceEdit();
if (date)
{
we.replace(changelogdoc.uri,date.selectionRange, new Date().toISOString().substr(0,10));
}
else
{
we.insert(changelogdoc.uri,current.selectionRange.end,`\nDate: ${new Date().toISOString().substr(0,10)}`);
}
await vscode.workspace.applyEdit(we);
await changelogdoc.save();
term.write(`Changelog section ${this.description} stamped ${new Date().toISOString().substr(0,10)}\r\n`);
}
else
{
term.write(`No Changelog section for ${this.description}\r\n`);
}
if (this.scripts?.datestamp) {
const code = await runScript(term, "datestamp", this.scripts.datestamp, moddir,
{ FACTORIO_MODNAME:this.label, FACTORIO_MODVERSION:this.description });
if (code !== 0){ return code; }
}
return true;
}
else
{
term.write(`No Changelog found\r\n`);
if (this.scripts?.datestamp) {
const code = await runScript(term, "datestamp", this.scripts.datestamp, moddir,
{ FACTORIO_MODNAME:this.label, FACTORIO_MODVERSION:this.description });
if (code !== 0){ return code; }
}
return false;
}
}
public DateStampTask(): vscode.CustomExecution
{
return new vscode.CustomExecution(async ()=>{
return new ModTaskPseudoterminal(async term =>{
await this.Update();
await this.DateStampChangelog(term);
term.close();
});
});
}
public static async BuildZip(moddir:string,packagepath:string,ignore:string[],name:string,version:string): Promise<number>
{
const zipoutput = fs.createWriteStream(packagepath);
const archive = archiver('zip', { zlib: { level: 9 }});
archive.pipe(zipoutput);
archive.glob("**",{ cwd: moddir, root: moddir, nodir: true, ignore: ignore },{ prefix: `${name}_${version}` });
const bytesWritten = new Promise<number>((resolve,reject)=>{
zipoutput.on("close",()=>resolve(archive.pointer()));
archive.finalize();
});
return bytesWritten;
}
	/**
	 * Builds `<name>_<version>.zip` for this mod: runs the compile step, the
	 * optional `prepackage` script, then zips the mod directory.
	 * @returns the path of the created zip, or undefined if prepackage failed
	 */
	private async Package(term:ModTaskTerminal): Promise<string|undefined>
	{
		const config = vscode.workspace.getConfiguration(undefined,this.resourceUri);
		term.write(`Packaging: ${this.resourceUri} ${this.description}\r\n`);
		await this.Compile(term);
		const moddir = path.dirname(this.resourceUri.fsPath);
		if(this.scripts?.prepackage)
		{
			const code = await runScript(term, "prepackage", this.scripts.prepackage, moddir,
				{ FACTORIO_MODNAME:this.label, FACTORIO_MODVERSION:this.description });
			if (code !== 0) {return;}
		}
		// The zip can be placed inside the mod folder (default) or next to it.
		let packagebase = moddir;
		switch (config.get<string>("factorio.package.zipLocation","inside")) {
			case "outside":
				packagebase = path.dirname(moddir);
				break;
			case "inside":
			default:
				break;
		}
		const packagepath = path.join(packagebase, `${this.label}_${this.description}.zip`);
		// Never package previously-built zips of this mod, plus user ignores.
		const ignore = [`**/${this.label}_*.zip`].concat(this.packageIgnore||[]);
		const bytesWritten = await ModPackage.BuildZip(moddir,packagepath,ignore,this.label,this.description);
		term.write(`Built ${this.label}_${this.description}.zip ${bytesWritten} bytes\r\n`);
		return packagepath;
	}
public PackageTask(): vscode.CustomExecution
{
return new vscode.CustomExecution(async ()=>{
return new ModTaskPseudoterminal(async term =>{
await this.Update();
await this.Package(term);
term.close();
});
});
}
	/**
	 * Bumps the patch version in info.json and, when a changelog.txt exists,
	 * prepends a fresh section template for the new version. Runs the optional
	 * `version` package script afterwards.
	 * @returns the new version string, or undefined when document symbols for
	 * info.json could not be loaded
	 */
	private async IncrementVersion(term:ModTaskTerminal): Promise<string|undefined>
	{
		const we = new vscode.WorkspaceEdit();
		// increment info.json version
		const infodoc = await vscode.workspace.openTextDocument(this.resourceUri);
		const syms = await vscode.commands.executeCommand<(vscode.SymbolInformation|vscode.DocumentSymbol)[]>
			("vscode.executeDocumentSymbolProvider", this.resourceUri);
		if (!syms)
		{
			term.write(`Error: Unable to load document symbols for ${this.resourceUri}\r\n`);
			return;
		}
		const newversion = semver.inc(this.description,'patch',{"loose":true})!;
		const version = syms.find(sym=>sym.name === "version")!;
		// Symbol providers may return either shape; each carries its range differently.
		we.replace(this.resourceUri,
			version instanceof vscode.SymbolInformation ? version.location.range : version.selectionRange,
			`"version": "${newversion}"`);
		const moddir = path.dirname(this.resourceUri.fsPath);
		const changelogpath = path.join(moddir, "changelog.txt");
		let changelogdoc: vscode.TextDocument|undefined;
		if(fs.existsSync(changelogpath))
		{
			//datestamp current section
			changelogdoc = await vscode.workspace.openTextDocument(changelogpath);
			//insert new section
			we.insert(changelogdoc.uri,new vscode.Position(0,0),
				"---------------------------------------------------------------------------------------------------\n" +
				`Version: ${newversion}\n` +
				"Date: ????\n" +
				" Changes:\n"
				// no placeholder line because prefix alone is not valid...
			);
		}
		await vscode.workspace.applyEdit(we);
		await infodoc.save();
		// eslint-disable-next-line no-unused-expressions
		changelogdoc && await changelogdoc.save();
		term.write(`Moved version to ${newversion}\r\n`);
		if (this.scripts?.version) {
			await runScript(term, "version", this.scripts.version, moddir,
				{ FACTORIO_MODNAME:this.label, FACTORIO_MODVERSION:newversion });
		}
		return newversion;
	}
/** Task execution that refreshes the mod info and bumps the patch version. */
public IncrementTask(): vscode.CustomExecution
{
	return new vscode.CustomExecution(async () => {
		const runner = async (term: ModTaskTerminal) => {
			await this.Update();
			await this.IncrementVersion(term);
			term.close();
		};
		return new ModTaskPseudoterminal(runner);
	});
}
/**
 * Upload and publish a built zip to the Factorio mod portal.
 *
 * Flow: scrape a CSRF token from the login page, log in (credentials from
 * settings, environment, or interactive prompt), scrape an upload token
 * from the mod's edit page, upload the zip, then post the edit form to
 * publish the uploaded file.
 * @param packagepath Full path of the zip to upload.
 * @param packageversion Version string used for the uploaded filename.
 * @param term Terminal used for progress/error output.
 * @returns true on success, false on any failure (details written to term).
 */
private async PostToPortal(packagepath: string, packageversion:string, term:ModTaskTerminal): Promise<boolean>
{
	// upload to portal
	// TS says this type doesn't work, but it really does...
	const cookiejar = <WebRequest.CookieJar><unknown>jar();
	try {
		const loginform = await WebRequest.get("https://factorio.com/login?mods=1&next=%2Ftrending",{jar:cookiejar});
		// Scrape the csrf_token hidden input out of the login form HTML.
		const logintoken = ((loginform.content.match(/<input [^>]+"csrf_token"[^>]+>/)||[])[0]?.match(/value="([^"]*)"/)||[])[1];
		const config = vscode.workspace.getConfiguration(undefined,this.resourceUri);
		const username = config.get("factorio.portal.username")
			|| process.env["FACTORIO_PORTAL_USERNAME"]
			|| await vscode.window.showInputBox({prompt: "Mod Portal Username:", ignoreFocusOut: true });
		if(!username) {return false;}
		term.write(`Logging in to Mod Portal as '${username}'\r\n`);
		const password = config.get("factorio.portal.password")
			|| process.env["FACTORIO_PORTAL_PASSWORD"]
			|| await vscode.window.showInputBox({prompt: "Mod Portal Password:", password: true, ignoreFocusOut: true });
		if (!password) {return false;}
		const loginresult = await WebRequest.post("https://factorio.com/login",{jar:cookiejar, throwResponseError: true,
			headers:{
				referer: "https://factorio.com/login?mods=1&next=%2Ftrending"
			},
			form:{
				csrf_token: logintoken,
				username_or_email: username,
				password: password,
				next_url: "/trending",
				next_mods: false
			}
		});
		// A rendered "flashes" list in the response means the login was rejected.
		const loginerr = loginresult.content.match(/<ul class="flashes">[\s\n]*<li>(.*)<\/li>/);
		if (loginerr) {throw new Error(loginerr[1]);}
	} catch (error) {
		term.write(`Failed to log in to Mod Portal: \r\n${error.toString()}\r\n`);
		return false;
	}
	let uploadtoken;
	try {
		// The edit page embeds a one-time upload token in an inline script.
		const uploadform = await WebRequest.get(`https://mods.factorio.com/mod/${this.label}/downloads/edit`,{jar:cookiejar, throwResponseError: true});
		uploadtoken = uploadform.content.match(/\n\s*token:\s*'([^']*)'/)![1];
	} catch (error) {
		term.write("Failed to get upload token from Mod Portal: " + error.toString());
		return false;
	}
	let uploadresult;
	try {
		// Stream the zip as multipart form data to the direct upload host.
		uploadresult = await WebRequest.post(`https://direct.mods-data.factorio.com/upload/mod/${uploadtoken}`, {jar:cookiejar, throwResponseError: true,
			formData:{
				file:{
					value: fs.createReadStream(packagepath),
					options: {
						filename: `${this.label}_${packageversion}.zip`,
						contentType: 'application/x-zip-compressed'
					}
				}
			}});
	} catch (error) {
		term.write("Failed to upload zip to Mod Portal: " + error.toString());
		return false;
	}
	const uploadresultjson = JSON.parse(uploadresult.content);
	try {
		// Submit the edit form referencing the uploaded file to publish it.
		const postresult = await WebRequest.post(`https://mods.factorio.com/mod/${this.label}/downloads/edit`, {
			jar:cookiejar, throwResponseError: true,
			form:{
				file:undefined,
				info_json:uploadresultjson.info,
				changelog:uploadresultjson.changelog,
				filename:uploadresultjson.filename,
				file_size: fs.statSync(packagepath).size ,
				thumbnail:uploadresultjson.thumbnail
			}
		});
		// A 302 redirect signals success; otherwise scrape the error message.
		if (postresult.statusCode === 302) {
			term.write(`Published ${this.label} version ${packageversion}`);
		}
		else
		{
			const message = postresult.content.match(/category:\s*'error',\s*\n\s*message:\s*'([^']*)'/)![1];
			throw message;
		}
	} catch (error) {
		term.write("Failed to post update to Mod Portal: " + error.toString());
		return false;
	}
	return true;
}
/**
 * Task execution that lets the user pick an already-built zip and uploads
 * it to the mod portal via PostToPortal.
 */
public PostToPortalTask(): vscode.CustomExecution
{
	return new vscode.CustomExecution(async ()=>{
		return new ModTaskPseudoterminal(async term =>{
			await this.Update();
			const config = vscode.workspace.getConfiguration(undefined,this.resourceUri);
			// Mirror Package()'s zipLocation logic to find where zips live.
			let packagebase = path.dirname(this.resourceUri.path);
			switch (config.get<string>("factorio.package.zipLocation","inside")) {
				case "outside":
					packagebase = path.dirname(packagebase);
					break;
				case "inside":
				default:
					break;
			}
			const moddir = this.resourceUri.with({path: packagebase});
			const direntries = await vscode.workspace.fs.readDirectory(moddir);
			// Only files named like <modname>_x.y.z.zip are candidates.
			// NOTE(review): sort() is lexicographic descending, not semver
			// order (e.g. 1.9 sorts after 1.10) — confirm intent.
			const packages = direntries.filter(([name,type])=>{
				return type === vscode.FileType.File && name.startsWith(this.label) && name.match(/_\d+\.\d+\.\d+\.zip$/);
			}).map(([name,type])=>{return name;}).sort().reverse();
			const packagename = await vscode.window.showQuickPick(packages,{ placeHolder: "Select Package to upload" });
			if (!packagename)
			{
				// User dismissed the picker: nothing to upload.
				term.close();
				return;
			}
			const packagepath = path.join(moddir.fsPath,packagename);
			// Recover the version string from the chosen filename.
			const packageversion = packagename.match(/_([0-9.]+)\.zip/)![1];
			await this.PostToPortal(packagepath,packageversion,term);
			term.close();
		});
	});
}
/**
 * Full publish flow: verify git state, run hooks, datestamp the changelog,
 * commit and tag, build the zip, bump the version, commit/push the bump,
 * upload to the mod portal, then run post-publish hooks.
 *
 * Any failed step writes a message to the terminal and returns early.
 * @param term Terminal used for progress/error output.
 */
private async Publish(term:ModTaskTerminal)
{
	term.write(`Publishing: ${this.resourceUri} ${this.description}\r\n`);
	const moddir = path.dirname(this.resourceUri.fsPath);
	const gitExtension = vscode.extensions.getExtension<Git.GitExtension>('vscode.git')!.exports;
	const git = gitExtension.getAPI(1);
	const repo = git.getRepository(this.resourceUri);
	const config = vscode.workspace.getConfiguration(undefined,this.resourceUri);
	const packageversion = this.description;
	let branchname:string|null;
	if (repo)
	{
		// throw if uncommitted changes
		if (repo.state.workingTreeChanges.length > 0)
		{
			term.write("Cannot Publish with uncommitted changes\r\n");
			return;
		}
		// Branch selection: the per-mod setting wins over the global default;
		// null means "publish from whatever branch is checked out".
		branchname =
			(this.gitPublishBranch !== undefined)?
				this.gitPublishBranch:
				config.get<string|null>("factorio.package.defaultPublishBranch", "master");
		if (branchname === null)
		{
			branchname = repo.state.HEAD?.name!;
		}
		else
		{
			// throw if not on publish branch
			if (repo.state.HEAD?.name !== branchname)
			{
				term.write(`Cannot Publish on branch other than '${branchname}'\r\n`);
				return;
			}
		}
	}
	else
	{
		// Not fatal: publishing proceeds without commits/tags/push.
		term.write("No git repo found\r\n");
	}
	if(this.scripts?.prepublish)
	{
		const code = await runScript(term, "prepublish", this.scripts.prepublish, moddir, { FACTORIO_MODNAME:this.label, FACTORIO_MODVERSION:packageversion });
		if (code !== 0) {return;}
	}
	// A numeric result from DateStampChangelog signals failure.
	const haschangelog = await this.DateStampChangelog(term);
	if (typeof haschangelog === "number") {return;}
	let tagname:string|undefined;
	if (repo)
	{
		if(haschangelog) {await runScript(term, undefined, `git add changelog.txt`, moddir);}
		// Commit message is piped to git on stdin (-F -).
		await runScript(term, undefined,
			`git commit --author "${ config.get<string>("factorio.package.autoCommitAuthor")! }" --allow-empty -F -`,
			moddir, undefined,
			config.get<string>("factorio.package.preparingCommitMessage")!.replace(/\$VERSION/g,packageversion).replace(/\$MODNAME/g,this.label));
		if (!this.noGitTag)
		{
			// Expand $VERSION/$MODNAME placeholders in tag name and message.
			tagname = config.get<string>("factorio.package.tagName","$VERSION");
			tagname = tagname.replace(/\$VERSION/g,packageversion).replace(/\$MODNAME/g,this.label);
			if (config.get<boolean>("factorio.package.tagVPrefix"))
			{
				term.write(`Using deprecated option factorio.package.tagVPrefix. Use factorio.package.tagName instead. \r\n`);
				tagname = "v" + tagname;
			}
			let tagmessage = config.get<string>("factorio.package.tagMessage");
			tagmessage = tagmessage?.replace(/\$VERSION/g,packageversion).replace(/\$MODNAME/g,this.label);
			await runScript(term, undefined, `git tag -a ${tagname} -F -`, moddir,undefined,tagmessage);
		}
	}
	// build zip with <factorio.package>
	const packagepath = await this.Package(term);
	if (!packagepath) {return;}
	const newversion = await this.IncrementVersion(term);
	if (!newversion) {return;}
	if(this.scripts?.publish)
	{
		const code = await runScript(term, "publish", this.scripts.publish, moddir, { FACTORIO_MODNAME:this.label, FACTORIO_MODVERSION:packageversion });
		if (code !== 0) {return;}
	}
	if (repo)
	{
		// Commit the version bump (info.json and the fresh changelog section).
		await runScript(term, undefined, `git add info.json`, moddir);
		if(haschangelog) {await runScript(term, undefined, `git add changelog.txt`, moddir);}
		await runScript(term, undefined,
			`git commit --author "${ config.get<string>("factorio.package.autoCommitAuthor")! }" -F -`,
			moddir,undefined,
			config.get<string>("factorio.package.movedToCommitMessage")!.replace(/\$VERSION/g,newversion).replace(/\$MODNAME/g,this.label));
		if(!this.noGitPush)
		{
			const upstream = repo?.state.HEAD?.upstream;
			if (upstream)
			{
				await runScript(term, undefined, `git push ${upstream.remote} ${branchname!} ${tagname ?? ""}`, moddir);
			}
			else
			{
				term.write(`no remote set as upstream on ${branchname!}\r\n`);
			}
		}
	}
	if(!this.noPortalUpload && ! await this.PostToPortal(packagepath, packageversion, term))
	{
		return;
	}
	if(this.scripts?.postpublish)
	{
		const code = await runScript(term, "postpublish", this.scripts.postpublish, moddir, { FACTORIO_MODNAME:this.label, FACTORIO_MODVERSION:packageversion, FACTORIO_MODPACKAGE:packagepath });
		if (code !== 0) {return;}
	}
	if (config.get<boolean>("factorio.package.removeZipAfterPublish",false))
	{
		fs.unlinkSync(packagepath);
	}
}
/** Task execution that refreshes the mod info and runs the full publish flow. */
public PublishTask(): vscode.CustomExecution
{
	return new vscode.CustomExecution(async () => {
		const runner = async (term: ModTaskTerminal) => {
			await this.Update();
			await this.Publish(term);
			term.close();
		};
		return new ModTaskPseudoterminal(runner);
	});
}
}
/**
 * Tree view of all Factorio mods (info.json files) in the workspace.
 *
 * Watches info.json files, keeps one ModPackage per file, exposes them as a
 * tree (latest version of each mod at top level, older versions as its
 * children), and registers the `factorio.*` commands that run the matching
 * tasks.
 */
export class ModsTreeDataProvider implements vscode.TreeDataProvider<vscode.TreeItem>, vscode.Disposable {
	private readonly _onDidChangeTreeData: vscode.EventEmitter<vscode.TreeItem | undefined> = new vscode.EventEmitter<vscode.TreeItem | undefined>();
	readonly onDidChangeTreeData: vscode.Event<vscode.TreeItem | undefined> = this._onDidChangeTreeData.event;
	// Keyed by the info.json uri (as string).
	private readonly modPackages: Map<string, ModPackage>;
	private readonly subscriptions:{dispose():void}[] = [this._onDidChangeTreeData];
	constructor() {
		this.modPackages = new Map<string, ModPackage>();
		vscode.workspace.findFiles('**/info.json').then(infos => { infos.forEach(this.updateInfoJson, this); });
		const infoWatcher = vscode.workspace.createFileSystemWatcher('**/info.json');
		this.subscriptions.push(infoWatcher.onDidChange(this.updateInfoJson, this));
		this.subscriptions.push(infoWatcher.onDidCreate(this.updateInfoJson, this));
		this.subscriptions.push(infoWatcher.onDidDelete(this.removeInfoJson, this));
		this.subscriptions.push(infoWatcher);
		this.subscriptions.push(vscode.tasks.registerTaskProvider("factorio",new ModTaskProvider(this.modPackages)));
		this.subscriptions.push(
			vscode.commands.registerCommand("factorio.openchangelog",async (mp:ModPackage) => {
				try {
					// await so a rejection is caught here instead of being lost
					await vscode.window.showTextDocument(vscode.Uri.joinPath(mp.resourceUri,"../changelog.txt"));
				} catch (error) {
					vscode.window.showErrorMessage(String(error));
				}
			}));
		// BUGFIX: "compile" and "datestamp" previously matched tasks with
		// `t.definition.command = "..."` (assignment, always truthy) instead
		// of `===`, so the first fetched task ran rather than the matching
		// one. All six task commands now share one correctly-compared helper.
		for (const command of ["compile", "datestamp", "package", "version", "upload", "publish"]) {
			this.subscriptions.push(this.registerTaskCommand(command));
		}
	}
	/**
	 * Register the `factorio.<command>` command, which finds the factorio
	 * task with the same `command` for the given mod and executes it.
	 */
	private registerTaskCommand(command: string): vscode.Disposable {
		return vscode.commands.registerCommand(`factorio.${command}`, async (mp: ModPackage) => {
			const task = (await vscode.tasks.fetchTasks({ type: "factorio" })).find(t =>
				t.definition.command === command && t.definition.modname === mp.label)!;
			await vscode.tasks.executeTask(task);
		});
	}
	dispose() {
		this.subscriptions.forEach(d=>d.dispose());
	}
	/**
	 * (Re)load one info.json: update or create its ModPackage, or drop it
	 * when the file is empty/invalid, then refresh the tree.
	 */
	private async updateInfoJson(uri: vscode.Uri) {
		if (uri.scheme === "file") {
			const infodoc = await vscode.workspace.openTextDocument(uri);
			const jsonstr = infodoc.getText();
			// Tolerate malformed JSON (e.g. mid-edit saves) instead of
			// rejecting with an uncaught exception.
			let modscript: ModInfo | undefined;
			try {
				modscript = jsonstr ? JSON.parse(jsonstr) : undefined;
			} catch (error) {
				modscript = undefined;
			}
			if (modscript && modscript.name) {
				if (this.modPackages.has(uri.toString())) {
					await this.modPackages.get(uri.toString())?.Update();
				} else {
					this.modPackages.set(uri.toString(), new ModPackage(uri, modscript));
				}
			} else {
				this.modPackages.delete(uri.toString());
			}
		} else {
			// Non-file schemes (e.g. git views of info.json) are not mods.
			this.modPackages.delete(uri.toString());
		}
		this._onDidChangeTreeData.fire(undefined);
	}
	private async removeInfoJson(uri: vscode.Uri) {
		this.modPackages.delete(uri.toString());
		this._onDidChangeTreeData.fire(undefined);
	}
	getTreeItem(element: vscode.TreeItem): vscode.TreeItem | Thenable<vscode.TreeItem> {
		return element;
	}
	/**
	 * Top level: the latest version of each mod (collapsible when older
	 * versions exist); children of a latest package: its older versions.
	 */
	async getChildren(element?: vscode.TreeItem | undefined): Promise<vscode.TreeItem[]> {
		if (!element) {
			const items: vscode.TreeItem[] = [];
			if (this.modPackages) {
				const latest = ModPackage.latestPackages(this.modPackages.values());
				for (const modscript of this.modPackages.values()) {
					if (latest.has(modscript)) {
						items.push(modscript);
						// Context flags drive which commands the UI offers.
						const context = ["latest"];
						if (modscript.scripts?.compile) {
							context.push("hascompile");
						}
						try {
							await vscode.workspace.fs.stat(vscode.Uri.joinPath(modscript.resourceUri, "../changelog.txt"));
							context.push("haschangelog");
						} catch (error) {
							// no changelog.txt next to info.json — omit the flag
						}
						modscript.contextValue = context.join(" ");
						// Collapsible only when older versions of this mod exist.
						modscript.collapsibleState = (() => {
							for (const other of this.modPackages.values()) {
								if (modscript.label === other.label && !latest.has(other)) {
									return vscode.TreeItemCollapsibleState.Collapsed;
								}
							}
							return vscode.TreeItemCollapsibleState.None;
						})();
					}
				}
			}
			return items.sort(ModPackage.sort);
		} else if (element instanceof ModPackage) {
			const items: vscode.TreeItem[] = [];
			if (this.modPackages) {
				const latest = ModPackage.latestPackages(this.modPackages.values());
				if (latest.has(element)) {
					this.modPackages.forEach((modscript) => {
						if (modscript.label === element.label && !latest.has(modscript)) {
							items.push(modscript);
							modscript.contextValue = "older";
						}
					});
				}
			}
			return items.sort(ModPackage.sort);
		} else {
			return [];
		}
	}
}
/** Minimal terminal interface handed to mod task runners. */
interface ModTaskTerminal {
	// Write raw data to the terminal (callers use \r\n line endings).
	write(data:string):void
	// Signal that the task is done and the pseudoterminal may close.
	close():void
}
/**
 * Run a shell command in `cwd`, piping stdout/stderr lines to `term`.
 *
 * The shell and extra environment come from the user's terminal settings
 * for the current platform; `env` entries override them. If `stdin` is
 * given it is written to the process before its stdin is closed.
 *
 * @param term Terminal used for all output.
 * @param name Optional script name used to decorate the terminal output.
 * @param command Shell command line to run.
 * @param cwd Working directory for the process.
 * @param env Extra environment variables merged over process.env.
 * @param stdin Optional data piped to the process's stdin.
 * @returns The process exit code; -1 when no code is available or the
 * process failed to spawn.
 */
async function runScript(term:ModTaskTerminal, name:string|undefined, command:string, cwd:string, env?:NodeJS.ProcessEnv,stdin?:string): Promise<number>
{
	const config = vscode.workspace.getConfiguration(undefined,vscode.Uri.parse(cwd) );
	// Pick the terminal settings that match the host platform.
	const platform = os.platform() === "win32" ? "windows" :
		os.platform() === "darwin" ? "osx" : "linux";
	const configenv = config.get<Object>(`terminal.integrated.env.${platform}`);
	const configshell = config.get<string>(`terminal.integrated.shell.${platform}`);
	const configautoshell = config.get<string>(`terminal.integrated.automationShell.${platform}`);
	const scriptenv = Object.assign({}, process.env, configenv, env || {} );
	return new Promise((resolve)=>{
		if(name)
		{
			term.write(`>> Running mod script "${name}": ${command} <<\r\n`);
		}
		else
		{
			term.write(`${command}\r\n`);
		}
		const scriptProc = spawn(command, {
			cwd: cwd,
			env: scriptenv,
			// Prefer the automation shell, then the configured shell, then
			// the platform default shell.
			shell: configautoshell ?? configshell ?? true,
			stdio: "pipe"
		});
		// Re-emit output line by line with terminal line endings.
		const stdout = new BufferSplitter(scriptProc.stdout, Buffer.from("\n"));
		stdout.on("segment", (chunk:Buffer) => {
			term.write(chunk.toString()+"\r\n");
		});
		const stderr = new BufferSplitter(scriptProc.stderr, Buffer.from("\n"));
		stderr.on("segment", (chunk:Buffer) => {
			term.write(chunk.toString()+"\r\n");
		});
		scriptProc.on('close', (code) => {
			if(name)
			{
				term.write(`>> Mod script "${name}" returned ${code} <<\r\n`);
			}
			resolve(code ?? -1);
		});
		scriptProc.on("error", (error) => {
			if(name)
			{
				term.write(`>> Mod script "${name}" failed: ${error.message} <<\r\n`);
			}
			else
			{
				term.write(`${error.message}\r\n`);
			}
			// BUGFIX: when spawning fails, 'close' never fires and the
			// promise previously never settled, hanging the caller forever.
			// (If both events fire, the second resolve is a harmless no-op.)
			resolve(-1);
		});
		if (stdin)
		{
			scriptProc.stdin.write(stdin);
		}
		scriptProc.stdin.end();
	});
}
/**
 * vscode.Pseudoterminal adapter that runs an async task body and bridges
 * its write/close calls to the terminal's events.
 */
class ModTaskPseudoterminal implements vscode.Pseudoterminal {
	private readonly writeEmitter = new vscode.EventEmitter<string>();
	onDidWrite: vscode.Event<string> = this.writeEmitter.event;
	private readonly closeEmitter = new vscode.EventEmitter<void>();
	onDidClose?: vscode.Event<void> = this.closeEmitter.event;
	// Cancelled when the user closes the terminal.
	private readonly tokensource = new vscode.CancellationTokenSource();
	constructor(private readonly runner:(term:ModTaskTerminal,token?:vscode.CancellationToken)=>void|Promise<void>) {}
	async open(initialDimensions: vscode.TerminalDimensions | undefined): Promise<void> {
		const writeEmitter = this.writeEmitter;
		const closeEmitter = this.closeEmitter;
		await this.runner({
			// Normalize bare \n to \r\n for the terminal.
			write: (data) => writeEmitter.fire(data.replace(/\r?\n/g,"\r\n")),
			close: () => closeEmitter.fire()
		}, this.tokensource.token);
		// Close once the runner finishes, even if it never called close().
		closeEmitter.fire();
	}
	close(): void {
		// User closed the terminal: request cancellation of the runner.
		this.tokensource.cancel();
	}
}
declare var _debug_: Debug;
import { Debug } from './debug/debug';
import LlRbTree from 'flo-ll-rb-tree';
import { ContactPoint, compareCps } from './contact-point';
import { removeCpNode } from './cp-node/remove';
import { getCurveToNext } from './get-curve-to-next';
/**
* The primary class of the library.
*
* Since the MAT is a full representation of the shape boundary an instance of
* this class contains both the information of a boundary point and a medial
* axis point (and edge to the next point(s)). It also contains edges to other
 * [[CpNode]]s which allows for traversal of the MAT and thus implicitly
* represents the entire MAT.
*
* To get the maximal disk circle (of which the center is on the medial axis)
* use [[cp]].circle.
*
* To get the boundary point, use [[cp]].pointOnShape.
*
* The edge [[next]] (resp. [[prev]]) allows one to move anti-clockwise (resp.
* clockwise) on the shape boundary to the next [[CpNode]]. This also imposes a
* direction of traversal of the MAT edges and vertices.
*
* The edge [[nextOnCircle]] (resp. [[prevOnCircle]]) allows one to go
* anti-clockwise (resp. clockwise) around the maximal disks implied by
* the CpNode to the next maximal disk contact point. This is equivalent to
* following other branches on the MAT.
*
* Call [[getCurveBetween]](cpNodeFrom, cpNodeTo) or getCurveToNext(cpNode)
* (replacing the older CpNode.[[matCurveToNextVertex]]) to get a bezier curve
* from the maximal disk of this [[CpNode]] to the next [[CpNode]]'s
* maximal disk and thus directly representing a piece of the medial axis.
*
* The function, [[getChildren]], returns the children of this [[CpNode]] when
* seen as a MAT edge. Only children in a 'forward' direction are returned. These
* include all edges except the 'backward' edge given by [[prevOnCircle]]. For
* [[CpNode]]s having a maximal disk with 2 contact points (a 2-prong, the usual
* case) the children will be the single edge [[next]]. For a 3-prong this will
* be the edges [[next]] and [[nextOnCircle]], etc. [[getChildren]] allows one to
* easily traverse the MAT tree - see e.g. the implementation of [[traverseEdges]].
*
* The getter, [[vertexChildren]], is similar to [[getChildren]] but returns the
* child nodes of the tree when [[CpNode]] is seen as a MAT vertex point (as
* opposed to edge). In this way the dual graph of the tree can easily be
* traversed - see e.g. [[traverseVertices]]. Generally, however, traversing the
* edges is preferred as it returns the entire Medial Axis (by utilizing
* [[getCurveToNext]] on each returned edge).
*
* It may be worth mentioning that by traversing from the CpNode by following
* [[next]] repeatedly until one is back at the same CpNode allows one
* to 'go around' the shape boundary and at the same time traverse the MAT twice
* in opposite directions.
*/
class CpNode {
	/**
	 * Primarily for internal use.
	 * @param cp The shape boundary contact point, i.e. a [[CpNode]] without its
	 * edges.
	 * @param isHoleClosing If true, this [[CpNode]] belongs to a hole-closing
	 * maximal disk.
	 * @param isIntersection true if this cpNode is at a shape boundary
	 * intersection point, false otherwise
	 * @param prev The previous (going clockwise around the boundary) contact
	 * point ([[CpNode]]).
	 * @param next The next (going anti-clockwise around the boundary) contact
	 * point ([[CpNode]]).
	 * @param prevOnCircle The previous [[CpNode]] (going clockwise around
	 * the inscribed circle defined by the maximal disk).
	 * @param nextOnCircle The next [[CpNode]] (going anti-clockwise around
	 * the inscribed circle defined by the maximal disk).
	 */
	constructor(
			public readonly cp : ContactPoint,
			public isHoleClosing : boolean,
			public isIntersection : boolean,
			public prev : CpNode = undefined,
			public next : CpNode = undefined,
			public prevOnCircle : CpNode = undefined,
			public nextOnCircle : CpNode = undefined) {
	}
	/**
	 * Returns the bezier curve from the maximal disk of this [[CpNode]] to the
	 * next [[CpNode]]'s maximal disk and thus directly represents a piece of the
	 * medial axis.
	 * @deprecated Use [[getCurveToNext]] instead
	 * @param cpNode
	 */
	get matCurveToNextVertex() {
		return getCurveToNext(this);
	}
	/**
	 * Primarily for internal use.
	 *
	 * Compares the order of two [[CpNode]]s. The order is cyclic and depends
	 * on a [[CpNode]]'s relative position along the shape boundary.
	 */
	static comparator =
		(a: CpNode, b: CpNode) => compareCps(a.cp, b.cp);
	/**
	 * Returns the children of this [[CpNode]] when seen as a MAT edge. Only
	 * children in a 'forward' direction are returned. These include all edges
	 * except the 'backward' edge given by [[prevOnCircle]], even terminating
	 * edges.
	 */
	getChildren() {
		let children: CpNode[] = [];
		let cp = this.next;
		let cp_ = cp;
		// Walk around the disk circle; the stop condition excludes the last
		// node on the circle (the 'backward' edge described above).
		do {
			children.push(cp_);
			cp_ = cp_.nextOnCircle;
		} while (cp_.nextOnCircle !== cp);
		return children;
	}
	// Alias so callers can write CpNode.remove(...).
	static remove = removeCpNode;
	/**
	 * Similar to [[getChildren]] but returns the child nodes of the tree when
	 * [[CpNode]] is seen as a MAT vertex point (as opposed to edge). In this
	 * way the dual graph of the tree can easily be traversed - see e.g.
	 * [[traverseVertices]]. Generally, however, traversing the edges is
	 * preferred as it returns the entire Medial Axis (by utilizing
	 * [[getCurveToNext]] on each returned edge).
	 */
	get vertexChildren() {
		// A leaf vertex has no children.
		if (this.isTerminating()) { return []; }
		let cp: CpNode = this;
		let children: CpNode[] = [];
		let cp_ = cp;
		while (cp_ !== cp.prevOnCircle) {
			if (!cp_.isTerminating()) { children.push(cp_.next); }
			cp_ = cp_.nextOnCircle;
		}
		return children;
	}
	/**
	 * Returns all [[CpNode]]s on the MAT that this [[CpNode]] is part of
	 * starting from the current one and going anti-clockwise around the shape.
	 */
	public getAllOnLoop() {
		let cpStart: CpNode = this;
		let cps: CpNode[] = [cpStart];
		let cp: CpNode = this.next;
		// Follow .next until the boundary loop wraps back to the start.
		while (cp !== cpStart) {
			cps.push(cp);
			cp = cp.next;
		}
		return cps;
	}
	/**
	 * Primarily for internal use.
	 *
	 * Insert a [[CpNode]] into the MAT tree graph after the specified point
	 * and returns the freshly inserted [[CpNode]].
	 * @param isHoleClosing True if this is a hole closing contact point.
	 * @param isIntersection True if this is a contact point at a shape boundary
	 * intersection point.
	 * @param cpTree The tree graph holding the [[CpNodes]] of the MAT.
	 * @param cp [[ContactPoint]] defining the [[CpNode]].
	 * @param prev_ Inserts the new [[CpNode]] right after this item if the
	 * loop is not empty, else insert the new [[CpNode]] as the only item in the
	 * loop.
	 */
	public static insert(
			isHoleClosing: boolean,
			isIntersection: boolean,
			cpTree: LlRbTree<CpNode>,
			cp: ContactPoint,
			prev_: CpNode) {
		let cpNode = new CpNode(cp, isHoleClosing, isIntersection);
		if (typeof _debug_ !== 'undefined') {
			// Record the node for debug visualization when debugging is on.
			_debug_.generated.elems.cpNode.push({
				generated: _debug_.generated,
				cpNode
			});
		}
		let prev;
		let next;
		if (!prev_) {
			// Empty loop: the new node links to itself.
			prev = cpNode;
			next = cpNode;
		} else {
			prev = prev_;
			next = prev.next;
		}
		// Splice the node into the doubly-linked boundary loop.
		next.prev = cpNode;
		prev.next = cpNode;
		cpNode.prev = prev;
		cpNode.next = next;
		cpTree.insert(cpNode);
		return cpNode;
	}
	/**
	 * Return this (except if exclThis is truthy) and the the other CpNodes
	 * around the maximal disk vertex circle in an anti-clockwise order.
	 * @param exclThis If true the returned array does not include this
	 * [[CpNode]].
	 */
	public getCpNodesOnCircle(exclThis = false) {
		let startCp = this as CpNode;
		let cp = startCp;
		let cps: CpNode[] = [];
		do {
			if (exclThis) {
				// Skip only the first (this) node, then collect the rest.
				exclThis = false;
			} else {
				cps.push(cp);
			}
			cp = cp.nextOnCircle;
		} while (cp !== startCp)
		return cps;
	}
	/**
	 * Returns true if the 2 given [[CpNode]]s are on the same maximal disk
	 * circle.
	 * @param cpNode1 A [[CpNode]].
	 * @param cpNode2 Another [[CpNode]]
	 */
	static isOnSameCircle(cpNode1: CpNode, cpNode2: CpNode) {
		let cpNodes = cpNode1.getCpNodesOnCircle(true);
		return cpNodes.indexOf(cpNode2) >= 0;
	}
	/**
	 * Returns true if this [[CpNode]] is terminating, i.e. implies a leaf MAT
	 * vertex.
	 *
	 * This is always the case for sharp corners and maximal disks with
	 * a single contact point. Note, however, that even in these cases there are
	 * two contact points stored (sitting 'on top' of each other) for the
	 * maximal disk. It can be seen as a limiting case of a two-prong where the
	 * distance between two of the contact points tend to zero. One point
	 * (represented by a [[CpNode]] of course) will be terminating with the
	 * other point being its [[next]], whereas the other point will *not* be
	 * terminating and 'points' back into the shape.
	 */
	public isTerminating() {
		return this === this.next.prevOnCircle;
	}
	/**
	 * Like isTerminating() but only returns true if all cpNodes on the circle
	 * (except this.prevOnCircle) is terminating.
	 */
	public isFullyTerminating() {
		let otherOnCircle = this.prevOnCircle.getCpNodesOnCircle(true);
		let isFullyTerminating = otherOnCircle.every(cpNode => cpNode.isTerminating());
		return isFullyTerminating;
	}
	/**
	 * Returns the first [[CpNode]] (from this one by successively applying
	 * .nextOnCircle) that exits the circle.
	 */
	public getFirstExit() {
		let startNode = this as CpNode;
		let cpNode = startNode;
		while (cpNode.next === cpNode.prevOnCircle) {
			cpNode = cpNode.next;
			if (cpNode === startNode) {
				// The very special case the MAT is a single point.
				return undefined;
			}
		}
		return cpNode;
	}
	/**
	 * Returns true if this [[CpNode]] represents a sharp corner, i.e. the
	 * limiting case of a two-prong having zero radius.
	 *
	 * Note that two [[CpNode]]s are stored for each sharp corner, one being
	 * terminating and one not. See [[isTerminating]] for more details.
	 */
	public isSharp() {
		return this.cp.circle.radius === 0;
	}
	/**
	 * Returns true if this [[CpNode]]'s maximal disk has only one contact point
	 * on the shape boundary (up to planar coordinates). These includes sharp
	 * corners.
	 *
	 * Note, however, that two [[CpNode]]s are stored for each such point to
	 * preserve symmetry - see [[isTerminating]] for more details.
	 */
	public isOneProng() {
		let cp1 = this;
		// Sharp corners are one-prongs by definition.
		if (cp1.cp.circle.radius === 0) {
			return true;
		}
		// Otherwise compare the planar coordinates of the two stored points.
		let cp2 = cp1.nextOnCircle;
		let p1 = cp1.cp.pointOnShape.p;
		let p2 = cp2.cp.pointOnShape.p;
		return (p1[0] === p2[0] && p1[1] === p2[1]);
	}
	/**
	 * Returns the number of contact points on the maximal disk circle implied
	 * by this [[CpNode]].
	 *
	 * Note, however, that even one-prongs and sharp corners will return 2 (see
	 * [[isTerminating]] for more details); if this is not desired use
	 * [[getRealProngCount]] instead which will return 1 in these cases.
	 */
	public getProngCount() {
		let startCp = this as CpNode;
		let cp = startCp;
		let i = 0;
		do {
			i++;
			cp = cp.nextOnCircle;
		} while (cp !== startCp)
		return i;
	}
	/**
	 * Returns the number of contact points (up to planar coordinates) on the
	 * maximal disk circle implied by this [[CpNode]].
	 *
	 * See also [[getProngCount]].
	 */
	public getRealProngCount() {
		if (this.isOneProng()) { return 1; }
		return this.getProngCount();
	}
}
export { CpNode } | the_stack |
import {
buildForm,
buildDescriptionField,
buildMultiField,
buildRepeater,
buildSection,
buildSubSection,
buildTextField,
CustomField,
buildCustomField,
} from '@island.is/application/core'
import {
convertFormToScreens,
findCurrentScreen,
getNavigableSectionsInForm,
screenHasBeenAnswered,
screenRequiresAnswer,
} from './reducerUtils'
import { FormScreen, MultiFieldScreen, RepeaterScreen } from '../types'
describe('reducerUtils', () => {
describe('find current screen', () => {
// Test helper: a description ("intro") screen with navigation metadata.
const buildIntroScreen = (
	id: string,
	isNavigable = true,
	sectionIndex = -1,
	subSectionIndex = -1,
) => {
	const field = buildDescriptionField({
		id,
		title: 'Introduction',
		description: 'welcome',
	})
	return { ...field, isNavigable, sectionIndex, subSectionIndex }
}
// Test helper: a text-field screen with navigation metadata.
const buildTextScreen = (
	id: string,
	isNavigable = true,
	sectionIndex = -1,
	subSectionIndex = -1,
) => {
	const field = buildTextField({
		id,
		title: 'What is the family name?',
	})
	return { ...field, isNavigable, sectionIndex, subSectionIndex }
}
// Test helper: a custom-component screen; extra CustomField options are
// spread over the built field before the navigation metadata is applied.
const buildCustomScreen = (
	id: string,
	isNavigable = true,
	options: Omit<
		CustomField,
		'id' | 'type' | 'component' | 'title' | 'children'
	> = {},
) => {
	const field = buildCustomField({
		id,
		title: 'Custom Title',
		component: 'CustomComponent',
	})
	return {
		...field,
		...options,
		isNavigable,
		sectionIndex: -1,
		subSectionIndex: -1,
	}
}
// Test helper: wrap the screens in a throwaway form and convert it.
const convertScreens = (
	screens: FormScreen[],
	answers = {},
	externalData = {},
) => {
	const form = buildForm({
		id: 'ExampleForm',
		title: 'asdf',
		children: screens,
	})
	return convertFormToScreens(form, answers, externalData)
}
// Shared fixture: an intro screen followed by three text screens.
const screens: FormScreen[] = [
	buildIntroScreen('intro'),
	buildTextScreen('a'),
	buildTextScreen('b'),
	buildTextScreen('c'),
]
it('should default to the first screen if there are no answers', () => {
	expect(findCurrentScreen(convertScreens(screens), {})).toBe(0)
})
it('should default to the first screen if the answers dont really match the list of screens', () => {
	// Answers whose keys match no screen ids behave like no answers at all.
	expect(
		findCurrentScreen(convertScreens(screens), {
			random: 'asdf',
			notThis: '4',
		}),
	).toBe(0)
})
it('should go to the screen where the last answer belongs to the screen before', () => {
	// 'a' (screen 1) answered -> continue at screen 2.
	expect(findCurrentScreen(convertScreens(screens), { a: 'answer' })).toBe(
		2,
	)
	// Only 'b' answered leaves 'a' missing -> start from the beginning.
	expect(findCurrentScreen(convertScreens(screens), { b: 'answer' })).toBe(
		0,
	)
	expect(
		findCurrentScreen(convertScreens(screens), {
			a: 'answer',
			b: 'answer',
		}),
	).toBe(3)
})
it('should go to the screen missing an answer', () => {
	// 'b' (screen 2) is the first unanswered screen.
	expect(
		findCurrentScreen(convertScreens(screens), {
			a: 'answer',
			c: 'answer',
		}),
	).toBe(2)
})
it('should, if the last answer is in a partially answered multifield, go to that screen', () => {
	const screens = [
		buildIntroScreen('intro'),
		buildMultiField({
			id: 'multifield',
			children: [buildTextScreen('a'), buildTextScreen('b')],
			title: 'This is a great screen',
		}) as MultiFieldScreen,
		buildTextScreen('c'),
	]
	// Either half answered keeps the user on the multifield (index 1).
	const answers1 = { a: 'sick' }
	const answers2 = { b: 'very sick' }
	expect(
		findCurrentScreen(convertScreens(screens, answers1), answers1),
	).toBe(1)
	expect(
		findCurrentScreen(convertScreens(screens, answers2), answers2),
	).toBe(1)
})
it('should, if the last answer is in a fully answered multifield, go to the next screen after', () => {
	const screens: FormScreen[] = [
		buildIntroScreen('intro'),
		buildMultiField({
			id: 'multifield',
			children: [buildTextScreen('a'), buildTextScreen('b')],
			title: 'This is a great screen',
		}) as MultiFieldScreen,
		buildTextScreen('c'),
	]
	// Both fields answered -> move past the multifield to screen 2.
	const answers = { a: 'sick', b: 'very sick' }
	expect(findCurrentScreen(convertScreens(screens, answers), answers)).toBe(
		2,
	)
})
it('should, if the last answer is a fully built repeater, go to the repeater screen', () => {
const screens: FormScreen[] = [
buildIntroScreen('intro'),
buildTextScreen('first'),
buildRepeater({
id: 'person',
children: [buildTextScreen('a'), buildTextScreen('b')],
title: 'This is a great screen',
component: 'SomeComponent',
}) as RepeaterScreen,
buildTextScreen('c'),
]
const answers1 = { person: [{ a: '1', b: '2' }] }
const answers2 = { first: 'answer', person: [{ a: '1', b: '2' }] }
const answers3 = { person: [] }
const answers4 = { first: 'asdf' }
expect(
findCurrentScreen(convertScreens(screens, answers1), answers1),
).toBe(0)
expect(
findCurrentScreen(convertScreens(screens, answers2), answers2),
).toBe(5)
// Empty repeater equals no answer so in this case we should go to the first screen
expect(
findCurrentScreen(convertScreens(screens, answers3), answers3),
).toBe(0)
expect(
findCurrentScreen(convertScreens(screens, answers4), answers4),
).toBe(2)
})
it('should, go to the last remaining answer in a repeater when everything before it has been answered', () => {
const screens: FormScreen[] = [
buildIntroScreen('intro'),
buildTextScreen('first'),
buildRepeater({
id: 'person',
children: [buildTextScreen('a'), buildTextScreen('b')],
title: 'This is a great screen',
component: 'SomeComponent',
}) as RepeaterScreen,
buildTextScreen('c'),
]
const answers1 = { first: 'hello', person: [{ a: '1' }] }
const convertedScreens = convertScreens(screens, answers1)
expect(findCurrentScreen(convertedScreens, answers1)).toBe(4)
})
it('should only skip fields that do not require answers when they come before an answer', () => {
const screens: FormScreen[] = [
buildIntroScreen('intro1'),
buildTextScreen('first'),
buildIntroScreen('intro2'),
buildIntroScreen('intro3'),
buildTextScreen('second'),
buildIntroScreen('done'),
]
const answers1 = {}
const answers2 = { first: 'something' }
const answers3 = { first: 'something', second: 'more' }
// No answer, so we should go to intro1
expect(
findCurrentScreen(convertScreens(screens, answers1), answers1),
).toBe(0)
// We have an answer to 'first' go to intro2
expect(
findCurrentScreen(convertScreens(screens, answers2), answers2),
).toBe(2)
// We have an answer to 'first' and 'second' so we're done
expect(
findCurrentScreen(convertScreens(screens, answers3), answers3),
).toBe(5)
})
it('should not stop on repeater, even if it has an answer, if a previous question is missing an answer', () => {
const screens: FormScreen[] = [
buildIntroScreen('intro1'),
buildTextScreen('first'),
buildRepeater({
id: 'person',
children: [buildTextScreen('a'), buildTextScreen('b')],
title: 'This is a great screen',
component: 'SomeComponent',
}) as RepeaterScreen,
buildTextScreen('c'),
]
const screensNoIntro = screens.slice(1)
const answers1 = { person: [{ a: '1' }] }
// We expect to go to the first intro screen before the question with the missing answer
expect(
findCurrentScreen(convertScreens(screens, answers1), answers1),
).toBe(0)
// Or the question if there was no intro screen
expect(
findCurrentScreen(convertScreens(screensNoIntro, answers1), answers1),
).toBe(0)
})
it('should stop in the middle of a repeater if it has a missing answer', () => {
const screens: FormScreen[] = [
buildIntroScreen('intro1'),
buildTextScreen('first'),
buildRepeater({
id: 'person',
children: [
buildTextScreen('a'),
buildTextScreen('b'),
buildTextScreen('c'),
],
title: 'This is a great screen',
component: 'SomeComponent',
}) as RepeaterScreen,
buildTextScreen('c'),
]
const answers1 = { first: 'hello', person: [{ a: '1' }] }
expect(
findCurrentScreen(convertScreens(screens, answers1), answers1),
).toBe(4)
})
it('should not skip a repeater with missing answers', () => {
const screens: FormScreen[] = [
buildIntroScreen('intro'),
buildTextScreen('first'),
buildRepeater({
id: 'person',
children: [
buildTextScreen('a'),
buildTextScreen('b'),
buildTextScreen('c'),
],
title: 'This is a great screen',
component: 'SomeComponent',
}) as RepeaterScreen,
buildTextScreen('second'),
buildIntroScreen('outro'),
]
const answers1 = { first: 'hello', person: [] }
const answers2 = { first: 'hello' }
expect(
findCurrentScreen(convertScreens(screens, answers1), answers1),
).toBe(2)
expect(
findCurrentScreen(convertScreens(screens, answers2), answers2),
).toBe(2)
})
it('should not jump too far when skipping a multifield', () => {
const screens: FormScreen[] = [
buildIntroScreen('intro'),
buildTextScreen('first'),
buildMultiField({
id: 'multifield',
children: [
buildTextScreen('multi.a'),
buildTextScreen('multi.b'),
buildTextScreen('multi.c'),
],
title: 'This is a great screen',
}) as MultiFieldScreen,
buildTextScreen('second'),
buildTextScreen('third'),
buildTextScreen('fourth'),
buildTextScreen('fifth'),
buildIntroScreen('outro'),
]
// multifield incomplete
const answers1 = {
first: 'hello',
multi: {
a: 'answer',
b: 'answer',
},
}
// multifield complete
const answers2 = {
first: 'hello',
multi: {
a: 'answer',
b: 'answer',
c: 'answer',
},
}
// multifield complete, missing third answer
const answers3 = {
first: 'hello',
multi: {
a: 'answer',
b: 'answer',
c: 'answer',
},
second: 'answer',
fourth: 'answer',
}
expect(
findCurrentScreen(convertScreens(screens, answers1), answers1),
).toBe(2)
expect(
findCurrentScreen(convertScreens(screens, answers2), answers2),
).toBe(3)
expect(
findCurrentScreen(convertScreens(screens, answers3), answers3),
).toBe(4)
})
describe('utility functions', () => {
  // Sets defaults when seeing if you can navigate to screens
  // Used when not set manually
  const getConvertedScreen = (screen: FormScreen, answers = {}) => {
    const converted = convertScreens([screen], answers)
    return converted.length > 0 ? converted[0] : screen
  }
  describe('screenRequiresAnswer', () => {
    it('should require answer for text screen', () => {
      expect(
        screenRequiresAnswer(getConvertedScreen(buildTextScreen('id'))),
      ).toBe(true)
    })
    it('should require answer for multi field', () => {
      expect(
        screenRequiresAnswer(
          getConvertedScreen(
            buildMultiField({
              id: 'multifield',
              children: [buildTextScreen('a'), buildTextScreen('b')],
              title: 'title',
            }) as MultiFieldScreen,
          ),
        ),
      ).toBe(true)
    })
    it('should not require answer for multi field with all child screens not navigable', () => {
      // children built with isNavigable = false
      expect(
        screenRequiresAnswer(
          buildMultiField({
            id: 'multifield',
            children: [
              buildTextScreen('a', false),
              buildTextScreen('b', false),
            ],
            title: 'title',
          }) as MultiFieldScreen,
        ),
      ).toBe(false)
    })
    it('should always require answer for repeater', () => {
      expect(
        screenRequiresAnswer(
          getConvertedScreen(
            buildRepeater({
              id: 'person',
              children: [
                buildTextScreen('a'),
                buildTextScreen('b'),
                buildTextScreen('c'),
              ],
              title: 'This is a great screen',
              component: 'SomeComponent',
            }) as RepeaterScreen,
          ),
        ),
      ).toBe(true)
    })
    it('should never require answer for a description field', () => {
      expect(screenRequiresAnswer(buildIntroScreen('intro'))).toBe(false)
    })
    it('should require an answer for custom unless doesNotRequireAnswer is set to true', () => {
      expect(screenRequiresAnswer(buildCustomScreen('custom'))).toBe(true)
      expect(
        screenRequiresAnswer(
          buildCustomScreen('custom', true, { doesNotRequireAnswer: true }),
        ),
      ).toBe(false)
    })
  })
  describe('screenHasBeenAnswered', () => {
    it('should always say a description screen has been answered', () => {
      const answers = {}
      expect(
        screenHasBeenAnswered(buildIntroScreen('intro'), answers),
      ).toBe(true)
    })
    it('should only say a text has been answered if it has a value', () => {
      const answers1 = {}
      const answers2 = { a: 'answer' }
      expect(screenHasBeenAnswered(buildTextScreen('a'), answers1)).toBe(
        false,
      )
      expect(screenHasBeenAnswered(buildTextScreen('a'), answers2)).toBe(
        true,
      )
    })
    it('should say a repeater has been answered if some or all questions have been answered', () => {
      // [] counts as unanswered; any non-empty entry counts as answered.
      const answers1 = {}
      const answers2 = { repeater: [] }
      const answers3 = { repeater: [{ a: 'answer' }] }
      const answers4 = { repeater: [{ b: 'answer' }] }
      const answers5 = { repeater: [{ a: 'answer', b: 'answer' }] }
      const buildRepeaterForTest = () =>
        buildRepeater({
          id: 'repeater',
          children: [buildTextScreen('a'), buildTextScreen('b')],
          title: 'Repeater',
          component: 'RepeaterComponent',
        }) as RepeaterScreen
      const repeater1 = getConvertedScreen(buildRepeaterForTest(), answers1)
      const repeater2 = getConvertedScreen(buildRepeaterForTest(), answers2)
      const repeater3 = getConvertedScreen(buildRepeaterForTest(), answers3)
      const repeater4 = getConvertedScreen(buildRepeaterForTest(), answers4)
      const repeater5 = getConvertedScreen(buildRepeaterForTest(), answers5)
      expect(screenHasBeenAnswered(repeater1, answers1)).toBe(false)
      expect(screenHasBeenAnswered(repeater2, answers2)).toBe(false)
      expect(screenHasBeenAnswered(repeater3, answers3)).toBe(true)
      expect(screenHasBeenAnswered(repeater4, answers4)).toBe(true)
      expect(screenHasBeenAnswered(repeater5, answers5)).toBe(true)
    })
    it('should only say a multi field has been answered if all screens requiring an answer have been answered', () => {
      // 'c' is not navigable, so only 'a' and 'b' are required.
      const buildMultiFieldForTest = () => {
        const screen = buildMultiField({
          id: 'multifield',
          children: [
            buildTextScreen('a', true),
            buildTextScreen('b', true),
            buildTextScreen('c', false),
          ],
          title: 'title',
        }) as MultiFieldScreen
        screen.isNavigable = true
        return screen
      }
      const answers1 = {}
      const answers2 = { a: 'answer' }
      const answers3 = { b: 'answer' }
      const answers4 = { c: 'answer' }
      const answers5 = { a: 'answer', c: 'answer' }
      const answers6 = { b: 'answer', c: 'answer' }
      const answers7 = { a: 'answer', b: 'answer' }
      expect(
        screenHasBeenAnswered(buildMultiFieldForTest(), answers1),
      ).toBe(false)
      expect(
        screenHasBeenAnswered(buildMultiFieldForTest(), answers2),
      ).toBe(false)
      expect(
        screenHasBeenAnswered(buildMultiFieldForTest(), answers3),
      ).toBe(false)
      expect(
        screenHasBeenAnswered(buildMultiFieldForTest(), answers4),
      ).toBe(false)
      expect(
        screenHasBeenAnswered(buildMultiFieldForTest(), answers5),
      ).toBe(false)
      expect(
        screenHasBeenAnswered(buildMultiFieldForTest(), answers6),
      ).toBe(false)
      expect(
        screenHasBeenAnswered(buildMultiFieldForTest(), answers7),
      ).toBe(true)
    })
    it('should say a multi screen has been partly answered when some answers are missing', () => {
      // Third argument `true` = accept a partially answered multifield.
      const buildMultiFieldForTest = () => {
        const screen = buildMultiField({
          id: 'multifield',
          children: [
            buildTextScreen('a', true),
            buildTextScreen('b', true),
            buildTextScreen('c', true),
          ],
          title: 'title',
        }) as MultiFieldScreen
        screen.isNavigable = true
        return screen
      }
      const answers1 = {}
      const answers2 = { a: 'answer' }
      const answers3 = { b: 'answer' }
      const answers4 = { c: 'answer' }
      const answers5 = { a: 'answer', c: 'answer' }
      const answers6 = { b: 'answer', c: 'answer' }
      expect(
        screenHasBeenAnswered(buildMultiFieldForTest(), answers1, true),
      ).toBe(false)
      expect(
        screenHasBeenAnswered(buildMultiFieldForTest(), answers2, true),
      ).toBe(true)
      expect(
        screenHasBeenAnswered(buildMultiFieldForTest(), answers3, true),
      ).toBe(true)
      expect(
        screenHasBeenAnswered(buildMultiFieldForTest(), answers4, true),
      ).toBe(true)
      expect(
        screenHasBeenAnswered(buildMultiFieldForTest(), answers5, true),
      ).toBe(true)
      expect(
        screenHasBeenAnswered(buildMultiFieldForTest(), answers6, true),
      ).toBe(true)
    })
    it('should look up answers for custom fields by id as long as childInputIds is not set', () => {
      // With childInputIds set, ALL child ids must be answered; the field's
      // own id is then ignored.
      const normalCustomScreen = buildCustomScreen(
        'customWithNoChildInputIds',
      )
      const customWithChildInputIds = buildCustomScreen(
        'customWithNoChildInputIds',
        true,
        {
          childInputIds: ['a', 'b', 'c'],
        },
      )
      const answers0 = {}
      const answers1 = { customWithNoChildInputIds: 'answer' }
      const answers2 = { a: 'answer' }
      const answers3 = { a: 'answer', b: 'answer' }
      const answers4 = { a: 'answer', b: 'answer', c: 'answer' }
      expect(screenHasBeenAnswered(normalCustomScreen, answers0)).toBe(
        false,
      )
      expect(screenHasBeenAnswered(customWithChildInputIds, answers0)).toBe(
        false,
      )
      expect(screenHasBeenAnswered(normalCustomScreen, answers1)).toBe(true)
      expect(screenHasBeenAnswered(customWithChildInputIds, answers1)).toBe(
        false,
      )
      expect(screenHasBeenAnswered(customWithChildInputIds, answers2)).toBe(
        false,
      )
      expect(screenHasBeenAnswered(customWithChildInputIds, answers3)).toBe(
        false,
      )
      expect(screenHasBeenAnswered(customWithChildInputIds, answers4)).toBe(
        true,
      )
    })
  })
})
})
describe('get navigable sections in form', () => {
  // Three condition-less sections shared across the tests below.
  const firstSection = buildSection({
    id: '1',
    title: 'first',
    children: [],
  })
  const secondSection = buildSection({
    id: '2',
    title: 'second',
    children: [],
  })
  const thirdSection = buildSection({
    id: '3',
    title: 'third',
    children: [],
  })
  it('should return all sections if no section has a condition', () => {
    const sections = [firstSection, secondSection, thirdSection]
    const form = buildForm({
      id: 'ExampleForm',
      children: sections,
      title: 'asdf',
    })
    expect(getNavigableSectionsInForm(form, {}, {})).toEqual(sections)
  })
  it('should only return sections that have non-violated conditions', () => {
    const sections = [
      firstSection,
      buildSection({
        id: '2',
        title: 'second',
        children: [],
        condition: () => false,
      }),
      thirdSection,
    ]
    const form = buildForm({
      id: 'ExampleForm',
      children: sections,
      title: 'asdf',
    })
    expect(getNavigableSectionsInForm(form, {}, {})).toEqual([
      firstSection,
      thirdSection,
    ])
  })
  it('should only return non-condition-violating sub-sections of sections', () => {
    // sub1 is filtered out (condition false); sub2 (true) and sub3 (no
    // condition) survive inside the parent section.
    const subSection = buildSubSection({
      id: 'sub1',
      title: 'sub1',
      children: [],
      condition: () => false,
    })
    const subSection2 = buildSubSection({
      id: 'sub2',
      title: 'sub2',
      children: [],
      condition: () => true,
    })
    const subSection3 = buildSubSection({
      id: 'sub3',
      title: 'sub3',
      children: [],
    })
    const sections = [
      firstSection,
      secondSection,
      buildSection({
        id: 'withSubsections',
        title: 'sick',
        children: [subSection, subSection2, subSection3],
      }),
    ]
    const form = buildForm({
      id: 'ExampleForm',
      children: sections,
      title: 'asdf',
    })
    expect(getNavigableSectionsInForm(form, {}, {})).toEqual([
      firstSection,
      secondSection,
      buildSection({
        id: 'withSubsections',
        title: 'sick',
        children: [subSection2, subSection3],
      }),
    ])
  })
})
describe('convert form to screens', () => {
  describe('conditions', () => {
    it('should hide all fields that belong to a section that violates condition', () => {
      const invisibleSection = buildSection({
        id: '1',
        title: 'where am i',
        condition: () => false,
        children: [
          buildTextField({ id: '1', title: '1' }),
          buildTextField({ id: '2', title: '2' }),
        ],
      })
      const visibleSection = buildSection({
        id: '2',
        title: 'visible',
        condition: () => true,
        children: [
          buildTextField({ id: '3', title: '3' }),
          buildTextField({ id: '4', title: '4' }),
          buildTextField({ id: '5', title: '5' }),
        ],
      })
      const form = buildForm({
        id: 'ExampleForm',
        title: 'asdf',
        children: [invisibleSection, visibleSection],
      })
      const screens = convertFormToScreens(form, {}, {})
      // Hidden fields stay in the screen list but are marked non-navigable.
      expect(screens.length).toBe(5)
      expect(screens[0].isNavigable).toBe(false)
      expect(screens[0].id).toBe('1')
      expect(screens[1].isNavigable).toBe(false)
      expect(screens[1].id).toBe('2')
      expect(screens[2].isNavigable).toBe(true)
      expect(screens[2].id).toBe('3')
      expect(screens[3].isNavigable).toBe(true)
      expect(screens[3].id).toBe('4')
      expect(screens[4].isNavigable).toBe(true)
      expect(screens[4].id).toBe('5')
    })
  })
  describe('multifield', () => {
    it('should convert multifield to a single screen', () => {
      const multifield = buildMultiField({
        id: 'multi',
        title: 'multi',
        children: [
          buildTextField({ id: '1', title: '1' }),
          buildTextField({ id: '2', title: '2' }),
          buildTextField({ id: '3', title: '3' }),
          buildTextField({ id: '4', title: '4' }),
          buildTextField({ id: '5', title: '5' }),
        ],
      })
      const form = buildForm({
        id: 'ExampleForm',
        title: 'asdf',
        children: [multifield],
      })
      const screens = convertFormToScreens(form, {}, {})
      expect(screens.length).toBe(1)
      expect(screens[0].id).toBe('multi')
    })
  })
  describe('repeaters', () => {
    const children = [
      buildTextField({ id: '1', title: '1' }),
      buildTextField({ id: '2', title: '2' }),
    ]
    const repeater = {
      ...buildRepeater({
        id: 'id',
        title: 'repeater',
        component: 'asdf',
        children,
      }),
    }
    it('should only include the repeater screen if it has not been expanded', () => {
      const form = buildForm({
        id: 'ExampleForm',
        title: 'asdf',
        children: [repeater],
      })
      // No answers given, so the repeater is not expanded into child screens.
      const screens = convertFormToScreens(form, {}, {})
      expect(screens.length).toBe(1)
      expect(screens[0]).toEqual({
        ...repeater,
        sectionIndex: -1,
        subSectionIndex: -1,
        isNavigable: true,
      })
    })
  })
  describe('sections and subsections', () => {
    it('should attach the index of the section and possibly subsections which own the screens', () => {
      // Section/sub-section indices count only VISIBLE sections, so the
      // invisible section's fields get -1 and the visible section is index 0.
      const invisibleSection = buildSection({
        id: '1',
        title: 'where am i',
        condition: () => false,
        children: [
          buildTextField({ id: '1', title: '1' }),
          buildSubSection({
            id: 'sub1',
            title: 'sub1',
            children: [buildTextField({ id: '2', title: '2' })],
          }),
        ],
      })
      const visibleSection = buildSection({
        id: '2',
        title: 'visible',
        condition: () => true,
        children: [
          buildSubSection({
            id: 'sub2',
            title: 'sub2',
            children: [
              buildTextField({ id: '3', title: '3' }),
              buildTextField({ id: '4', title: '4' }),
            ],
          }),
          buildSubSection({
            id: 'sub3',
            title: 'sub3',
            children: [buildTextField({ id: '5', title: '5' })],
          }),
        ],
      })
      const form = buildForm({
        id: 'ExampleForm',
        title: 'asdf',
        children: [
          invisibleSection,
          visibleSection,
          buildTextField({
            id: 'noSection',
            title: 'Part of no section nor parent',
          }),
        ],
      })
      const screens = convertFormToScreens(form, {}, {})
      expect(screens.length).toBe(6)
      expect(screens[0].sectionIndex).toBe(-1)
      expect(screens[0].subSectionIndex).toBe(-1)
      expect(screens[0].id).toBe('1')
      expect(screens[1].sectionIndex).toBe(-1)
      expect(screens[1].subSectionIndex).toBe(-1)
      expect(screens[1].id).toBe('2')
      expect(screens[2].sectionIndex).toBe(0)
      expect(screens[2].subSectionIndex).toBe(0)
      expect(screens[2].id).toBe('3')
      expect(screens[3].sectionIndex).toBe(0)
      expect(screens[3].subSectionIndex).toBe(0)
      expect(screens[3].id).toBe('4')
      expect(screens[4].sectionIndex).toBe(0)
      expect(screens[4].subSectionIndex).toBe(1)
      expect(screens[4].id).toBe('5')
      expect(screens[5].sectionIndex).toBe(0) // a orphaned field will inherit the last section before
      expect(screens[5].subSectionIndex).toBe(-1)
      expect(screens[5].id).toBe('noSection')
    })
  })
})
})
import Group from "./cachehandler/group";
import LimitGroup from "./cachehandler/limitGroup";
import SuperSet from "./cachehandler/superSet";
import {Client, ShardingManager} from "discord.js";
declare module "aoi.js" {
import {EventEmitter} from "events";
/** Error payload accepted by the AoiError helpers: a plain string or a structured object. */
type ErrorMsg = string | Record<string, string | boolean | object | any[]>;
// AoiError — static helpers for reporting errors raised by aoi.js code.
class AoiError {
  constructor();
  /** Reports a failing callback for a given intent (optionally with a line number). */
  static CallbackError(callback: string, intent: string, line?: number): void;
  /** Reports an error inside a command definition. */
  static CommandError(
    command: string,
    type: string,
    name: string,
    position?: number,
  ): void;
  // NOTE(review): presumably sends `message` to `channel` via `client` — confirm in implementation.
  static makeMessageError<Channel>(
    client: Bot,
    channel: Channel,
    message: ErrorMsg,
    options?: Record<string, unknown>,
  ): void;
  /** Logs a named error to the console. */
  static consoleError(errorname: string, errorMsg: ErrorMsg): void;
  static functionErrorResolve<d>(
    d: d,
    type: string,
    object: object,
    message: ErrorMsg,
  ): void;
  /** Same signature as functionErrorResolve. */
  static fnError<d>(
    d: d,
    type: string,
    object: object,
    message: ErrorMsg,
  ): void;
}
// Blacklist — manages blocked guilds/channels/roles/users for a Bot.
type BlacklistTypes = "globalUser" | "server" | "channel" | "role" | "user";
class Blacklist {
  constructor(client: Bot);
  public setBlacklist(type: BlacklistTypes, errorMsg: ErrorMsg): void;
  public blacklistIds(type: BlacklistTypes, ids: string[]): void;
  public whitelistIds(type: BlacklistTypes, ids: string[]): void;
  // NOTE(review): declared as the union type itself; a list of type names seems likelier at runtime — confirm.
  public get types(): BlacklistTypes;
  public getBlacklistTable(
    type: "all" | "globalUser" | "server" | "channel" | "role" | "user",
  ): string;
}
/** Gateway intents: the literal "all" or an explicit list of intent names. */
type IntentOptions = "all" | string[];
/** Database wiring; `db` is the adapter instance matching the chosen `type`. */
type DatabaseOption<Database> = {
  type:
    | "default"
    | "dbdjs.db"
    | "dbdts.db"
    | "dbdjs.mongo"
    | "dbdjs.db-sql"
    | "custom";
  db: Database;
  path?: string;
  tables?: Array<string>;
  extraOptions?: Record<string, any>;
  promisify?: boolean;
};
/** Controls whether (and for how long) edited messages re-trigger command handling. */
type RespondOnEditOptions = {
  commands?: boolean;
  alwaysExecute?: boolean;
  nonPrefixed?: boolean;
  time?: number;
};
// Per-cache limits keyed by cache name; null/undefined presumably means "unlimited" — confirm.
type CacheOptions = Record<string, number | undefined | null>;
/** Toggles for the built-in functionError/timeout events. */
type EventOptions = {
  functionError?: boolean;
  timeout?: boolean;
};
/** Constructor options for BaseClient/Bot. `token`, `prefix` and `intents` are required. */
type ClientOptions = {
  token: string;
  prefix: string | Array<string>;
  intents: IntentOptions;
  database?: DatabaseOption<any>;
  respondOnEdit?: RespondOnEditOptions;
  cache: CacheOptions;
  mobilePlatform?: boolean;
  fetchInvites?: {
    enabled: boolean;
    cacheInviters?: boolean;
  };
  suppressAllErrors?: boolean;
  errorMessage?: Array<string>;
  events?: EventOptions;
  disableFunctions?: Array<string>;
  autoUpdate?: boolean;
};
/** Every command variant carries aoi.js code to interpret. */
interface BaseCommand {
  code: string;
}
/** Command bound to a gateway event; extra arbitrary keys are permitted. */
interface EventCommand extends BaseCommand {
  name?: string;
  channel?: string;
  [key: string]: any;
}
// NOTE(review): merges with `class Command` declared later in this module (declaration merging).
interface Command extends BaseCommand {
  name: string;
  aliases?: string | Array<string>;
  async?: boolean;
  whitelist?: boolean;
  nonPrefixed?: boolean;
  error?: string;
}
interface AwaitCommand extends BaseCommand {
  name: string;
}
/** Command triggered by an interaction of the given prototype. */
interface InteractionCommand extends BaseCommand {
  name: string | Array<string>;
  prototype: "application" | "button" | "selectMenu";
}
/** Command bound to a user-defined event name via `listen`. */
interface CustomEventCommand extends BaseCommand {
  name: string;
  listen: string;
}
/** Command executed on an interval (`every`, presumably milliseconds — confirm). */
interface LoopCommand extends BaseCommand {
  every: number;
  channel?: string;
  executeOnStartup?: boolean;
}
/** One presence/status rotation entry; both upper- and lower-case activity types are accepted. */
type StatusOption = {
  text: string;
  url?: string;
  time: number;
  shardId?: number;
  type:
    | "PLAYING"
    | "LISTENING"
    | "WATCHING"
    | "STREAMING"
    | "playing"
    | "listening"
    | "streaming"
    | "watching";
};
/** Discord.js Client extended with aoi.js managers (interactions, caches, variables, blacklist). */
class BaseClient extends Client {
  aoiOptions: Record<string, any>;
  interactionManager: InteractionManager;
  cacheManager: CacheManager;
  variableManager: any /*VariableManager*/;
  blacklist: Blacklist;
  _api: string;
  prefix: string | string[];
  db: any /*AoijsAPI | DbdTsDb | CustomDb | Promisify*/;
  statuses: Group;
  constructor(options: ClientOptions);
  /** Registers a rotating list of presence statuses. */
  public status(d: StatusOption[]): void;
  /** Declares variables (optionally in a named table). */
  public variables(data: object, table?: string): void;
}
/**
 * The main aoi.js client. `xyzCommand(d)` methods REGISTER a handler for an
 * event; the parallel `onXyz()` methods ACTIVATE listening for that event.
 */
class Bot extends BaseClient {
  cmd: CommandManager;
  functionManager: FunctionManager;
  constructor(options: ClientOptions);
  // ---- command registration ----
  public command(d: Command): void;
  public awaitedCommand(d: AwaitCommand): void;
  // message events
  public deletedCommand(d: EventCommand): void;
  public updateCommand(d: EventCommand): void;
  public bulkDeleteCommand(d: EventCommand): void;
  // guild events
  public guildJoinCommand(d: EventCommand): void;
  public guildLeaveCommand(d: EventCommand): void;
  public guildUpdateCommand(d: EventCommand): void;
  public guildUnavailableCommand(d: EventCommand): void;
  // role events
  public roleCreateCommand(d: EventCommand): void;
  public roleUpdateCommand(d: EventCommand): void;
  public roleDeleteCommand(d: EventCommand): void;
  // channel events
  public channelCreateCommand(d: EventCommand): void;
  public channelUpdateCommand(d: EventCommand): void;
  public channelDeleteCommand(d: EventCommand): void;
  public channelPinsUpdateCommand(d: EventCommand): void;
  // stage instance events
  public stageInstanceCreateCommand(d: EventCommand): void;
  public stageInstanceUpdateCommand(d: EventCommand): void;
  public stageInstanceDeleteCommand(d: EventCommand): void;
  // thread events
  public threadCreateCommand(d: EventCommand): void;
  public threadUpdateCommand(d: EventCommand): void;
  public threadDeleteCommand(d: EventCommand): void;
  public threadListSyncCommand(d: EventCommand): void;
  public threadMemberUpdateCommand(d: EventCommand): void;
  // member events
  public joinCommand(d: EventCommand): void;
  public leaveCommand(d: EventCommand): void;
  public memberUpdateCommand(d: EventCommand): void;
  public threadMembersUpdateCommand(d: EventCommand): void;
  public memberAvailableCommand(d: EventCommand): void;
  public membersChunkCommand(d: EventCommand): void;
  // emoji events
  public emojiCreateCommand(d: EventCommand): void;
  public emojiDeleteCommand(d: EventCommand): void;
  public emojiUpdateCommand(d: EventCommand): void;
  // ban / invite events
  public banAddCommand(d: EventCommand): void;
  public banRemoveCommand(d: EventCommand): void;
  public inviteCreateCommand(d: EventCommand): void;
  public inviteDeleteCommand(d: EventCommand): void;
  // reaction events
  public reactionAddCommand(d: EventCommand): void;
  public reactionRemoveCommand(d: EventCommand): void;
  public reactionRemoveAllCommand(d: EventCommand): void;
  public reactionRemoveEmojiCommand(d: EventCommand): void;
  // presence / voice
  public presenceUpdateCommand(d: EventCommand): void;
  public voiceStateUpdateCommand(d: EventCommand): void;
  // interactions / application commands
  public interactionCommand(d: InteractionCommand): void;
  public applicationCmdCreateCommand(d: EventCommand): void;
  public applicationCmdDeleteCommand(d: EventCommand): void;
  public applicationCmdUpdateCommand(d: EventCommand): void;
  public userUpdateCommand(d: EventCommand): void;
  // aoi.js variable events
  public variableCreateCommand(d: EventCommand): void;
  public variableDeleteCommand(d: EventCommand): void;
  public variableUpdateCommand(d: EventCommand): void;
  // lifecycle / misc
  public readyCommand(d: EventCommand): void;
  public functionErrorCommand(d: EventCommand): void;
  public loopCommand(d: LoopCommand): void;
  public timeoutCommand(d: EventCommand): void;
  public pulseCommand(d: EventCommand): void;
  public rateLimitCommand(d: EventCommand): void;
  public webhookUpdateCommand(d: EventCommand): void;
  // ---- event listener activation ----
  public onMessage(d?: { guildOnly?: boolean; respondToBot?: boolean }): void;
  public onMessageDelete(): void;
  public onMessageUpdate(): void;
  public onMessageDeleteBulk(): void;
  public onGuildJoin(): void;
  public onGuildLeave(): void;
  public onGuildUpdate(): void;
  public onGuildUnavailable(): void;
  public onRoleCreate(): void;
  public onRoleUpdate(): void;
  public onRoleDelete(): void;
  public onChannelCreate(): void;
  public onChannelUpdate(): void;
  public onChannelDelete(): void;
  public onChannelPinsUpdate(): void;
  public onStageInstanceCreate(): void;
  public onStageInstanceUpdate(): void;
  public onStageInstanceDelete(): void;
  public onThreadCreate(): void;
  public onThreadUpdate(): void;
  public onThreadDelete(): void;
  public onThreadListSync(): void;
  public onThreadMemberUpdate(): void;
  public onThreadMembersUpdate(): void;
  public onJoin(): void;
  public onLeave(): void;
  public onMemberUpdate(): void;
  public onMemberAvailable(): void;
  public onMembersChunk(): void;
  public onEmojiCreate(): void;
  public onEmojiDelete(): void;
  public onEmojiUpdate(): void;
  // NOTE(review): sticker listeners exist but no matching stickerXxxCommand registrars are declared — confirm intended.
  public onStickerCreate(): void;
  public onStickerDelete(): void;
  public onStickerUpdate(): void;
  public onBanAdd(): void;
  public onBanRemove(): void;
  public onInviteCreate(): void;
  public onInviteDelete(): void;
  public onReactionAdd(): void;
  public onReactionRemove(): void;
  public onReactionRemoveAll(): void;
  public onReactionRemoveEmoji(): void;
  public onVoiceStateUpdate(): void;
  public onPresenceUpdate(): void;
  public onTypingStart(): void;
  public onInteractionCreate(): void;
  public onApplicationCmdCreate(): void;
  public onApplicationCmdDelete(): void;
  public onApplicationCmdUpdate(): void;
  public onUserUpdate(): void;
  public onVariableCreate(): void;
  public onVariableDelete(): void;
  public onVariableUpdate(): void;
  public onRateLimit(): void;
  public onWebhookUpdate(): void;
}
// CacheManager — creates/deletes named caches of the three supported kinds.
// The overloads tie each cache type literal to its backing collection class.
type CacheTypes = "cache" | "limitCache" | "setCache";
class CacheManager {
  constructor(client: Bot);
  // NOTE(review): declared as the union type; a list of valid type names seems likelier at runtime — confirm.
  public get types(): CacheTypes;
  public _validType(type: string): boolean;
  public createCache(type: "cache", name: string): Group;
  public createCache(type: "limitCache", name: string): LimitGroup;
  public createCache(type: "setCache", name: string): SuperSet;
  public deleteCache(type: "cache", name: string): Group;
  public deleteCache(type: "limitCache", name: string): LimitGroup;
  public deleteCache(type: "setCache", name: string): SuperSet;
  /** Builds the discord.js cache configuration from aoi.js CacheOptions. */
  public static _DjsCacheManager(cache: CacheOptions): any;
}
// ClientShard — discord.js ShardingManager bound to a Bot, with hooks for shard lifecycle events.
class ClientShard extends ShardingManager {
  file: string;
  client: Bot;
  constructor(file: string, options: object, client: Bot);
  public onShardDisconnect(): void;
  public onShardError(): void;
  public onShardResume(): void;
  public onShardReconnecting(): void;
  public onShardReady(): void;
}
// Command — runtime representation of a registered command (merges with the
// `Command` interface declared above). Arbitrary keys come from the raw definition.
class Command {
  [key: string]: any;
  __client__: Bot;
  constructor(d: object, client: Bot);
  public serializeFunctions(): string[];
  public serializeCode(): void | string[];
  public toString(): string;
  /** Entries of the command object as [key, value] pairs. */
  public toArray(): [string, any][];
  public keys(): string[];
  public values(): unknown[];
}
/** Registry of commands; optionally forms built-in and custom command tables on construction. */
class CommandManager {
  client: Bot;
  customCmds?: Array<string>;
  constructor(client: Bot, formCommand?: boolean, customCmds?: string[]);
  public get types(): string[];
  public createCommand(d: any): void;
  public formCommand(): void;
  public formCustomCommand(customCmds: string[]): void;
}
// FunctionManager — caches the built-in aoi.js functions and lets users add custom ones.
class FunctionManager {
  client: Bot;
  maps: Record<string, string[]>;
  functions: string[];
  cache: Group;
  interpreter: unknown;
  constructor(client: Bot);
  public cacheFunctions(): void;
  public createCustomFunction(data: Array<Record<string, any>>): void;
  /** Returns the function names found in the given code string. */
  public findFunctions(code: string): string[];
  public serializeCode(code: string): string[];
}
// LoadCommands — loads command files from disk into a CommandManager.
class LoadCommands {
  Client: Bot;
  AddToClient?: boolean;
  constructor(Client: Bot, AddToClient?: boolean);
  public load(cmd: CommandManager, path: string, debug?: boolean): void;
  public update(debug?: boolean): void;
  /** Console-output color configuration for the loader's debug logging. */
  public setColors(colors: object): void;
  public get allColors(): object;
  public get themes(): object;
}
/** User-defined events: register handlers with command() and start emitting with listen(). */
class CustomEvent extends EventEmitter {
  client: Bot;
  commands: Group;
  constructor(client: Bot);
  command(d: CustomEventCommand): void;
  listen(event: string): void;
}
// NOTE(review): name is misspelled ("Applcation"); renaming would break
// consumers of this public declaration, so it is left as-is.
type ApplcationOptionData = {
  type:
    | "SUB_COMMAND"
    | "SUB_COMMAND_GROUP"
    | "STRING"
    | "INTEGER"
    | "BOOLEAN"
    | "USER"
    | "CHANNEL"
    | "ROLE"
    | "MENTIONABLE"
    | "NUMBER"
    | number;
};
/** Payload for registering an application (slash/user/message) command; guildId scopes it to one guild. */
type ApplicationData = {
  data: {
    name: string;
    description: void | string;
    options?: object[];
    type?: "CHAT_INPUT" | "USER" | "MESSAGE";
    defaultPermission?: boolean;
  };
  guildId?: string;
};
/** Base interaction handler; resolve() normalizes a raw interaction object. */
class Interaction extends EventEmitter {
  client: Bot;
  constructor(client: Bot);
  public resolve<Interaction>(interaction: Interaction): Interaction;
}
/** Stores and resolves application/button/select-menu data for interaction commands. */
class InteractionManager extends Interaction {
  client: Bot;
  awaitComponents: unknown /*Await*/;
  componentCollector: unknown /*CustomCollector*/;
  buttonData: Group;
  applicationData: Group;
  selectMenuData: Group;
  constructor(client: Bot);
  public createApplicationData(d: ApplicationData): void;
  public createButtonData(d: object): void;
  public createSelectMenuData(d: object): void;
  public stringifyApplicationData(name: string): string;
  public resolveButtonData(name: string): string;
  public resolveSelectMenuData(name: string): string;
  public resolveSelectMenuOptionData(options: object[]): string;
  public get buttonDataLength(): number;
}
}
import { Expect, throwExpect } from './Assert';
import { Action } from './Action';
import { Contract } from './Contract';
import { Record } from 'immutable';
import { Printer } from './Printer';
import { Invariant } from './Invariant';
import { SuccessInvariant } from './Invariant/SuccessInvariant';
import { RemainsInvariant } from './Invariant/RemainsInvariant';
import { StaticInvariant } from './Invariant/StaticInvariant';
import { Expectation } from './Expectation';
import { formatResult } from './ErrorReporter';
import { Invokation, InvokationOpts } from './Invokation';
import { Event } from './Event';
import { formatEvent } from './Formatter';
import { Map } from 'immutable';
import { Settings } from './Settings';
import { Accounts, loadAccounts } from './Accounts';
import Web3 from 'web3';
import { Saddle } from 'eth-saddle';
import { Command, Fetcher } from './Command';
import { Value} from './Value';
// Synthetic starting block height assigned to a freshly initialized world.
const startingBlockNumber = 1000;
// Deployed contracts indexed by address.
type ContractIndex = { [address: string]: Contract };
// Mutable counter holder (an object wrapper, so the count can be bumped
// in place across immutable world copies — presumably intentional).
type Counter = { value: number };
// Maps an event signature to the function that decodes its raw log.
type EventDecoder = { [eventSignature: string]: (log: any) => any };
/**
 * The full property set backing the immutable `World` record: scenario
 * bookkeeping (actions, invariants, expectations), chain handles (web3,
 * saddle), and run configuration (network, dryRun, settings, ...).
 */
export interface WorldProps {
  actions: Action<any>[];
  event: Event | null;
  lastInvokation: Invokation<any> | null;
  // True when an invokation happened since the last invariant check.
  newInvokation: boolean;
  blockNumber: number;
  gasCounter: Counter;
  lastContract: Contract | null;
  invariants: Invariant[];
  expectations: Expectation[];
  contractIndex: ContractIndex;
  contractData: Map<string, object>;
  expect: Expect;
  web3: Web3 | null;
  saddle: Saddle | null;
  printer: Printer | null;
  network: string | null;
  dryRun: boolean;
  verbose: boolean;
  settings: Settings;
  accounts: Accounts | null;
  invokationOpts: InvokationOpts;
  trxInvokationOpts: Map<string, any>;
  basePath: string | null;
  totalGas: number | null;
  eventDecoder: EventDecoder;
  fs: object | null;
  commands: Command<any>[] | undefined;
  fetchers: Fetcher<any, Value>[] | undefined;
}
// Defaults for the `World` Record; every key listed here becomes a tracked
// Record field (keys absent here are NOT stored by Immutable's Record).
const defaultWorldProps: WorldProps = {
  actions: <Action<any>[]>[],
  event: null,
  lastInvokation: null,
  newInvokation: false,
  blockNumber: 0,
  gasCounter: {value: 0},
  lastContract: null,
  invariants: [],
  expectations: [],
  contractIndex: {},
  contractData: Map({}),
  // Default to the throwing expectation implementation.
  expect: throwExpect,
  web3: null,
  saddle: null,
  printer: null,
  network: null,
  dryRun: false,
  verbose: false,
  settings: Settings.default(null, null),
  accounts: null,
  invokationOpts: {},
  trxInvokationOpts: Map({}),
  basePath: null,
  totalGas: null,
  eventDecoder: {},
  fs: null,
  commands: undefined,
  fetchers: undefined,
};
/**
 * Immutable state record threaded through every scenario step. All
 * mutators return a new `World`; helpers below (`loadSettings`,
 * `addAction`, ...) wrap common updates.
 */
export class World extends Record(defaultWorldProps) {
  public readonly actions!: Action<any>[];
  public readonly event!: Event | null;
  // NOTE(review): `value` has no entry in WorldProps/defaultWorldProps, so
  // the Record never stores it and `this.value` reads as undefined — the
  // `this.value ? ...` branch in getInvokationOpts may be dead. Confirm.
  public readonly value!: number | null;
  public readonly lastInvokation!: Invokation<any> | null;
  public readonly newInvokation!: boolean;
  public readonly blockNumber!: number;
  public readonly gasCounter!: Counter;
  public readonly lastContract!: Contract | null;
  public readonly invariants!: Invariant[];
  public readonly expectations!: Expectation[];
  public readonly contractIndex!: ContractIndex;
  public readonly contractData!: Map<string, object>;
  public readonly expect!: Expect;
  // NOTE(review): these are declared non-null even though WorldProps allows
  // null — callers apparently rely on initWorld having populated them.
  public readonly web3!: Web3;
  public readonly saddle!: Saddle;
  public readonly printer!: Printer;
  public readonly network!: string;
  public readonly dryRun!: boolean;
  public readonly verbose!: boolean;
  public readonly settings!: Settings;
  public readonly accounts!: Accounts;
  public readonly invokationOpts!: InvokationOpts;
  public readonly trxInvokationOpts!: Map<string, any>;
  public readonly basePath!: string | null;
  public constructor(values?: Partial<WorldProps>) {
    values ? super(values) : super();
  }
  // Layers world-level invokation opts (and trx value, if any) on top of
  // the caller-supplied base options.
  getInvokationOpts(baseOpts: InvokationOpts): InvokationOpts {
    return {
      ...baseOpts,
      ...this.invokationOpts,
      ...this.value ? {value: this.value.toString()} : {}
    };
  }
  isLocalNetwork(): boolean {
    return this.network === 'test' || this.network === 'development' || this.network === 'coverage';
  }
  // Applies `fn` to the settings, persists them, and returns the updated
  // world.
  async updateSettings(fn: (settings: Settings) => Promise<Settings>): Promise<World> {
    // TODO: Should we do an immutable update?
    const newSettings = await fn(this.settings);
    // TODO: Should we await or just let it clobber?
    await newSettings.save();
    return this.set('settings', newSettings);
  }
  // Default sender: the 'Me' alias if configured, else the 'default'
  // account, else null.
  defaultFrom(): string | null {
    let settingsFrom = this.settings.findAlias('Me');
    if (settingsFrom) {
      return settingsFrom;
    }
    let accountsDefault = this.accounts.get('default');
    if (accountsDefault) {
      return accountsDefault.address;
    }
    return null;
  }
}
/**
 * Loads per-network invokation options from the JSON-encoded
 * `<network>_opts` environment variable, defaulting to none.
 */
export function loadInvokationOpts(world: World): World {
  const rawOpts = process.env[`${world.network}_opts`];
  const parsedOpts = rawOpts ? JSON.parse(rawOpts) : {};
  return world.set('invokationOpts', parsedOpts);
}
/** Enables verbose output when the `verbose` env var is set (non-empty). */
export function loadVerbose(world: World): World {
  const isVerbose = Boolean(process.env['verbose']);
  return world.set('verbose', isVerbose);
}
/** Enables dry-run mode when the `dry_run` env var is set (non-empty). */
export function loadDryRun(world: World): World {
  const isDryRun = Boolean(process.env['dry_run']);
  return world.set('dryRun', isDryRun);
}
/**
 * Loads persisted settings from disk for the current network; a world with
 * no base path is returned unchanged.
 */
export async function loadSettings(world: World): Promise<World> {
  if (!world.basePath) {
    return world;
  }
  const loaded = await Settings.load(world.basePath, world.network);
  return world.set('settings', loaded);
}
/**
 * Builds a fresh `World` for a scenario run.
 *
 * @param expect expectation implementation (throwing or collecting)
 * @param printer sink for action/step output
 * @param iweb3 connected web3 instance
 * @param saddle eth-saddle handle for contract deployment/calls
 * @param network network name (e.g. 'test', 'development')
 * @param accounts unlocked account addresses
 * @param basePath project root for settings/files, or null
 * @param totalGas optional gas budget to track, or null
 */
export async function initWorld(
  expect: Expect,
  printer: Printer,
  iweb3: Web3,
  saddle: Saddle,
  network: string,
  accounts: string[],
  basePath: string | null,
  totalGas: number | null
): Promise<World> {
  return new World({
    actions: [],
    event: null,
    lastInvokation: null,
    newInvokation: true,
    blockNumber: startingBlockNumber,
    gasCounter: {value: 0},
    lastContract: null,
    invariants: [new SuccessInvariant()], // Start with invariant success,
    expectations: [],
    contractIndex: {},
    contractData: Map({}),
    expect: expect,
    web3: iweb3,
    saddle: saddle,
    printer: printer,
    network: network,
    settings: Settings.default(basePath, null),
    accounts: loadAccounts(accounts),
    trxInvokationOpts: Map({}),
    basePath: basePath,
    totalGas: totalGas ? totalGas : null,
    eventDecoder: {},
    // Use an in-memory fs stub only on the test network.
    fs: network === 'test' ? {} : null
  });
}
/** Returns a copy of the world with `event` recorded as the current event. */
export function setEvent(world: World, event: Event): World {
  return world.set('event', event);
}
/**
 * Records an executed action: appends it to the world's action log, prints
 * it, and marks its invokation as the most recent one.
 */
export function addAction(world: World, log: string, invokation: Invokation<any>): World {
  const action = new Action(log, invokation);
  world = world.update('actions', actions => actions.concat([action]));
  // Print the action via the printer
  world.printer.printAction(action);
  // `Record.merge` takes partial updates directly; the previous code also
  // passed `world` itself as a first (no-op) merge collection.
  return world.merge({
    lastInvokation: invokation,
    newInvokation: true
  });
}
/** Appends `invariant` to the world's active invariant list. */
export function addInvariant(world: World, invariant: Invariant): World {
  return world.update('invariants', (existing) => [...existing, invariant]);
}
/** Appends `expectation` to the world's pending expectation list. */
export function addExpectation(world: World, expectation: Expectation): World {
  return world.update('expectations', (existing) => [...existing, expectation]);
}
// Returns a predicate over invariants keyed by `type`; used both to decide
// which invariants to KEEP in clearInvariants and which to mark held in
// holdInvariants.
//
// NOTE(review): the two usages give the predicate opposite meanings — e.g.
// `all` returns true for everything, so clearInvariants('all') keeps every
// invariant while holdInvariants('all') holds every invariant. Confirm the
// intended semantics.
function getInvariantFilter(type: string) {
  let filters: { [filter: string]: (invariant: Invariant) => boolean } = {
    all: _invariant => true,
    success: invariant => !(invariant instanceof SuccessInvariant),
    remains: invariant => !(invariant instanceof RemainsInvariant),
    static: invariant => !(invariant instanceof StaticInvariant)
  };
  let filter = filters[type.toLowerCase()];
  if (!filter) {
    throw new Error(`Unknown invariant type \`${type}\` when wiping invariants.`);
  }
  return filter;
}
/** Drops invariants of the given `type`, keeping only those the filter accepts. */
export function clearInvariants(world: World, type: string): World {
  const keep = getInvariantFilter(type);
  return world.update('invariants', (invariants) => invariants.filter(keep));
}
/** Marks invariants matching `type` as held (skipped at the next check). */
export function holdInvariants(world: World, type: string): World {
  const matches = getInvariantFilter(type);
  return world.update('invariants', (invariants) =>
    invariants.map((invariant) => {
      if (matches(invariant)) {
        invariant.held = true;
      }
      return invariant;
    })
  );
}
/**
 * Runs every pending expectation against the world (only after a new
 * invokation) and clears the expectation list.
 */
export async function checkExpectations(world: World): Promise<World> {
  if (!world.get('newInvokation')) {
    return world;
  }
  const pending = world.get('expectations');
  await Promise.all(pending.map((expectation) => expectation.checker(world)));
  return world.set('expectations', []);
}
/**
 * Checks every non-held invariant against the world (only after a new
 * invokation), then resets all one-shot "held" markers.
 */
export async function checkInvariants(world: World): Promise<World> {
  if (!world.get('newInvokation')) {
    return world;
  }
  await Promise.all(
    world.get('invariants').map((invariant) =>
      invariant.held ? undefined : invariant.checker(world)
    )
  );
  // Remove holds
  return world.update('invariants', (invariants) =>
    invariants.map((invariant) => {
      invariant.held = false;
      return invariant;
    })
  );
}
/**
 * Renders a human-friendly name for an address: a settings alias, the
 * configured root sender, a named unlocked account, or the raw address.
 */
export function describeUser(world: World, address: string): string {
  const short = `${address.slice(0, 6)}...`;
  // Look up by alias
  const aliasEntry = Object.entries(world.settings.aliases).find(
    ([, aliasAddr]) => aliasAddr === address
  );
  if (aliasEntry) {
    return `${aliasEntry[0]} (${short})`;
  }
  // Look up by `from`
  if (world.settings.from === address) {
    return `root (${short})`;
  }
  // Look up by unlocked accounts
  const matched = world.accounts.find((acct) => acct.address === address);
  if (matched) {
    return `${matched.name} (${short})`;
  }
  // Otherwise, just return the address itself
  return address;
}
// Fails an assertion with reason
export function fail(world: World, reason: string): World {
if (world.event) {
world.expect(undefined).fail(`${reason} processing ${formatEvent(world.event)}`);
} else {
world.expect(undefined).fail(reason);
}
return world;
} | the_stack |
import React from 'react';
import {
Image,
ImageProps,
TouchableOpacity,
} from 'react-native';
import {
fireEvent,
render,
RenderAPI,
waitForElement,
} from 'react-native-testing-library';
import {
light,
mapping,
} from '@eva-design/eva';
import { Text } from '../text/text.component';
import { IndexPath } from '../../devsupport';
import { ApplicationProvider } from '../../theme';
import {
Select,
SelectProps,
} from './select.component';
import { SelectGroup } from './selectGroup.component';
import {
SelectItem,
SelectItemProps,
} from '../select/selectItem.component';
import { CheckBox } from '../checkbox/checkbox.component';
/*
* Mock UIManager since Select relies on native measurements
* Mock Animated for testing animation callbacks
*/
jest.mock('react-native', () => {
  const ActualReactNative = jest.requireActual('react-native');
  // Stub native measurement with a fixed 42x42 frame at the origin so the
  // Select popover can be positioned in the test environment.
  ActualReactNative.UIManager.measureInWindow = (node, callback) => {
    callback(0, 0, 42, 42);
  };
  // Make animations complete synchronously so show/hide callbacks fire
  // without timers.
  ActualReactNative.Animated = {
    ...ActualReactNative.Animated,
    timing: () => ({
      start: (callback) => {
        callback();
      },
    }),
  };
  return ActualReactNative;
});
describe('@select-item: component checks', () => {
  // Renders a SelectItem inside the required theme provider.
  const TestSelectItem = (props?: SelectItemProps) => (
    <ApplicationProvider
      mapping={mapping}
      theme={light}>
      <SelectItem {...props} />
    </ApplicationProvider>
  );
  it('should render text passed to title prop', () => {
    const component = render(
      <TestSelectItem title='I love Babel'/>,
    );
    expect(component.queryByText('I love Babel')).toBeTruthy();
  });
  it('should render component passed to title prop', () => {
    const component = render(
      <TestSelectItem title={props => <Text {...props}>I love Babel</Text>}/>,
    );
    expect(component.queryByText('I love Babel')).toBeTruthy();
  });
  it('should render components passed to accessoryLeft or accessoryRight props', () => {
    const AccessoryLeft = (props): React.ReactElement<ImageProps> => (
      <Image
        {...props}
        source={{ uri: 'https://akveo.github.io/eva-icons/fill/png/128/star.png' }}
      />
    );
    const AccessoryRight = (props): React.ReactElement<ImageProps> => (
      <Image
        {...props}
        source={{ uri: 'https://akveo.github.io/eva-icons/fill/png/128/home.png' }}
      />
    );
    const component = render(
      <TestSelectItem
        accessoryLeft={AccessoryLeft}
        accessoryRight={AccessoryRight}
      />,
    );
    // Accessories render in left-to-right order within the item.
    const [accessoryLeft, accessoryRight] = component.queryAllByType(Image);
    expect(accessoryLeft).toBeTruthy();
    expect(accessoryRight).toBeTruthy();
    expect(accessoryLeft.props.source.uri).toEqual('https://akveo.github.io/eva-icons/fill/png/128/star.png');
    expect(accessoryRight.props.source.uri).toEqual('https://akveo.github.io/eva-icons/fill/png/128/home.png');
  });
  it('should call onPress', () => {
    const onPress = jest.fn();
    const component = render(
      <TestSelectItem onPress={onPress}/>,
    );
    fireEvent.press(component.queryByType(TouchableOpacity));
    expect(onPress).toHaveBeenCalled();
  });
  it('should call onPressIn', () => {
    const onPressIn = jest.fn();
    const component = render(
      <TestSelectItem onPressIn={onPressIn}/>,
    );
    fireEvent(component.queryByType(TouchableOpacity), 'pressIn');
    expect(onPressIn).toHaveBeenCalled();
  });
  it('should call onPressOut', () => {
    const onPressOut = jest.fn();
    const component = render(
      <TestSelectItem onPressOut={onPressOut}/>,
    );
    fireEvent(component.queryByType(TouchableOpacity), 'pressOut');
    expect(onPressOut).toHaveBeenCalled();
  });
});
describe('@select: component checks', () => {
  // Controlled Select wrapper (two plain options) so selection state
  // updates between interactions, plus ref forwarding for imperative tests.
  const TestSelect = React.forwardRef((props: Partial<SelectProps>, ref: React.Ref<Select>) => {
    const [selectedIndex, setSelectedIndex] = React.useState(props.selectedIndex);
    const onSelect = (index: IndexPath | IndexPath[]) => {
      setSelectedIndex(index);
      props.onSelect && props.onSelect(index);
    };
    return (
      <ApplicationProvider
        mapping={mapping}
        theme={light}>
        <Select
          ref={ref}
          {...props}
          selectedIndex={selectedIndex}
          onSelect={onSelect}>
          <SelectItem title='Option 1'/>
          <SelectItem title='Option 2'/>
        </Select>
      </ApplicationProvider>
    );
  });
  /*
   * In this test:
   * [0] for modal control touchable
   * [1] for modal backdrop
   * ...rest for options
   */
  const touchables = {
    findControlTouchable: (api: RenderAPI) => api.queryAllByType(TouchableOpacity)[0],
    findBackdropTouchable: (api: RenderAPI) => api.queryAllByType(TouchableOpacity)[1],
    findOptionTouchable: (api: RenderAPI, index: number) => api.queryAllByType(TouchableOpacity)[index + 2],
  };
  it('should render placeholder', () => {
    const component = render(
      <TestSelect placeholder='I love Babel'/>,
    );
    expect(component.queryByText('I love Babel')).toBeTruthy();
  });
  it('should render placeholder as function component', () => {
    const component = render(
      <TestSelect placeholder={props => <Text {...props}>I love Babel</Text>}/>,
    );
    expect(component.queryByText('I love Babel')).toBeTruthy();
  });
  it('should render placeholder as pure JSX component', () => {
    const component = render(
      <TestSelect placeholder={<Text>I love Babel</Text>}/>,
    );
    expect(component.queryByText('I love Babel')).toBeTruthy();
  });
  it('should render label', () => {
    const component = render(
      <TestSelect label='I love Babel'/>,
    );
    expect(component.queryByText('I love Babel')).toBeTruthy();
  });
  it('should render label as function component', () => {
    const component = render(
      <TestSelect label={props => <Text {...props}>I love Babel</Text>}/>,
    );
    expect(component.queryByText('I love Babel')).toBeTruthy();
  });
  it('should render label as pure JSX component', () => {
    const component = render(
      <TestSelect label={<Text>I love Babel</Text>}/>,
    );
    expect(component.queryByText('I love Babel')).toBeTruthy();
  });
  it('should render caption', () => {
    const component = render(
      <TestSelect caption='I love Babel'/>,
    );
    expect(component.queryByText('I love Babel')).toBeTruthy();
  });
  it('should render caption as function component', () => {
    const component = render(
      <TestSelect caption={props => <Text {...props}>I love Babel</Text>}/>,
    );
    expect(component.queryByText('I love Babel')).toBeTruthy();
  });
  it('should render caption as pure JSX component', () => {
    const component = render(
      <TestSelect caption={<Text>I love Babel</Text>}/>,
    );
    expect(component.queryByText('I love Babel')).toBeTruthy();
  });
  it('should render function components passed to accessoryLeft or accessoryRight props', () => {
    const AccessoryLeft = (props): React.ReactElement<ImageProps> => (
      <Image
        {...props}
        source={{ uri: 'https://akveo.github.io/eva-icons/fill/png/128/star.png' }}
      />
    );
    const AccessoryRight = (props): React.ReactElement<ImageProps> => (
      <Image
        {...props}
        source={{ uri: 'https://akveo.github.io/eva-icons/fill/png/128/home.png' }}
      />
    );
    const component = render(
      <TestSelect
        accessoryLeft={AccessoryLeft}
        accessoryRight={AccessoryRight}
      />,
    );
    const [accessoryLeft, accessoryRight] = component.queryAllByType(Image);
    expect(accessoryLeft).toBeTruthy();
    expect(accessoryRight).toBeTruthy();
    expect(accessoryLeft.props.source.uri).toEqual('https://akveo.github.io/eva-icons/fill/png/128/star.png');
    expect(accessoryRight.props.source.uri).toEqual('https://akveo.github.io/eva-icons/fill/png/128/home.png');
  });
  it('should render JSX components passed to accessoryLeft or accessoryRight props', () => {
    const AccessoryLeft = (
      <Image
        source={{ uri: 'https://akveo.github.io/eva-icons/fill/png/128/star.png' }}
      />
    );
    const AccessoryRight = (
      <Image
        source={{ uri: 'https://akveo.github.io/eva-icons/fill/png/128/home.png' }}
      />
    );
    const component = render(
      <TestSelect
        accessoryLeft={AccessoryLeft}
        accessoryRight={AccessoryRight}
      />,
    );
    const [accessoryLeft, accessoryRight] = component.queryAllByType(Image);
    expect(accessoryLeft).toBeTruthy();
    expect(accessoryRight).toBeTruthy();
    expect(accessoryLeft.props.source.uri).toEqual('https://akveo.github.io/eva-icons/fill/png/128/star.png');
    expect(accessoryRight.props.source.uri).toEqual('https://akveo.github.io/eva-icons/fill/png/128/home.png');
  });
  it('should not render options when not focused', () => {
    const component = render(
      <TestSelect/>,
    );
    expect(component.queryByText('Option 1')).toBeFalsy();
    expect(component.queryByText('Option 2')).toBeFalsy();
  });
  it('should render options when becomes focused', async () => {
    const component = render(
      <TestSelect/>,
    );
    fireEvent.press(touchables.findControlTouchable(component));
    const firstOption = await waitForElement(() => component.queryByText('Option 1'));
    const secondOption = component.queryByText('Option 2');
    expect(firstOption).toBeTruthy();
    expect(secondOption).toBeTruthy();
  });
  it('should hide options when backdrop is pressed', async () => {
    const component = render(
      <TestSelect/>,
    );
    fireEvent.press(touchables.findControlTouchable(component));
    const backdrop = await waitForElement(() => touchables.findBackdropTouchable(component));
    fireEvent.press(backdrop);
    const firstOption = await waitForElement(() => touchables.findOptionTouchable(component, 0));
    const secondOption = component.queryByText('Option 2');
    expect(firstOption).toBeFalsy();
    expect(secondOption).toBeFalsy();
  });
  it('should call onSelect with single option index', async () => {
    const onSelect = jest.fn((index: IndexPath) => {
      expect(index.row).toEqual(1);
      expect(index.section).toBeFalsy();
    });
    const component = render(
      <TestSelect onSelect={onSelect}/>,
    );
    fireEvent.press(touchables.findControlTouchable(component));
    // Flush pending effects so the options modal is mounted before pressing.
    await waitForElement(() => null);
    fireEvent.press(touchables.findOptionTouchable(component, 1));
  });
  it('should call onSelect with array of indices', async () => {
    const onSelect = jest.fn((indices: IndexPath[]) => {
      const [firstIndex, secondIndex, ...restIndices] = indices;
      expect(firstIndex.row).toEqual(0);
      expect(firstIndex.section).toBeFalsy();
      expect(secondIndex.row).toEqual(1);
      expect(secondIndex.section).toBeFalsy();
      expect(restIndices.length).toEqual(0);
    });
    const component = render(
      <TestSelect
        multiSelect={true}
        selectedIndex={[new IndexPath(0)]}
        onSelect={onSelect}
      />,
    );
    fireEvent.press(touchables.findControlTouchable(component));
    const optionTouchable = await waitForElement(() => component.queryByText('Option 2'));
    fireEvent.press(optionTouchable);
  });
  it('should render checkboxes when multiselect', async () => {
    const component = render(
      <TestSelect multiSelect={true}/>,
    );
    fireEvent.press(touchables.findControlTouchable(component));
    const checkboxes = await waitForElement(() => component.queryAllByType(CheckBox));
    expect(checkboxes.length).toEqual(2);
  });
  it('should call onSelect when pressing checkbox', async () => {
    const onSelect = jest.fn((indices: IndexPath[]) => {
      expect(indices[0].row).toEqual(1);
    });
    const component = render(
      <TestSelect
        multiSelect={true}
        onSelect={onSelect}
      />,
    );
    fireEvent.press(touchables.findControlTouchable(component));
    const option2Checkbox = await waitForElement(() => component.queryAllByType(CheckBox)[1]);
    fireEvent.press(option2Checkbox);
  });
  it('should call onFocus', async () => {
    const onFocus = jest.fn();
    const component = render(
      <TestSelect onFocus={onFocus}/>,
    );
    fireEvent.press(touchables.findControlTouchable(component));
    await waitForElement(() => expect(onFocus).toHaveBeenCalled());
  });
  it('should call onBlur', async () => {
    const onBlur = jest.fn();
    const component = render(
      <TestSelect onBlur={onBlur}/>,
    );
    fireEvent.press(touchables.findControlTouchable(component));
    await waitForElement(() => null);
    fireEvent.press(touchables.findBackdropTouchable(component));
    await waitForElement(() => expect(onBlur).toHaveBeenCalled());
  });
  it('should call onPressIn', () => {
    const onPressIn = jest.fn();
    const component = render(
      <TestSelect onPressIn={onPressIn}/>,
    );
    fireEvent(touchables.findControlTouchable(component), 'pressIn');
    expect(onPressIn).toHaveBeenCalled();
  });
  it('should call onPressOut', () => {
    const onPressOut = jest.fn();
    const component = render(
      <TestSelect onPressOut={onPressOut}/>,
    );
    fireEvent(touchables.findControlTouchable(component), 'pressOut');
    expect(onPressOut).toHaveBeenCalled();
  });
  it('should be able to call focus with ref', async () => {
    const componentRef: React.RefObject<Select> = React.createRef();
    render(
      <TestSelect ref={componentRef}/>,
    );
    expect(componentRef.current.focus).toBeTruthy();
    componentRef.current.focus();
  });
  it('should be able to call blur with ref', async () => {
    const componentRef: React.RefObject<Select> = React.createRef();
    render(
      <TestSelect ref={componentRef}/>,
    );
    expect(componentRef.current.blur).toBeTruthy();
    componentRef.current.blur();
  });
  it('should be able to call isFocused with ref', () => {
    const componentRef: React.RefObject<Select> = React.createRef();
    render(
      <TestSelect ref={componentRef}/>,
    );
    expect(componentRef.current.isFocused).toBeTruthy();
    componentRef.current.isFocused();
  });
  it('should be able to call clear with ref', () => {
    const componentRef: React.RefObject<Select> = React.createRef();
    render(
      <TestSelect ref={componentRef}/>,
    );
    expect(componentRef.current.clear).toBeTruthy();
    componentRef.current.clear();
  });
  it('should be able to call show with ref', () => {
    const componentRef: React.RefObject<Select> = React.createRef();
    render(
      <TestSelect ref={componentRef}/>,
    );
    expect(componentRef.current.show).toBeTruthy();
    componentRef.current.show();
  });
  it('should be able to call hide with ref', async () => {
    const componentRef: React.RefObject<Select> = React.createRef();
    render(
      <TestSelect ref={componentRef}/>,
    );
    expect(componentRef.current.hide).toBeTruthy();
    componentRef.current.hide();
  });
});
describe('@select: component checks with groups', () => {
  // Controlled Select wrapper with two groups of two options each, so
  // IndexPath sections are exercised.
  const TestSelect = React.forwardRef((props: Partial<SelectProps>, ref: React.Ref<Select>) => {
    const [selectedIndex, setSelectedIndex] = React.useState(props.selectedIndex);
    const onSelect = (index: IndexPath | IndexPath[]) => {
      setSelectedIndex(index);
      props.onSelect && props.onSelect(index);
    };
    return (
      <ApplicationProvider
        mapping={mapping}
        theme={light}>
        <Select
          ref={ref}
          {...props}
          selectedIndex={selectedIndex}
          onSelect={onSelect}>
          <SelectGroup title='Group 1'>
            <SelectItem title='Option 1.1'/>
            <SelectItem title='Option 1.2'/>
          </SelectGroup>
          <SelectGroup title='Group 2'>
            <SelectItem title='Option 2.1'/>
            <SelectItem title='Option 2.2'/>
          </SelectGroup>
        </Select>
      </ApplicationProvider>
    );
  });
  const touchables = {
    findControlTouchable: (api: RenderAPI) => api.queryAllByType(TouchableOpacity)[0],
  };
  it('should select single option in group', async () => {
    const onSelect = jest.fn((index: IndexPath) => {
      expect(index.row).toEqual(1);
      expect(index.section).toEqual(0);
    });
    const component = render(
      <TestSelect onSelect={onSelect}/>,
    );
    fireEvent.press(touchables.findControlTouchable(component));
    const option12Touchable = await waitForElement(() => component.getByText('Option 1.2'));
    fireEvent.press(option12Touchable);
  });
  it('should select options group', async () => {
    // Pressing a group header in multi-select selects all of its options.
    const onSelect = jest.fn((indices: IndexPath[]) => {
      const [firstIndex, secondIndex, ...restIndices] = indices;
      expect(firstIndex.row).toEqual(0);
      expect(firstIndex.section).toEqual(1);
      expect(secondIndex.row).toEqual(1);
      expect(secondIndex.section).toEqual(1);
      expect(restIndices.length).toEqual(0);
    });
    const component = render(
      <TestSelect
        multiSelect={true}
        onSelect={onSelect}
      />,
    );
    fireEvent.press(touchables.findControlTouchable(component));
    const group2Touchable = await waitForElement(() => component.getByText('Group 2'));
    fireEvent.press(group2Touchable);
  });
});
import {
Ast,
SlotAst,
jsonAst,
CallAst,
ListAst,
walkAst,
astSlotNames,
RuleAst,
StatementAst,
} from "../ast/Ast";
import SqrlAst from "../ast/SqrlAst";
import SqrlAstTransformer from "../ast/SqrlAstTransformer";
import {
SqrlSlot,
SqrlConstantSlot,
SqrlStatementSlot,
SqrlFixedSlot,
SqrlIteratorSlot,
SqrlEmptySlot,
SqrlInputSlot,
} from "../slot/SqrlSlot";
import { SqrlInstance } from "../function/Instance";
import invariant from "../jslib/invariant";
import mapObject from "../jslib/mapObject";
import { SerializedSlot, deserializeSlot } from "../slot/SerializedSlot";
import { isValidFeatureName } from "../feature/FeatureName";
import SqrlImporter from "./SqrlImporter";
import SqrlFeatureSlot from "../slot/SqrlFeatureSlot";
import { reduceTruthTable } from "./SqrlTruthTable";
import { murmurhashJsonHexSync } from "../jslib/murmurhashJson";
import { EntityId } from "../platform/EntityId";
import { Filesystem, EmptyFilesystem } from "../api/filesystem";
import SqrlRuleSlot from "../slot/SqrlRuleSlot";
import { RuleSpec, RuleSpecMap } from "../api/spec";
import { AbstractLogger } from "../util/Logger";
import { LogProperties, getGlobalLogger } from "../api/log";
import { buildSqrlError } from "./buildSqrlError";
import { sqrlInvariant } from "../api/parse";
import { FeatureMap } from "../api/execute";
import { INPUT_FUNCTION } from "../function/ControlFunctions";
/** Parsed statements plus the source text/filesystem they came from. */
export interface SqrlParserSourceOptions {
  statements: StatementAst[];
  source: string;
  filesystem: Filesystem;
}
/** Configuration for a SQRL parse run. */
export interface SqrlParserOptions {
  statements: StatementAst[];
  filesystem?: Filesystem;
  instance: SqrlInstance;
  allowAssertions?: boolean;
  allowPrivate?: boolean;
  // Library file imported as the base; defaults to "common.sqrl".
  baseLibrary?: string;
  source?: string;
  mainFile?: string;
  // Pre-supplied values for `input()` features.
  setInputs?: FeatureMap;
  allowReplaceInput?: boolean;
  usedFiles?: string[];
}
/** Serialized form of a parse: the slot table keyed by slot name. */
export interface SqrlSerialized {
  slots: {
    [name: string]: SerializedSlot;
  };
}
/** Predicate over slot names. */
export type SlotFilter = (slotName: string) => boolean;
/**
 * Builds the statement-feature names a `when` statement must wait on for a
 * given label operation.
 *
 * The parameter was previously declared optional (`props?`) but was
 * destructured unconditionally, so calling without it threw at runtime; it
 * is now required, which only rejects calls that already crashed.
 */
function labelOperationWaitNames(props: {
  operation: string;
  label: string;
  feature: string;
}): string[] {
  const { operation, label, feature } = props;
  return [`wait-label:op=${operation}:label=${label}:feature=${feature}`];
}
/**
 * Shared state for a SQRL parse: the slot table plus the options the parse
 * was started with. Also acts as a logger forwarding to the global logger.
 */
export abstract class SqrlParseInfo extends AbstractLogger {
  baseLibrary: string;
  allowAssertions: boolean;
  allowPrivate: boolean;
  instance: SqrlInstance;
  importer: SqrlImporter;
  filesystem: Filesystem;
  // Inputs supplied via options.setInputs that no `input()` feature has
  // consumed yet.
  remainingInputs: FeatureMap;
  allowReplaceInput: boolean;
  statements: StatementAst[];
  constructor(
    public slots: { [name: string]: SqrlSlot },
    public options: SqrlParserOptions
  ) {
    super();
    this.statements = options.statements;
    this.allowAssertions = options.allowAssertions || false;
    this.allowPrivate = options.allowPrivate || false;
    this.baseLibrary = options.baseLibrary || "common.sqrl";
    this.instance = options.instance;
    this.allowReplaceInput = options.allowReplaceInput || false;
    this.filesystem = options.filesystem || new EmptyFilesystem();
    this.importer = new SqrlImporter(
      this.filesystem,
      this.instance.customFunctions
    );
    this.remainingInputs = Object.assign({}, options.setInputs || {});
  }
  log(level: string, props: LogProperties, format: string, ...params: any[]) {
    // Spread the variadic format params; the previous code passed the
    // `params` array as a single argument, breaking printf-style logging.
    getGlobalLogger().log(level, props, format, ...params);
  }
  /** Resolves each name to its slot, throwing on any unknown name. */
  mapToSlots(names: string[]): SqrlSlot[] {
    return names.map((name) => {
      invariant(
        this.slots.hasOwnProperty(name),
        "Could not find slot:: " + name
      );
      return this.slots[name];
    });
  }
  // These two could be generalised with some typescript magic
  foreachRuleSlot(callback: (slot: SqrlRuleSlot) => void) {
    Object.values(this.slots).forEach((slot) => {
      if (slot instanceof SqrlRuleSlot) {
        callback(slot);
      }
    });
  }
  foreachFeatureSlot(callback: (slot: SqrlFeatureSlot) => void) {
    Object.values(this.slots).forEach((slot) => {
      if (slot instanceof SqrlFeatureSlot) {
        callback(slot);
      }
    });
  }
  /** Marks every feature slot as replaceable again. */
  resetReplaceableFeatures() {
    this.foreachFeatureSlot((slot) => slot.allowReplace());
  }
  /** Collects all rule slots keyed by name. */
  getRuleSlots() {
    const rv: { [name: string]: SqrlRuleSlot } = {};
    this.foreachRuleSlot((slot) => {
      rv[slot.name] = slot;
    });
    return rv;
  }
  /** Names of all slots that are syntactically valid feature names. */
  getFeatureSlotNames() {
    return Object.keys(this.slots).filter(isValidFeatureName);
  }
  /** Rule specs keyed by rule name. */
  getRuleSpecs(): RuleSpecMap {
    return mapObject(this.getRuleSlots(), (value) => {
      return value.ruleSpec;
    });
  }
  /** Returns the rule spec for `ruleName`, or throws a sqrl error. */
  getRuleSpec(sourceAst: Ast, ruleName: string): RuleSpec {
    const slot = this.slots[ruleName];
    if (slot instanceof SqrlRuleSlot) {
      return slot.ruleSpec;
    } else {
      throw buildSqrlError(sourceAst, `Unknown rule: ${ruleName}`);
    }
  }
}
/**
 * Mutable parser state built on SqrlParseInfo: tracks the statement sink,
 * the active global `where` clause, and the current iterator while
 * statements are transformed and pushed.
 */
export class SqrlParserState extends SqrlParseInfo {
  _astTransformer: SqrlAstTransformer | null;
  // Sink used by pushStatement(); must be set via setPushStatement().
  _pushStatement: (ast: Ast) => void = null;
  // Condition every let/rule defined from here on is guarded by.
  globalWhere: Ast = SqrlAst.constant(true);
  currentIterator: string | null = null;
  instance: SqrlInstance;
  usedFiles: Set<string>;
constructor(options: SqrlParserOptions, serialized: SqrlSerialized = null) {
super(
serialized
? mapObject(serialized.slots, (data) => deserializeSlot(data))
: {},
options
);
this.instance = this.instance;
this.usedFiles = new Set(options.usedFiles || []);
this.setDefaultValue("SqrlMutate", SqrlAst.constant(true));
this.setDefaultValue("SqrlIsClassify", SqrlAst.constant(true));
this.setDefaultValue("SqrlClock", SqrlAst.call("now", []));
const executionComplete = this.ensureStatementFeature(
null,
"SqrlExecutionComplete"
);
[...this.instance.statementFeatures].forEach((name) => {
this.ensureStatementFeature(null, name);
if (name !== "SqrlAssertionStatements") {
// Assertion statements often depend on execution
executionComplete.addWait(name);
}
});
}
ensureIterator(ast: Ast, name: string) {
// Maybe one day we won't have to give unique names to iterators, but for
// now they can be referenced as features so there's no safe way of doing
// it.
if (this.slots.hasOwnProperty(name)) {
sqrlInvariant(
ast,
this.slots[name] instanceof SqrlIteratorSlot,
"Iterators cannot have the same name as a feature or rule"
);
} else {
this.slots[name] = new SqrlIteratorSlot(name);
}
}
setDefaultValue(name: string, valueAst: Ast) {
invariant(this.slotIsEmpty(name), "Slot was already defined: %s", name);
this.slots[name] = new SqrlFixedSlot(name, valueAst, true);
}
  /**
   * Handles one parsed statement: intercepts `input()` feature definitions
   * and the `_resetReplaceableFeatures` pseudo-call before transforming,
   * then routes `let`/`rule` statements into slots and forwards everything
   * else to the registered statement sink.
   */
  pushStatement(ast: Ast) {
    invariant(
      this._pushStatement !== null,
      "ParserState push statement not set"
    );
    if (ast.type === "let") {
      if (ast.expr.type === "call" && ast.expr.func === INPUT_FUNCTION) {
        // `LET x := input()` — bind either the pre-supplied value (from
        // options.setInputs) as a constant, or a runtime input slot.
        if (!this.slotIsEmpty(ast.feature)) {
          throw buildSqrlError(ast, "Multiple definitions of feature");
        }
        if (this.remainingInputs.hasOwnProperty(ast.feature)) {
          this.slots[ast.feature] = new SqrlConstantSlot(
            ast.feature,
            this.remainingInputs[ast.feature]
          );
          delete this.remainingInputs[ast.feature];
        } else {
          this.slots[ast.feature] = new SqrlInputSlot(ast.feature);
        }
        return;
      }
    } else if (
      ast.type === "call" &&
      ast.func === "_resetReplaceableFeatures"
    ) {
      sqrlInvariant(
        ast,
        ast.args.length === 0,
        "Expected no arguments to _resetReplaceableFeatures"
      );
      this.resetReplaceableFeatures();
      return;
    }
    // Expand imports/macros/etc. before slot assignment.
    ast = this.transform(ast);
    if (ast.type === "let") {
      const oldSlot = this.slots[ast.feature];
      // A feature slot may be (re)created when the name is empty, or when
      // it holds a replaceable input/fixed default.
      if (
        this.slotIsEmpty(ast.feature) ||
        (oldSlot instanceof SqrlInputSlot && this.allowReplaceInput) ||
        (oldSlot instanceof SqrlFixedSlot && oldSlot.replaceable)
      ) {
        this.slots[ast.feature] = new SqrlFeatureSlot(ast.feature);
      }
      const slot = this.slots[ast.feature];
      if (!(slot instanceof SqrlFeatureSlot)) {
        throw buildSqrlError(ast, "Feature was previously defined");
      }
      // Merge the definition under the currently active global where-clause.
      slot.mergeGlobal(ast, this.globalWhere, (ast: Ast, globalWhere: Ast) =>
        this.combineWithProvidedGlobalWhere(ast, globalWhere)
      );
    } else if (ast.type === "rule") {
      if (!this.slotIsEmpty(ast.name)) {
        throw buildSqrlError(ast, "Feature was previously defined");
      }
      const slot = new SqrlRuleSlot(ast.name);
      this.slots[ast.name] = slot;
      slot.setRule(ast as RuleAst, this.globalWhere);
    } else {
      this._pushStatement(ast);
    }
  }
setPushStatement(pushStatement) {
this._pushStatement = pushStatement;
}
slotIsEmpty(name: string): boolean {
if (!this.slots.hasOwnProperty(name)) {
return true;
} else if (this.slots[name] instanceof SqrlEmptySlot) {
return true;
} else {
return false;
}
}
hasSlot(name) {
return this.slots.hasOwnProperty(name);
}
getSlot(name: string): SqrlSlot {
invariant(this.hasSlot(name), "Could not find named slot:: %s", name);
return this.slots[name];
}
  /**
   * Returns the feature slot bound to `name`; throws a sqrl error when no
   * slot exists and a plain Error when the slot is not a feature slot.
   */
  getFeatureSlot(sourceAst: Ast, name: string): SqrlFeatureSlot {
    if (!this.hasSlot(name)) {
      // NOTE(review): "%s" suggests printf-style formatting, but other call
      // sites build messages with template literals — confirm buildSqrlError
      // accepts format args.
      throw buildSqrlError(sourceAst, "Could not find feature: %s", name);
    }
    const slot = this.slots[name];
    if (!(slot instanceof SqrlFeatureSlot)) {
      throw new Error("Expected feature slot: " + name);
    }
    return slot;
  }
ensureAllSlots(slotNames: string[]): void {
slotNames.forEach((name) => {
if (!this.slots.hasOwnProperty(name)) {
this.slots[name] = new SqrlEmptySlot(name);
}
});
}
/**
 * Walks the ast and reports whether it contains any iterator node.
 * NOTE(review): despite the name, this does not compare against
 * `this.currentIterator` — any node of type "iterator" counts; confirm
 * that is the intended contract at the call sites.
 */
astContainsCurrentIterator(ast: Ast): boolean {
  let found = false;
  walkAst(ast, (node) => {
    found = found || node.type === "iterator";
  });
  return found;
}
/**
 * Returns the statement slot for `featureName`, creating it when absent.
 * Throws if the name is already taken by a non-statement slot.
 */
ensureStatementFeature(
  sourceAst: Ast,
  featureName: string
): SqrlStatementSlot {
  if (!this.slots.hasOwnProperty(featureName)) {
    return this.addStatementFeature(sourceAst, featureName);
  }
  const existing = this.slots[featureName];
  if (!(existing instanceof SqrlStatementSlot)) {
    throw new Error("Expected statement slot: " + featureName);
  }
  return existing;
}
/**
 * Registers a WHEN statement: the call is added to the global
 * "SqrlWhenStatements" collector, and each wait-name derived from the
 * label operations is also made to wait on the same registered call.
 */
addWhenStatement(
  sourceAst: Ast,
  basicLabelOperations,
  ast: Ast,
  name: string | null = null
) {
  const registeredCall = SqrlAst.registerCall(ast);
  this.addStatement(sourceAst, "SqrlWhenStatements", registeredCall, name);
  for (const labelOperation of basicLabelOperations) {
    for (const waitName of labelOperationWaitNames(labelOperation)) {
      this.ensureStatementFeature(sourceAst, waitName);
      this.addStatement(sourceAst, waitName, registeredCall);
    }
  }
}
/**
 * Registers a bare function-call statement under the statement feature
 * the instance associates with that function name.
 */
addCallStatement(sourceAst, ast: CallAst) {
  this.instance.assertStatementAst(ast);
  const feature = this.instance.statementFeature(ast.func);
  const registeredCall = SqrlAst.registerCall(ast);
  return this.addStatement(sourceAst, feature, registeredCall);
}
/**
 * Interns `ast` as a global slot (via newGlobal) and links it as a wait
 * of the named statement collector slot.
 * Returns the slot-reference ast of the new global.
 */
addStatement(
  sourceAst: Ast,
  globalName: string,
  ast: Ast,
  name: string = null
): SlotAst {
  const slotAst: SlotAst = this.newGlobal(sourceAst, ast, name);
  const haveSlot = this.slots.hasOwnProperty(slotAst.slotName);
  invariant(haveSlot, "Could not find given slot");
  const haveGlobal = this.slots.hasOwnProperty(globalName);
  invariant(haveGlobal, "Could not find global for statement:: %s", globalName);
  this.addStatementSlot(globalName, slotAst);
  return slotAst;
}
/**
 * Makes the named statement slot wait on the given slot reference.
 * Throws when `globalName` does not resolve to a statement slot.
 */
addStatementSlot(globalName: string, slotAst: SlotAst) {
  const target = this.slots[globalName];
  if (target instanceof SqrlStatementSlot) {
    target.addWait(slotAst.slotName);
    return;
  }
  throw new Error("Expected statement slot: " + globalName);
}
/**
 * Interns an expression as a named fixed slot and returns a slot-reference
 * ast for it.
 *
 * The expression is first wrapped in a branch on the current global WHERE
 * clause and run through the ast transformer. If no name is supplied, one
 * is derived from the hash of the final ast, so identical expressions share
 * a slot. Re-using an existing name is only allowed when the stored ast is
 * structurally equal to the new one.
 */
newGlobal(sourceAst: Ast, ast: Ast, name: string = null): SlotAst {
  invariant(typeof sourceAst === "object", "Expected object sourceAst");
  invariant(typeof ast === "object", "Expected object ast");
  // Globals may not reference the iteration variable of an in-progress
  // list comprehension.
  sqrlInvariant(
    sourceAst,
    this.currentIterator === null || !this.astContainsCurrentIterator(ast),
    "Expression is not valid during a list comprehension"
  );
  // transform globals before saving to slot
  ast = SqrlAst.branch(this.globalWhere, this.transform(ast), null);
  // If we got this far give the global a name based on its ast slot
  if (!name) {
    name = "ast:" + SqrlAst.hash(ast);
  }
  // If the global already exists ensure it is unique, otherwise create a new
  // global with the given name.
  if (!this.slotIsEmpty(name)) {
    const slot = this.slots[name];
    if (!(slot instanceof SqrlFixedSlot)) {
      throw new Error("expected fixed slot for: " + name);
    }
    if (!SqrlAst.areEqual(ast, slot.finalizedAst())) {
      throw new Error(
        "Slots saved with the same name must be identical:: " +
          `${name} [${jsonAst(slot.finalizedAst())} != ${jsonAst(ast)}]`
      );
    }
  } else {
    this.slots[name] = new SqrlFixedSlot(name, ast);
  }
  return SqrlAst.slot(this.getSlot(name));
}
/**
 * Creates and registers a brand-new statement slot for `featureName`.
 * Unlike ensureStatementFeature, an existing slot of any kind is an error.
 */
addStatementFeature(ast: Ast, featureName: string): SqrlStatementSlot {
  if (this.slots.hasOwnProperty(featureName)) {
    throw buildSqrlError(ast, "Feature already exists: " + featureName);
  }
  return (this.slots[featureName] = new SqrlStatementSlot(featureName));
}
/**
 * Returns the constant slot registered under `name`, creating one holding
 * `value` when absent. An existing non-constant slot is an error; note an
 * existing constant slot keeps its own value — `value` is ignored then.
 */
ensureConstantSlot(
  sourceAst: Ast,
  name: string,
  value: any
): SqrlConstantSlot {
  if (!this.slots.hasOwnProperty(name)) {
    const created = new SqrlConstantSlot(name, value);
    this.slots[name] = created;
    return created;
  }
  const existing = this.slots[name];
  if (existing instanceof SqrlConstantSlot) {
    return existing;
  }
  throw new Error("Expected SqrlConstantSlot for " + name);
}
/**
 * Returns the slot registered under `name`, lazily building it from
 * `callback` (outside any global WHERE clause) when it does not exist yet.
 * `callback` is only invoked on the miss path.
 */
ensureGlobalWithoutWhere(
  sourceAst: Ast,
  name: string,
  callback: () => Ast
): SqrlSlot {
  if (!this.slots.hasOwnProperty(name)) {
    this.newGlobalWithoutWhere(sourceAst, callback() as Ast, name);
  }
  return this.slots[name];
}
/**
 * Same as newGlobal, but the expression is interned with the global WHERE
 * clause temporarily reset to `true`.
 */
newGlobalWithoutWhere(
  sourceAst: Ast,
  ast: Ast,
  name: string = null
): SlotAst {
  const create = () => this.newGlobal(sourceAst, ast, name);
  return this.withoutGlobalWhere(create);
}
/**
 * Builds human-readable descriptions of dependency cycles reachable from
 * `slot`: each result is a " > "-separated path whose final name already
 * appears earlier on the path.
 *
 * Fix: the previous implementation guarded `current.add`/`current.delete`
 * with an `add` flag, but the early return above guarantees the name is
 * never already in `current` at that point, so the flag was always true
 * and has been removed (behavior unchanged).
 */
printFeatureLoop(slot: SqrlSlot) {
  const current: Set<string> = new Set();
  const results = [];
  const printLevel = (node: SqrlSlot, prefix: string = "") => {
    if (current.has(node.name)) {
      // Cycle detected: record the path up to and including the repeat.
      results.push(prefix + node.name);
      return;
    }
    current.add(node.name);
    astSlotNames(node.finalizedAst()).forEach((slotName) => {
      printLevel(this.slots[slotName], prefix + node.name + " > ");
    });
    current.delete(node.name);
  };
  printLevel(slot);
  return results;
}
/**
 * ANDs `whereAst` with the supplied global WHERE clause and returns both
 * the combined ast and a (possibly slot-interned) boolean form of it,
 * along with the reduced truth table and the features it references.
 *
 * Non-constant boolean clauses are interned under a name derived from the
 * truth table so equivalent clauses can share a slot.
 */
combineWithProvidedGlobalWhere(
  whereAst: Ast,
  globalWhere: Ast
): {
  combinedAst: Ast;
  whereAst: Ast;
  whereFeatures: string[];
  whereTruth: string;
} {
  // Merge with the global where, but take location from ast
  const combinedAst = SqrlAst.and(globalWhere, whereAst);
  combinedAst.location = whereAst.location;
  const {
    features: whereFeatures,
    truthTable: whereTruth,
  } = reduceTruthTable(combinedAst);
  // Note: the parameter is reassigned to the boolean form from here on.
  whereAst = SqrlAst.bool(combinedAst);
  if (whereAst.type !== "constant") {
    const whereBoolName = `bool(${whereFeatures.join(",")}:${whereTruth})`;
    // We skip out on newGlobal's ast comparison here since we don't (yet)
    // reduce bool clauses so bool(A && (A)) will have the same name/truth table
    // as bool(A) even though the result is identical
    if (this.slots.hasOwnProperty(whereBoolName)) {
      whereAst = SqrlAst.slot(this.getSlot(whereBoolName));
    } else {
      whereAst = this.newGlobalWithoutWhere(
        whereAst,
        SqrlAst.bool(combinedAst),
        whereBoolName
      );
    }
  }
  return { combinedAst, whereAst, whereFeatures, whereTruth };
}
/**
 * Convenience wrapper: combines `whereAst` with the context's current
 * global WHERE clause.
 */
combineGlobalWhere(whereAst: Ast) {
  const globalWhere = this.globalWhere;
  return this.combineWithProvidedGlobalWhere(whereAst, globalWhere);
}
/**
 * Runs the ast through the context's transformer, lazily constructing the
 * transformer on first use.
 */
transform(ast: Ast): Ast {
  this._astTransformer = this._astTransformer || new SqrlAstTransformer(this);
  return this._astTransformer.transform(ast);
}
/**
 * Runs `callback` with the global WHERE clause temporarily reset to the
 * constant `true`, restoring the previous clause afterwards.
 *
 * Fix: restoration now happens in a `finally` block, so a throwing
 * callback can no longer leave the context with the cleared clause.
 */
withoutGlobalWhere<T>(callback: () => T): T {
  const prevWhere: Ast = this.globalWhere;
  this.globalWhere = SqrlAst.constant(true);
  try {
    return callback();
  } finally {
    this.globalWhere = prevWhere;
  }
}
/**
 * Runs `callback` with `whereAst` ANDed onto the global WHERE clause,
 * restoring the previous clause afterwards.
 *
 * Fix: restoration now happens in a `finally` block, so a throwing
 * callback can no longer leave the extended clause in place.
 */
wrapWhere<T>(whereAst: Ast, callback: () => T): T {
  // Run a reduce on this ast to ensure that we can reduce it
  reduceTruthTable(whereAst);
  const prevWhere = this.globalWhere;
  this.globalWhere = SqrlAst.and(this.globalWhere, whereAst);
  try {
    return callback();
  } finally {
    this.globalWhere = prevWhere;
  }
}
/**
 * Runs `callback` with `iterator` set as the current iterator. Nesting is
 * not supported (invariant).
 *
 * Fix: the iterator is now cleared in a `finally` block, so a throwing
 * callback can no longer leave the context stuck inside an iterator —
 * which would make every later wrapIterator call fail its invariant.
 */
wrapIterator<T>(iterator: string, callback: () => T): T {
  invariant(
    this.currentIterator === null,
    "Multiple levels of iterators are not supported."
  );
  this.currentIterator = iterator;
  try {
    return callback();
  } finally {
    this.currentIterator = null;
  }
}
/**
 * Serializes the context by serializing every registered slot.
 */
serialize(): SqrlSerialized {
  const slots = mapObject(this.slots, (slot) => slot.serialize());
  return { slots };
}
extractDataObjectConstantKeys(ast) {
sqrlInvariant(
ast,
SqrlAst.isSimpleDataObject(ast),
"Expected argument to be object with constant keys"
);
if (ast.type === "constant" && typeof ast.value === "object") {
return Object.keys(ast.value);
}
const keys = [];
for (let idx = 0; idx < ast.args.length; idx += 2) {
const arg = ast.args[idx];
sqrlInvariant(
arg,
arg.type === "constant" && typeof arg.value === "string",
"Expected constant string key for object here"
);
keys.push(arg.value);
}
return keys;
}
/**
 * Extracts the string values of a list ast whose elements must all be
 * constant strings; any other element raises a SQRL error.
 */
extractListConstantStrings(ast: ListAst): string[] {
  sqrlInvariant(ast, ast.type === "list", "Expected list");
  const values: string[] = [];
  for (const expr of ast.exprs) {
    if (expr.type !== "constant" || typeof expr.value !== "string") {
      throw buildSqrlError(ast, "Expected constant string here");
    }
    values.push(expr.value);
  }
  return values;
}
/**
 * Interprets a counter-style call: the first argument is the keyword-args
 * ast, the second a WHERE clause which is combined with the global WHERE.
 *
 * `whereFeatures`/`whereTruth` are only included in the result when the
 * reduced truth table is non-trivial (non-empty string).
 */
interpretCounterCallAst(
  ast: CallAst
): {
  args: Ast;
  whereAst: Ast;
  whereFeatures?: string[];
  whereTruth?: string;
} {
  if (ast.type !== "call") {
    throw new Error("Cannot interpret non-function call:: " + ast.type);
  }
  sqrlInvariant(
    ast,
    ast.args.length === 2,
    `${ast.func}() function requires keyword arguments`
  );
  const args: Ast = ast.args[0];
  const { whereAst, whereFeatures, whereTruth } = this.combineGlobalWhere(
    ast.args[1]
  );
  const rv = { args, whereAst };
  if (whereTruth !== "") {
    Object.assign(rv, { whereFeatures, whereTruth });
  }
  return rv;
}
/**
 * Interns an `entity(type, key)` call as a named global (outside any
 * WHERE clause) and returns its slot-reference ast.
 */
constantEntityAst(sourceAst: Ast, type: string, key: string): SlotAst {
  const slotName = `entity(${type}/${key})`;
  const entityAst = SqrlAst.call("entity", SqrlAst.constants(type, key));
  return this.withoutGlobalWhere(() =>
    this.newGlobal(sourceAst, entityAst, slotName)
  );
}
/**
 * Builds the entity id for a counter (key = murmur hash of its props) and
 * the interned entity ast that references it.
 */
counterEntity(
  sourceAst: Ast,
  entityType: string,
  props: {
    [key: string]: any;
  }
): {
  entityId: EntityId;
  entityAst: SlotAst;
} {
  const entityId = new EntityId(entityType, murmurhashJsonHexSync(props));
  const entityAst = this.constantEntityAst(
    sourceAst,
    entityId.type,
    entityId.key
  );
  return { entityId, entityAst };
}
} | the_stack |
import { inject, injectable } from 'inversify';
import * as apid from '../../../../../../api';
import GenreUtil from '../../../..//util/GenreUtil';
import { AudioComponentType, AudioSamplingRate, VideoComponentType } from '../../../../lib/event';
import DateUtil from '../../../../util/DateUtil';
import IReservesApiModel from '../../../api/reserves/IReservesApiModel';
import IScheduleApiModel from '../../../api/schedule/IScheduleApiModel';
import IChannelModel from '../../../channels/IChannelModel';
import IServerConfigModel from '../../../serverConfig/IServerConfigModel';
import { ISettingStorageModel } from '../../../storage/setting/ISettingStorageModel';
import IManualReserveState, { EncodedOption, ManualReserveOption, ManualSaveOption, ProgramStateData, SelectorItem, TimeSpecifiedOption } from './IManualReserveState';
@injectable()
export default class ManualReserveState implements IManualReserveState {
public isTimeSpecification: boolean = false;
public timeSpecifiedOption: TimeSpecifiedOption = {
name: null,
channelId: null,
startAt: null,
endAt: null,
};
public reserveOption: ManualReserveOption = {
allowEndLack: true,
};
public saveOption: ManualSaveOption = {
parentDirectoryName: null,
directory: null,
recordedFormat: null,
};
public encodeOption: EncodedOption = {
mode1: null,
encodeParentDirectoryName1: null,
directory1: null,
mode2: null,
encodeParentDirectoryName2: null,
directory2: null,
mode3: null,
encodeParentDirectoryName3: null,
directory3: null,
isDeleteOriginalAfterEncode: false,
};
// ルールオプションのアコーディオンの開閉を行う
public optionPanel: number[] = [];
private scheduleApiModel: IScheduleApiModel;
private reservesApiModel: IReservesApiModel;
private channelModel: IChannelModel;
private serverConfig: IServerConfigModel;
private settingModel: ISettingStorageModel;
private programInfo: ProgramStateData | null = null;
constructor(
@inject('IScheduleApiModel') scheduleApiModel: IScheduleApiModel,
@inject('IReservesApiModel') reservesApiModel: IReservesApiModel,
@inject('IChannelModel') channelModel: IChannelModel,
@inject('IServerConfigModel') serverConfig: IServerConfigModel,
@inject('ISettingStorageModel') settingModel: ISettingStorageModel,
) {
this.scheduleApiModel = scheduleApiModel;
this.reservesApiModel = reservesApiModel;
this.channelModel = channelModel;
this.serverConfig = serverConfig;
this.settingModel = settingModel;
}
/**
* 各種オプションの初期化
*/
public init(): void {
this.isTimeSpecification = false;
this.timeSpecifiedOption = {
name: null,
channelId: null,
startAt: null,
endAt: null,
};
this.reserveOption = {
allowEndLack: true,
};
this.saveOption = {
parentDirectoryName: null,
directory: null,
recordedFormat: null,
};
this.encodeOption = {
mode1: null,
encodeParentDirectoryName1: null,
directory1: null,
mode2: null,
encodeParentDirectoryName2: null,
directory2: null,
mode3: null,
encodeParentDirectoryName3: null,
directory3: null,
isDeleteOriginalAfterEncode: false,
};
this.optionPanel = [0, 1, 2, 3, 6];
}
/**
* programInfo から手動時刻オプションを設定する
* programInfo が null の場合はエラーとなる
*/
public setTimeSpecifiedOption(): void {
if (this.programInfo === null) {
throw new Error('ProgramInfoIsNull');
}
this.timeSpecifiedOption.name = this.programInfo.programItem.name;
this.timeSpecifiedOption.channelId = this.programInfo.programItem.channelId;
this.timeSpecifiedOption.startAt = new Date(this.programInfo.programItem.startAt);
this.timeSpecifiedOption.endAt = new Date(this.programInfo.programItem.endAt);
}
/**
* apid.ReserveItem の内容を反映させる
* @param reserveItem: apid.ReserveItem
*/
public setOptions(reserveItem: apid.ReserveItem): void {
this.isTimeSpecification = reserveItem.isTimeSpecified;
if (reserveItem.isTimeSpecified === true) {
this.timeSpecifiedOption = {
name: reserveItem.name,
channelId: reserveItem.channelId,
startAt: new Date(reserveItem.startAt),
endAt: new Date(reserveItem.endAt),
};
}
this.reserveOption.allowEndLack = reserveItem.allowEndLack;
if (typeof reserveItem.parentDirectoryName !== 'undefined') {
this.saveOption.parentDirectoryName = reserveItem.parentDirectoryName;
}
if (typeof reserveItem.directory !== 'undefined') {
this.saveOption.directory = reserveItem.directory;
}
if (typeof reserveItem.recordedFormat !== 'undefined') {
this.saveOption.recordedFormat = reserveItem.recordedFormat;
}
if (typeof reserveItem.encodeMode1 !== 'undefined') {
this.encodeOption.mode1 = reserveItem.encodeMode1;
}
if (typeof reserveItem.encodeParentDirectoryName1 !== 'undefined') {
this.encodeOption.encodeParentDirectoryName1 = reserveItem.encodeParentDirectoryName1;
}
if (typeof reserveItem.encodeDirectory1 !== 'undefined') {
this.encodeOption.directory1 = reserveItem.encodeDirectory1;
}
if (typeof reserveItem.encodeMode2 !== 'undefined') {
this.encodeOption.mode2 = reserveItem.encodeMode2;
this.optionPanel.push(4);
}
if (typeof reserveItem.encodeParentDirectoryName2 !== 'undefined') {
this.encodeOption.encodeParentDirectoryName2 = reserveItem.encodeParentDirectoryName2;
}
if (typeof reserveItem.encodeDirectory2 !== 'undefined') {
this.encodeOption.directory2 = reserveItem.encodeDirectory2;
}
if (typeof reserveItem.encodeMode3 !== 'undefined') {
this.encodeOption.mode3 = reserveItem.encodeMode3;
this.optionPanel.push(5);
}
if (typeof reserveItem.encodeParentDirectoryName3 !== 'undefined') {
this.encodeOption.encodeParentDirectoryName3 = reserveItem.encodeParentDirectoryName3;
}
if (typeof reserveItem.encodeDirectory3 !== 'undefined') {
this.encodeOption.directory3 = reserveItem.encodeDirectory3;
}
if (typeof reserveItem.encodeMode1 !== 'undefined') {
this.encodeOption.mode1 = reserveItem.encodeMode1;
}
if (typeof reserveItem.encodeParentDirectoryName1 !== 'undefined') {
this.encodeOption.encodeParentDirectoryName1 = reserveItem.encodeParentDirectoryName1;
}
this.encodeOption.isDeleteOriginalAfterEncode = reserveItem.isDeleteOriginalAfterEncode;
}
/**
* 番組情報を取得する
* @param programId: apid.ProgramId
* @param isHalfWidth: boolean 半角で取得するか
* @return Promise<void>
*/
public async fetchProgramInfo(programId: apid.ProgramId, isHalfWidth: boolean): Promise<void> {
const program = await this.scheduleApiModel.getSchedule(programId, isHalfWidth);
this.programInfo = this.convertScheduleProgramItemToStateData(program, isHalfWidth);
}
/**
* 予約情報の取得
* @param reserveId: apid.ReserveId
* @param isHalfWidth: boolean 半角で取得するか
* @return Promise<apid.ReserveItem>
*/
public async getReserveItem(reserveId: apid.ReserveId, isHalfWidth: boolean): Promise<apid.ReserveItem> {
return await this.reservesApiModel.get(reserveId, isHalfWidth);
}
/**
* apid.ScheduleProgramItem を ProgramStateData に変換する
* @param program: apid.ScheduleProgramItem
* @param isHalfWidth: boolean
* @return ProgramStateData
*/
private convertScheduleProgramItemToStateData(program: apid.ScheduleProgramItem, isHalfWidth: boolean): ProgramStateData {
const startAt = DateUtil.getJaDate(new Date(program.startAt));
const endAt = DateUtil.getJaDate(new Date(program.endAt));
const channel = this.channelModel.findChannel(program.channelId, isHalfWidth);
const result: ProgramStateData = {
display: {
channelName: channel === null ? program.channelId.toString(10) : channel.name,
name: program.name,
day: DateUtil.format(startAt, 'MM/dd'),
dow: DateUtil.format(startAt, 'w'),
startTime: DateUtil.format(startAt, 'hh:mm'),
endTime: DateUtil.format(endAt, 'hh:mm'),
duration: Math.floor((program.endAt - program.startAt) / 1000 / 60),
genres: this.createGenres(program),
description: program.description,
extended: program.extended,
isFree: program.isFree,
},
programItem: program,
};
if (typeof program.videoComponentType !== 'undefined') {
const videoType = this.getVideoType(program.videoComponentType);
if (videoType !== null) {
result.display.videoType = videoType;
}
}
// audioType
if (typeof program.audioComponentType !== 'undefined') {
const audioType = this.getAudioMode(program.audioComponentType);
if (audioType !== null) {
result.display.audioType = audioType;
}
}
// audioSamplingRate
if (typeof program.audioSamplingRate !== 'undefined') {
const audioSamplingRate = this.getAudioSamplingRate(program.audioSamplingRate);
if (audioSamplingRate !== null) {
result.display.audioSamplingRate = audioSamplingRate;
}
}
return result;
}
/**
* ジャンル情報
* @param program: apid.ScheduleProgramItem
* @return string[]
*/
private createGenres(program: apid.ScheduleProgramItem): string[] {
const genres: string[] = [];
if (typeof program.genre1 !== 'undefined') {
const genre = GenreUtil.getGenres(program.genre1, program.subGenre1);
if (genre !== null) {
genres.push(genre);
}
}
if (typeof program.genre2 !== 'undefined') {
const genre = GenreUtil.getGenres(program.genre2, program.subGenre2);
if (genre !== null) {
genres.push(genre);
}
}
if (typeof program.genre3 !== 'undefined') {
const genre = GenreUtil.getGenres(program.genre3, program.subGenre3);
if (genre !== null) {
genres.push(genre);
}
}
return genres;
}
/**
* video 情報を取得
* @param videoComponentType: number
* @return videoComponentType | null
*/
private getVideoType(videoComponentType: number): string | null {
const str = (VideoComponentType as any)[videoComponentType];
return typeof str === 'undefined' ? null : str;
}
/**
* 音声情報を取得
* @param audioComponentType: number
* @return audio type | null
*/
private getAudioMode(audioComponentType: number): string | null {
const str = (AudioComponentType as any)[audioComponentType];
return typeof str === 'undefined' ? null : str;
}
/**
* 音声サンプリングレートを返す
* @return audio sampling rate | null
*/
private getAudioSamplingRate(audioSamplingRate: apid.ProgramAudioSamplingRate): string | null {
const str = AudioSamplingRate[audioSamplingRate];
return typeof str === 'undefined' ? null : str;
}
/**
* 放送局 item を返す
* @return SelectorItem[]
*/
public getChannelItems(): SelectorItem[] {
return this.channelModel.getChannels(this.settingModel.getSavedValue().isHalfWidthDisplayed).map(c => {
return {
text: c.name,
value: c.id,
};
});
}
/**
* 取得した番組情報を返す
* @return ProgramStateData
*/
public getProgramInfo(): ProgramStateData | null {
return this.programInfo;
}
/**
* 録画先ディレクトリの一覧を返す
* @return string[]
*/
public getPrentDirectoryItems(): string[] {
const config = this.serverConfig.getConfig();
return config === null ? [] : config.recorded;
}
/**
* エンコードモード一覧を返す
* @return string
*/
public getEncodeModeItems(): string[] {
const config = this.serverConfig.getConfig();
return config === null ? [] : config.encode;
}
/**
* エンコードに対応しているか
*/
public isEnableEncodeMode(): boolean {
return this.getEncodeModeItems().length > 0;
}
/**
* 予約追加
*/
public async addReserve(): Promise<void> {
// 予約オプション組み立て
const option = this.createManualReserveOption();
await this.reservesApiModel.add(option);
}
/**
* 予約更新
* @param reserveId: apid.ReserveId
*/
public async updateReserve(reserveId: apid.ReserveId): Promise<void> {
const option = this.createEditManualReserveOption();
await this.reservesApiModel.edit(reserveId, option);
}
/**
* 予約設定を組み立てる
* @return apid.ManualReserveOption
*/
private createManualReserveOption(): apid.ManualReserveOption {
const result: apid.ManualReserveOption = {
allowEndLack: this.reserveOption.allowEndLack,
};
if (this.isTimeSpecification === true) {
// 時刻予約
if (this.timeSpecifiedOption.name === null || this.timeSpecifiedOption.name.length === 0) {
throw new Error('TimeSpecifiedOptionNameError');
}
if (this.timeSpecifiedOption.channelId === null) {
throw new Error('TimeSpecifiedOptionChannelIdError');
}
if (this.timeSpecifiedOption.startAt === null || this.timeSpecifiedOption.endAt === null) {
throw new Error('TimeSpecifiedOptionTimeError');
}
result.timeSpecifiedOption = {
name: this.timeSpecifiedOption.name,
channelId: this.timeSpecifiedOption.channelId,
startAt: this.timeSpecifiedOption.startAt.getTime(),
endAt: this.timeSpecifiedOption.endAt.getTime(),
};
} else {
if (this.programInfo === null) {
throw new Error('ProgramIdIsNull');
}
// program id 予約
result.programId = this.programInfo.programItem.id;
}
// 保存オプション
const saveOption = this.getSaveOption();
if (saveOption !== null) {
result.saveOption = saveOption;
}
// エンコードオプション
const encodeOption = this.getEncodeOption();
if (encodeOption !== null) {
result.encodeOption = encodeOption;
}
// TODO tag
return result;
}
/**
* 予約編集オプションを組み立てる
* @return apid.EditManualReserveOption
*/
private createEditManualReserveOption(): apid.EditManualReserveOption {
const result: apid.EditManualReserveOption = {
allowEndLack: this.reserveOption.allowEndLack,
};
// 保存オプション
const saveOption = this.getSaveOption();
if (saveOption !== null) {
result.saveOption = saveOption;
}
// エンコードオプション
const encodeOption = this.getEncodeOption();
if (encodeOption !== null) {
result.encodeOption = encodeOption;
}
// TODO tag
return result;
}
/**
* apid.ReserveSaveOption を生成する
* @return apid.ReserveSaveOption | null
*/
private getSaveOption(): apid.ReserveSaveOption | null {
const saveOption: apid.ReserveSaveOption = {};
if (this.saveOption.parentDirectoryName !== null) {
saveOption.parentDirectoryName = this.saveOption.parentDirectoryName;
}
if (this.saveOption.directory !== null) {
saveOption.directory = this.saveOption.directory;
}
if (this.saveOption.recordedFormat !== null) {
saveOption.recordedFormat = this.saveOption.recordedFormat;
}
return Object.keys(saveOption).length > 0 ? saveOption : null;
}
/**
* apid.ReserveEncodedOption を生成する
* @return apid.ReserveEncodedOption | null
*/
private getEncodeOption(): apid.ReserveEncodedOption | null {
const encodeOption: apid.ReserveEncodedOption = {
isDeleteOriginalAfterEncode: this.encodeOption.isDeleteOriginalAfterEncode,
};
if (this.encodeOption.mode1 !== null) {
encodeOption.mode1 = this.encodeOption.mode1;
if (this.encodeOption.encodeParentDirectoryName1 !== null) {
encodeOption.encodeParentDirectoryName1 = this.encodeOption.encodeParentDirectoryName1;
}
if (this.encodeOption.directory1 !== null) {
encodeOption.directory1 = this.encodeOption.directory1;
}
}
if (this.encodeOption.mode2 !== null) {
encodeOption.mode2 = this.encodeOption.mode2;
if (this.encodeOption.encodeParentDirectoryName2 !== null) {
encodeOption.encodeParentDirectoryName2 = this.encodeOption.encodeParentDirectoryName2;
}
if (this.encodeOption.directory2 !== null) {
encodeOption.directory2 = this.encodeOption.directory2;
}
}
if (this.encodeOption.mode3 !== null) {
encodeOption.mode3 = this.encodeOption.mode3;
if (this.encodeOption.encodeParentDirectoryName3 !== null) {
encodeOption.encodeParentDirectoryName3 = this.encodeOption.encodeParentDirectoryName3;
}
if (this.encodeOption.directory3 !== null) {
encodeOption.directory3 = this.encodeOption.directory3;
}
}
return Object.keys(encodeOption).length > 1 ? encodeOption : null;
}
} | the_stack |
import type { ChildExecutorDecision } from "@effect/core/stream/Channel/ChildExecutorDecision"
import type { Channel } from "@effect/core/stream/Channel/definition/base"
import { ChannelBase } from "@effect/core/stream/Channel/definition/base"
import {
_Env,
_InDone,
_InElem,
_InErr,
_OutDone,
_OutDone2,
_OutElem,
_OutErr,
_OutErr2
} from "@effect/core/stream/Channel/definition/symbols"
import type { AsyncInputProducer } from "@effect/core/stream/Channel/SingleProducerAsyncInput"
import type { UpstreamPullRequest } from "@effect/core/stream/Channel/UpstreamPullRequest"
import type { UpstreamPullStrategy } from "@effect/core/stream/Channel/UpstreamPullStrategy"
// -----------------------------------------------------------------------------
// PipeTo
// -----------------------------------------------------------------------------
/**
 * Channel primitive: pipes the output of `left` into the input of `right`.
 * Both channels are held lazily.
 */
export class PipeTo<
  Env,
  InErr,
  InElem,
  InDone,
  OutErr2,
  OutElem2,
  OutDone2,
  OutErr,
  OutElem,
  OutDone
> extends ChannelBase<Env, InErr, InElem, InDone, OutErr2, OutElem2, OutDone2> {
  readonly _tag = "PipeTo"
  constructor(
    readonly left: Lazy<Channel<Env, InErr, InElem, InDone, OutErr, OutElem, OutDone>>,
    readonly right: Lazy<
      Channel<Env, OutErr, OutElem, OutDone, OutErr2, OutElem2, OutDone2>
    >
  ) {
    super()
  }
}
// -----------------------------------------------------------------------------
// Read
// -----------------------------------------------------------------------------
/**
 * Channel primitive: reads one upstream element, continuing with `more`
 * on an element and with the `done` continuation on upstream completion
 * or failure.
 */
export class Read<
  Env,
  InErr,
  InElem,
  InDone,
  OutErr2,
  OutElem,
  OutDone2,
  OutErr,
  OutDone
> extends ChannelBase<Env, InErr, InElem, InDone, OutErr2, OutElem, OutDone2> {
  readonly _tag = "Read"
  constructor(
    readonly more: (
      i: InElem
    ) => Channel<Env, InErr, InElem, InDone, OutErr2, OutElem, OutDone2>,
    readonly done: ContinuationK<
      Env,
      InErr,
      InElem,
      InDone,
      OutErr,
      OutErr2,
      OutElem,
      OutDone,
      OutDone2
    >
  ) {
    super()
  }
}
// -----------------------------------------------------------------------------
// SucceedNow
// -----------------------------------------------------------------------------
/**
 * Channel primitive: terminates immediately with an already-computed
 * done value (strict counterpart of Succeed).
 */
export class SucceedNow<OutDone> extends ChannelBase<
  unknown,
  unknown,
  unknown,
  unknown,
  never,
  never,
  OutDone
> {
  readonly _tag = "SucceedNow"
  constructor(readonly terminal: OutDone) {
    super()
  }
}
// -----------------------------------------------------------------------------
// Fail
// -----------------------------------------------------------------------------
/**
 * Channel primitive: fails with a lazily-evaluated Cause.
 */
export class Fail<OutErr> extends ChannelBase<
  unknown,
  unknown,
  unknown,
  unknown,
  OutErr,
  never,
  never
> {
  readonly _tag = "Fail"
  constructor(readonly error: Lazy<Cause<OutErr>>) {
    super()
  }
}
// -----------------------------------------------------------------------------
// FromEffect
// -----------------------------------------------------------------------------
/**
 * Channel primitive: runs a lazily-built effect; its result becomes the
 * channel's done value (no elements are emitted).
 */
export class FromEffect<Env, OutErr, OutDone> extends ChannelBase<
  Env,
  unknown,
  unknown,
  unknown,
  OutErr,
  never,
  OutDone
> {
  readonly _tag = "FromEffect"
  constructor(readonly effect: Lazy<Effect<Env, OutErr, OutDone>>) {
    super()
  }
}
// -----------------------------------------------------------------------------
// Emit
// -----------------------------------------------------------------------------
/**
 * Channel primitive: emits a single, lazily-evaluated output element.
 */
export class Emit<OutElem, OutDone> extends ChannelBase<
  unknown,
  unknown,
  unknown,
  unknown,
  never,
  OutElem,
  OutDone
> {
  readonly _tag = "Emit"
  constructor(readonly out: Lazy<OutElem>) {
    super()
  }
}
// -----------------------------------------------------------------------------
// Succeed
// -----------------------------------------------------------------------------
/**
 * Channel primitive: terminates with a lazily-evaluated done value
 * (lazy counterpart of SucceedNow).
 */
export class Succeed<OutDone> extends ChannelBase<
  unknown,
  unknown,
  unknown,
  unknown,
  never,
  never,
  OutDone
> {
  readonly _tag = "Succeed"
  constructor(readonly effect: Lazy<OutDone>) {
    super()
  }
}
// -----------------------------------------------------------------------------
// Suspend
// -----------------------------------------------------------------------------
/**
 * Channel primitive: defers construction of the underlying channel until
 * it is actually run.
 */
export class Suspend<
  Env,
  InErr,
  InElem,
  InDone,
  OutErr,
  OutElem,
  OutDone
> extends ChannelBase<Env, InErr, InElem, InDone, OutErr, OutElem, OutDone> {
  readonly _tag = "Suspend"
  constructor(
    readonly effect: Lazy<Channel<Env, InErr, InElem, InDone, OutErr, OutElem, OutDone>>
  ) {
    super()
  }
}
// -----------------------------------------------------------------------------
// Ensuring
// -----------------------------------------------------------------------------
/**
 * Channel primitive: attaches a finalizer that receives the channel's
 * exit value when it terminates.
 */
export class Ensuring<
  Env,
  InErr,
  InElem,
  InDone,
  OutErr,
  OutElem,
  OutDone
> extends ChannelBase<Env, InErr, InElem, InDone, OutErr, OutElem, OutDone> {
  readonly _tag = "Ensuring"
  constructor(
    readonly channel: Channel<Env, InErr, InElem, InDone, OutErr, OutElem, OutDone>,
    readonly finalizer: (exit: Exit<OutErr, OutDone>) => Effect<Env, never, unknown>
  ) {
    super()
  }
}
// -----------------------------------------------------------------------------
// ConcatAll
// -----------------------------------------------------------------------------
/**
 * Channel primitive: maps each element of `value` through `k` into a child
 * channel and runs the children in sequence.
 * - combineInners folds the done values of the child channels;
 * - combineAll merges that fold with the outer channel's done value;
 * - onPull / onEmit let the executor steer upstream pulling and child
 *   emission (see UpstreamPullStrategy / ChildExecutorDecision).
 */
export class ConcatAll<
  Env,
  InErr,
  InElem,
  InDone,
  OutErr,
  OutElem2,
  OutDone3,
  OutElem,
  OutDone,
  OutDone2
> extends ChannelBase<Env, InErr, InElem, InDone, OutErr, OutElem2, OutDone3> {
  readonly _tag = "ConcatAll"
  constructor(
    readonly combineInners: (x: OutDone, y: OutDone) => OutDone,
    readonly combineAll: (x: OutDone, y: OutDone2) => OutDone3,
    readonly onPull: (
      pr: UpstreamPullRequest<OutElem>
    ) => UpstreamPullStrategy<OutElem2>,
    readonly onEmit: (o: OutElem2) => ChildExecutorDecision,
    readonly value: Lazy<
      Channel<Env, InErr, InElem, InDone, OutErr, OutElem, OutDone2>
    >,
    readonly k: (
      o: OutElem
    ) => Channel<Env, InErr, InElem, InDone, OutErr, OutElem2, OutDone>
  ) {
    super()
  }
}
// -----------------------------------------------------------------------------
// Fold
// -----------------------------------------------------------------------------
/**
 * Channel primitive: runs `value` and continues with the ContinuationK
 * `k` on either its success value or its failure cause.
 */
export class Fold<
  Env,
  InErr,
  InElem,
  InDone,
  OutErr2,
  OutElem,
  OutDone2,
  OutErr,
  OutDone
> extends ChannelBase<Env, InErr, InElem, InDone, OutErr2, OutElem, OutDone2> {
  readonly _tag = "Fold"
  constructor(
    readonly value: Channel<Env, InErr, InElem, InDone, OutErr, OutElem, OutDone>,
    readonly k: ContinuationK<
      Env,
      InErr,
      InElem,
      InDone,
      OutErr,
      OutErr2,
      OutElem,
      OutDone,
      OutDone2
    >
  ) {
    super()
  }
}
// -----------------------------------------------------------------------------
// Bridge
// -----------------------------------------------------------------------------
/**
 * Channel primitive: feeds `channel` from an async input producer instead
 * of a typed upstream (the inner channel's input types are `unknown`).
 */
export class Bridge<
  Env,
  InErr,
  InElem,
  InDone,
  OutErr,
  OutElem,
  OutDone
> extends ChannelBase<Env, InErr, InElem, InDone, OutErr, OutElem, OutDone> {
  readonly _tag = "Bridge"
  constructor(
    readonly input: AsyncInputProducer<InErr, InElem, InDone>,
    readonly channel: Channel<Env, unknown, unknown, unknown, OutErr, OutElem, OutDone>
  ) {
    super()
  }
}
// -----------------------------------------------------------------------------
// BracketOut
// -----------------------------------------------------------------------------
/**
 * Channel primitive: acquires a resource with `acquire`, emits it as the
 * single output element, and guarantees `finalizer` runs with the exit.
 */
export class BracketOut<R, E, Z, OutDone> extends ChannelBase<
  R,
  unknown,
  unknown,
  unknown,
  E,
  Z,
  OutDone
> {
  readonly _tag = "BracketOut"
  constructor(
    readonly acquire: Lazy<Effect<R, E, Z>>,
    readonly finalizer: (z: Z, exit: Exit<unknown, unknown>) => Effect.RIO<R, unknown>
  ) {
    super()
  }
}
// -----------------------------------------------------------------------------
// Provide
// -----------------------------------------------------------------------------
/**
 * Channel primitive: supplies the inner channel's environment from a lazy
 * value, eliminating its environment requirement (outer Env = unknown).
 */
export class Provide<
  R,
  InErr,
  InElem,
  InDone,
  OutErr,
  OutElem,
  OutDone
> extends ChannelBase<unknown, InErr, InElem, InDone, OutErr, OutElem, OutDone> {
  readonly _tag = "Provide"
  constructor(
    readonly env: Lazy<Env<R>>,
    readonly channel: Channel<R, InErr, InElem, InDone, OutErr, OutElem, OutDone>
  ) {
    super()
  }
}
/**
 * Type-level assertion that a Channel is one of the concrete primitives
 * above, so the interpreter can switch on `_tag`. Purely a compile-time
 * cast — the body is intentionally empty.
 *
 * @tsplus macro remove
 */
export function concrete<Env, InErr, InElem, InDone, OutErr, OutElem, OutDone>(
  _: Channel<Env, InErr, InElem, InDone, OutErr, OutElem, OutDone>
): asserts _ is
  | PipeTo<Env, InErr, InElem, InDone, OutErr, OutElem, OutDone, any, any, any>
  | Read<Env, InErr, InElem, InDone, OutErr, OutElem, OutDone, any, any>
  | SucceedNow<OutDone>
  | Fail<OutErr>
  | FromEffect<Env, OutErr, OutDone>
  | Emit<OutElem, OutDone>
  | Succeed<OutDone>
  | Suspend<Env, InErr, InElem, InDone, OutErr, OutElem, OutDone>
  | Ensuring<Env, InErr, InElem, InDone, OutErr, OutElem, OutDone>
  | ConcatAll<Env, InErr, InElem, InDone, OutErr, OutElem, OutDone, any, any, any>
  | Fold<Env, InErr, InElem, InDone, OutErr, OutElem, OutDone, any, any>
  | Bridge<Env, InErr, InElem, InDone, OutErr, OutElem, OutDone>
  | BracketOut<Env, OutErr, OutElem, OutDone>
  | Provide<Env, InErr, InElem, InDone, OutErr, OutElem, OutDone>
{
  //
}
// -----------------------------------------------------------------------------
// Continuation
// -----------------------------------------------------------------------------
/**
 * Base type for channel continuations. The symbol-keyed fields are phantom
 * markers: declared with `!` and never assigned at runtime, they exist only to
 * pin the variance of each type parameter for the checker.
 */
export abstract class Continuation<
  Env,
  InErr,
  InElem,
  InDone,
  OutErr,
  OutErr2,
  OutElem,
  OutDone,
  OutDone2
> {
  // Contravariant markers (parameters appear in input position).
  readonly [_Env]!: (_: Env) => void
  readonly [_InErr]!: (_: InErr) => void
  readonly [_InElem]!: (_: InElem) => void
  readonly [_InDone]!: (_: InDone) => void
  // NOTE(review): these two use an invariant `(T) => T` encoding, unlike the
  // contravariant inputs above and covariant outputs below — presumably
  // intentional; confirm against the variance conventions used elsewhere.
  readonly [_OutErr]!: (_: OutErr) => OutErr
  readonly [_OutDone]!: (_: OutDone) => OutDone
  // Covariant markers (parameters appear only in output position).
  readonly [_OutErr2]!: () => OutErr2
  readonly [_OutElem]!: () => OutElem
  readonly [_OutDone2]!: () => OutDone2
}
/**
 * Type-level assertion that a `Continuation` is one of its two concrete
 * subclasses (`ContinuationK` or `ContinuationFinalizer`), enabling `_tag`
 * dispatch in the interpreter. Empty body: the check is purely type-level and
 * the call is erased by the tsplus macro below.
 *
 * @tsplus macro remove
 */
export function concreteContinuation<
  Env,
  InErr,
  InElem,
  InDone,
  OutErr,
  OutErr2,
  OutElem,
  OutDone,
  OutDone2
>(
  _: Continuation<
    Env,
    InErr,
    InElem,
    InDone,
    OutErr,
    OutErr2,
    OutElem,
    OutDone,
    OutDone2
  >
): asserts _ is
  | ContinuationK<
      Env,
      InErr,
      InElem,
      InDone,
      OutErr,
      OutErr2,
      OutElem,
      OutDone,
      OutDone2
    >
  | ContinuationFinalizer<Env, OutErr, OutDone>
{
  //
}
/**
 * Continuation (`_tag: "ContinuationK"`) that handles both outcomes of an
 * upstream channel: `onSuccess` consumes the done value, `onHalt` consumes the
 * failure cause, and each produces the next channel to run.
 */
export class ContinuationK<
  Env,
  InErr,
  InElem,
  InDone,
  OutErr,
  OutErr2,
  OutElem,
  OutDone,
  OutDone2
> extends Continuation<
  Env,
  InErr,
  InElem,
  InDone,
  OutErr,
  OutErr2,
  OutElem,
  OutDone,
  OutDone2
> {
  readonly _tag = "ContinuationK"
  constructor(
    readonly onSuccess: (
      o: OutDone
    ) => Channel<Env, InErr, InElem, InDone, OutErr2, OutElem, OutDone2>,
    readonly onHalt: (
      c: Cause<OutErr>
    ) => Channel<Env, InErr, InElem, InDone, OutErr2, OutElem, OutDone2>
  ) {
    super()
  }
  /** Route an `Exit` to the matching handler: failure → `onHalt`, success → `onSuccess`. */
  onExit(
    exit: Exit<OutErr, OutDone>
  ): Channel<Env, InErr, InElem, InDone, OutErr2, OutElem, OutDone2> {
    return exit._tag === "Failure"
      ? this.onHalt(exit.cause)
      : this.onSuccess(exit.value)
  }
}
/**
 * Continuation (`_tag: "ContinuationFinalizer"`) that only runs a finalizing
 * effect with the exit value. The `never` type arguments record that it
 * produces no follow-up error, element, or done value of its own.
 */
export class ContinuationFinalizer<Env, OutErr, OutDone> extends Continuation<
  Env,
  unknown,
  unknown,
  unknown,
  OutErr,
  never,
  never,
  OutDone,
  never
> {
  readonly _tag = "ContinuationFinalizer"
  constructor(readonly finalizer: (exit: Exit<OutErr, OutDone>) => Effect.RIO<Env, unknown>) {
    super()
  }
}
import { TestBed } from '@angular/core/testing';
import {
provideConfigFactory,
provideDefaultConfigFactory,
} from '@spartacus/core';
import { getEpdVisualizationDefaultConfig } from '@spartacus/epd-visualization/root';
import { Observable, of } from 'rxjs';
import { getTestConfig } from '../../../root/testing/epd-visualization-test-config';
import {
MetadatumValueType,
NodesResponse,
} from '../../connectors/scene/nodes-response';
import { SceneAdapter } from '../../connectors/scene/scene.adapter';
import { SceneNodeToProductLookupService } from './scene-node-to-product-lookup.service';
/**
 * Test double for SceneAdapter that delegates getNodes to a swappable
 * function, letting each test control the NodesResponse it returns and
 * validate the arguments it receives.
 */
class MockSceneAdapter extends SceneAdapter {
  // Assigned by each test before the service calls getNodes. The definite
  // assignment assertion (`!`) is the fix: without it this declaration fails
  // to compile under strictPropertyInitialization.
  getNodesFunc!: (
    _sceneId: string,
    _nodeIds?: string[],
    _expand?: string[],
    _filter?: string[],
    _contentType?: string
  ) => Observable<NodesResponse>;

  getNodes(
    sceneId: string,
    nodeIds?: string[],
    expand?: string[],
    filter?: string[],
    contentType?: string
  ): Observable<NodesResponse> {
    // Forward verbatim to the per-test implementation.
    return this.getNodesFunc(sceneId, nodeIds, expand, filter, contentType);
  }
}
// Fixture: three scene nodes, each carrying exactly one
// CommerceCloud/SpareParts/ProductCode metadatum with a distinct value —
// a 1:1 mapping between scene node ids and product codes.
const nodesResponseOneProductCodePerSceneNode: NodesResponse = {
  nodes: [
    {
      sid: 'sceneNode1',
      metadata: [
        {
          source: 'CommerceCloud',
          category: 'SpareParts',
          tag: 'ProductCode',
          value: 'productCode1',
          valueType: MetadatumValueType.string,
        },
      ],
    },
    {
      sid: 'sceneNode2',
      metadata: [
        {
          source: 'CommerceCloud',
          category: 'SpareParts',
          tag: 'ProductCode',
          value: 'productCode2',
          valueType: MetadatumValueType.string,
        },
      ],
    },
    {
      sid: 'sceneNode3',
      metadata: [
        {
          source: 'CommerceCloud',
          category: 'SpareParts',
          tag: 'ProductCode',
          value: 'productCode3',
          valueType: MetadatumValueType.string,
        },
      ],
    },
  ],
};
// Fixture: sceneNode1 and sceneNode2 both carry 'productCodeA', sceneNode3
// carries 'productCodeB'.
// NOTE(review): despite the name, each scene node here has a single product
// code; what this fixture actually exercises is MULTIPLE SCENE NODES sharing
// one product code. A rename (e.g. nodesResponseMultipleSceneNodesPerProductCode)
// would be clearer — left as-is since other code references this identifier.
const nodesResponseMultipleProductCodesPerSceneNode: NodesResponse = {
  nodes: [
    {
      sid: 'sceneNode1',
      metadata: [
        {
          source: 'CommerceCloud',
          category: 'SpareParts',
          tag: 'ProductCode',
          value: 'productCodeA',
          valueType: MetadatumValueType.string,
        },
      ],
    },
    {
      sid: 'sceneNode2',
      metadata: [
        {
          source: 'CommerceCloud',
          category: 'SpareParts',
          tag: 'ProductCode',
          value: 'productCodeA',
          valueType: MetadatumValueType.string,
        },
      ],
    },
    {
      sid: 'sceneNode3',
      metadata: [
        {
          source: 'CommerceCloud',
          category: 'SpareParts',
          tag: 'ProductCode',
          value: 'productCodeB',
          valueType: MetadatumValueType.string,
        },
      ],
    },
  ],
};
/**
 * Shared assertions on the arguments the service passes to
 * SceneAdapter.getNodes: the expected scene id, no node-id filter, the
 * hotspot/metadata expand list, the ProductCode metadata filter, and a '*'
 * content type.
 */
const validateGetNodesParameters = (
  sceneId: string,
  nodeIds?: string[],
  expand?: string[],
  filter?: string[],
  contentType?: string
) => {
  expect(sceneId).toBe('some scene id');
  // Fixed: these two assertions were previously joined by a comma operator
  // (`expect(nodeIds).toBeUndefined(), expect(expand)...`) — an obvious typo
  // for two separate statements.
  expect(nodeIds).toBeUndefined();
  expect(expand).toBeTruthy();
  if (expand) {
    expect(expand.length).toBe(2);
    expect(expand[0]).toBe('hotspot');
    expect(expand[1]).toBe('metadata[CommerceCloud].SpareParts.ProductCode');
  }
  expect(filter).toBeTruthy();
  if (filter) {
    expect(filter.length).toBe(1);
    expect(filter[0]).toBe('metadata[CommerceCloud].SpareParts.ProductCode');
  }
  expect(contentType).toBe('*');
};
// getNodes stub for MockSceneAdapter: validates the call arguments, then
// emits the 1:1 scene-node/product-code fixture.
const getNodesOneProductCodePerSceneNode = (
  sceneId: string,
  nodeIds?: string[],
  expand?: string[],
  filter?: string[],
  contentType?: string
): Observable<NodesResponse> => {
  validateGetNodesParameters(sceneId, nodeIds, expand, filter, contentType);
  return of(nodesResponseOneProductCodePerSceneNode);
};
// getNodes stub for MockSceneAdapter: validates the call arguments, then
// emits the fixture where two scene nodes share one product code.
const getNodesMultipleProductCodesPerSceneNode = (
  sceneId: string,
  nodeIds?: string[],
  expand?: string[],
  filter?: string[],
  contentType?: string
): Observable<NodesResponse> => {
  validateGetNodesParameters(sceneId, nodeIds, expand, filter, contentType);
  return of(nodesResponseMultipleProductCodesPerSceneNode);
};
// Unit tests for SceneNodeToProductLookupService: bidirectional lookups
// between scene node ids and product codes, in both async (Observable) and
// synchronous flavors. The sync lookups return [] until populateMapsForScene
// has run against the (mocked) SceneAdapter.
describe('SceneNodeToProductLookupService', () => {
  // Shared across all tests; each test installs its own getNodesFunc.
  const mockSceneAdapter = new MockSceneAdapter();
  let sceneNodeToProductLookupService: SceneNodeToProductLookupService;
  beforeEach(() => {
    TestBed.configureTestingModule({
      providers: [
        provideConfigFactory(getTestConfig),
        provideDefaultConfigFactory(getEpdVisualizationDefaultConfig),
        {
          provide: SceneAdapter,
          useValue: mockSceneAdapter,
        },
      ],
    });
    sceneNodeToProductLookupService = TestBed.inject(
      SceneNodeToProductLookupService
    );
  });
  describe('lookupNodeIds', () => {
    it('should lookup node ids for given product codes', (done) => {
      mockSceneAdapter.getNodesFunc = getNodesOneProductCodePerSceneNode;
      // Before populateMapsForScene, the sync lookup has no data and
      // returns an empty array.
      const sceneNodeIdsBeforeMapPopulated =
        sceneNodeToProductLookupService.syncLookupNodeIds([
          'productCode2',
          'productCode3',
          'notPresent',
        ]);
      expect(sceneNodeIdsBeforeMapPopulated).toBeTruthy();
      expect(sceneNodeIdsBeforeMapPopulated.length).toBe(0);
      sceneNodeToProductLookupService.populateMapsForScene('some scene id');
      sceneNodeToProductLookupService
        .lookupNodeIds(['productCode2', 'productCode3', 'notPresent'])
        .subscribe((sceneNodeIds: string[]) => {
          // Unknown codes are dropped; known codes resolve to their node ids.
          expect(sceneNodeIds).toBeTruthy();
          expect(sceneNodeIds.length).toBe(2);
          expect(sceneNodeIds[0]).toBe('sceneNode2');
          expect(sceneNodeIds[1]).toBe('sceneNode3');
          // The synchronous version should produce the same results at this point in time.
          const sceneNodeIdsSync =
            sceneNodeToProductLookupService.syncLookupNodeIds([
              'productCode2',
              'productCode3',
              'notPresent',
            ]);
          expect(sceneNodeIdsSync).toBeTruthy();
          expect(sceneNodeIdsSync.length).toBe(2);
          expect(sceneNodeIdsSync[0]).toBe('sceneNode2');
          expect(sceneNodeIdsSync[1]).toBe('sceneNode3');
          done();
        });
    });
    it('should allow for multiple scene nodes with same product code', (done) => {
      mockSceneAdapter.getNodesFunc = getNodesMultipleProductCodesPerSceneNode;
      // NOTE(review): this calls syncLookupProductCodes with PRODUCT CODES,
      // but the variable name and the test's focus (node-id lookup) suggest
      // syncLookupNodeIds was intended. Both return [] before the maps are
      // populated, so the assertions pass either way — confirm intent.
      const sceneNodeIdsBeforeMapPopulated =
        sceneNodeToProductLookupService.syncLookupProductCodes([
          'productCodeA',
          'productCodeB',
          'notPresent',
        ]);
      expect(sceneNodeIdsBeforeMapPopulated).toBeTruthy();
      expect(sceneNodeIdsBeforeMapPopulated.length).toBe(0);
      sceneNodeToProductLookupService.populateMapsForScene('some scene id');
      sceneNodeToProductLookupService
        .lookupNodeIds(['productCodeA'])
        .subscribe((sceneNodeIds: string[]) => {
          // One product code maps to both scene nodes carrying it.
          expect(sceneNodeIds).toBeTruthy();
          expect(sceneNodeIds.length).toBe(2);
          expect(sceneNodeIds[0]).toBe('sceneNode1');
          expect(sceneNodeIds[1]).toBe('sceneNode2');
          // The synchronous version should produce the same results at this point in time.
          const sceneNodeIdsSync =
            sceneNodeToProductLookupService.syncLookupNodeIds(['productCodeA']);
          expect(sceneNodeIdsSync).toBeTruthy();
          expect(sceneNodeIdsSync.length).toBe(2);
          expect(sceneNodeIdsSync[0]).toBe('sceneNode1');
          expect(sceneNodeIdsSync[1]).toBe('sceneNode2');
          done();
        });
    });
  });
  describe('lookupProductCodes', () => {
    it('should lookup product codes for given scene node ids', (done) => {
      mockSceneAdapter.getNodesFunc = getNodesOneProductCodePerSceneNode;
      // NOTE(review): this local injection shadows the outer service variable
      // assigned in beforeEach; TestBed.inject presumably returns the same
      // instance, making this redundant — verify and consider removing.
      const sceneNodeToProductLookupService: SceneNodeToProductLookupService =
        TestBed.inject(SceneNodeToProductLookupService);
      const productCodesBeforeMapPopulated =
        sceneNodeToProductLookupService.syncLookupProductCodes([
          'sceneNode2',
          'sceneNode3',
          'notPresent',
        ]);
      expect(productCodesBeforeMapPopulated).toBeTruthy();
      expect(productCodesBeforeMapPopulated.length).toBe(0);
      sceneNodeToProductLookupService.populateMapsForScene('some scene id');
      sceneNodeToProductLookupService
        .lookupProductCodes(['sceneNode2', 'sceneNode3', 'notPresent'])
        .subscribe((productCodes: string[]) => {
          // Unknown node ids are dropped; known ids resolve to their codes.
          expect(productCodes).toBeTruthy();
          expect(productCodes.length).toBe(2);
          expect(productCodes[0]).toBe('productCode2');
          expect(productCodes[1]).toBe('productCode3');
          // The synchronous version should produce the same results at this point in time.
          const productCodesSync =
            sceneNodeToProductLookupService.syncLookupProductCodes([
              'sceneNode2',
              'sceneNode3',
              'notPresent',
            ]);
          expect(productCodesSync).toBeTruthy();
          expect(productCodesSync.length).toBe(2);
          expect(productCodesSync[0]).toBe('productCode2');
          expect(productCodesSync[1]).toBe('productCode3');
          done();
        });
    });
    it('should allow for multiple scene nodes with same product code', (done) => {
      mockSceneAdapter.getNodesFunc = getNodesMultipleProductCodesPerSceneNode;
      // NOTE(review): same shadowing of the outer service variable as above.
      const sceneNodeToProductLookupService: SceneNodeToProductLookupService =
        TestBed.inject(SceneNodeToProductLookupService);
      const productCodesBeforeMapPopulated =
        sceneNodeToProductLookupService.syncLookupProductCodes([
          'sceneNode1',
          'sceneNode2',
          'sceneNode3',
          'notPresent',
        ]);
      expect(productCodesBeforeMapPopulated).toBeTruthy();
      expect(productCodesBeforeMapPopulated.length).toBe(0);
      sceneNodeToProductLookupService.populateMapsForScene('some scene id');
      sceneNodeToProductLookupService
        .lookupProductCodes([
          'sceneNode1',
          'sceneNode2',
          'sceneNode3',
          'notPresent',
        ])
        .subscribe((productCodes: string[]) => {
          // The duplicated code appears once: results are de-duplicated.
          expect(productCodes).toBeTruthy();
          expect(productCodes.length).toBe(2);
          expect(productCodes[0]).toBe('productCodeA');
          expect(productCodes[1]).toBe('productCodeB');
          // The synchronous version should produce the same results at this point in time.
          const productCodesSync =
            sceneNodeToProductLookupService.syncLookupProductCodes([
              'sceneNode1',
              'sceneNode2',
              'sceneNode3',
              'notPresent',
            ]);
          expect(productCodesSync).toBeTruthy();
          expect(productCodesSync.length).toBe(2);
          expect(productCodesSync[0]).toBe('productCodeA');
          expect(productCodesSync[1]).toBe('productCodeB');
          done();
        });
    });
  });
});
import { ServiceClientOptions, RequestOptions, ServiceCallback, HttpOperationResponse } from 'ms-rest';
import * as models from '../models';
/**
* @class
* Workspaces
* __NOTE__: An instance of this class is automatically created for an
* instance of the DatabricksClient.
*/
export interface Workspaces {
/**
* Gets the workspace.
*
* @param {string} resourceGroupName The name of the resource group. The name
* is case insensitive.
*
* @param {string} workspaceName The name of the workspace.
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @returns {Promise} A promise is returned
*
* @resolve {HttpOperationResponse<Workspace>} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*/
getWithHttpOperationResponse(resourceGroupName: string, workspaceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.Workspace>>;
/**
* Gets the workspace.
*
* @param {string} resourceGroupName The name of the resource group. The name
* is case insensitive.
*
* @param {string} workspaceName The name of the workspace.
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @param {ServiceCallback} [optionalCallback] - The optional callback.
*
* @returns {ServiceCallback|Promise} If a callback was passed as the last
* parameter then it returns the callback else returns a Promise.
*
* {Promise} A promise is returned.
*
* @resolve {Workspace} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*
* {ServiceCallback} optionalCallback(err, result, request, response)
*
* {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
*
* {Workspace} [result] - The deserialized result object if an error did not occur.
* See {@link Workspace} for more information.
*
* {WebResource} [request] - The HTTP Request object if an error did not occur.
*
* {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
*/
get(resourceGroupName: string, workspaceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.Workspace>;
get(resourceGroupName: string, workspaceName: string, callback: ServiceCallback<models.Workspace>): void;
get(resourceGroupName: string, workspaceName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.Workspace>): void;
/**
* Deletes the workspace.
*
* @param {string} resourceGroupName The name of the resource group. The name
* is case insensitive.
*
* @param {string} workspaceName The name of the workspace.
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @returns {Promise} A promise is returned
*
* @resolve {HttpOperationResponse<null>} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*/
deleteMethodWithHttpOperationResponse(resourceGroupName: string, workspaceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
/**
* Deletes the workspace.
*
* @param {string} resourceGroupName The name of the resource group. The name
* is case insensitive.
*
* @param {string} workspaceName The name of the workspace.
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @param {ServiceCallback} [optionalCallback] - The optional callback.
*
* @returns {ServiceCallback|Promise} If a callback was passed as the last
* parameter then it returns the callback else returns a Promise.
*
* {Promise} A promise is returned.
*
* @resolve {null} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*
* {ServiceCallback} optionalCallback(err, result, request, response)
*
* {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
*
* {null} [result] - The deserialized result object if an error did not occur.
*
* {WebResource} [request] - The HTTP Request object if an error did not occur.
*
* {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
*/
deleteMethod(resourceGroupName: string, workspaceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<void>;
deleteMethod(resourceGroupName: string, workspaceName: string, callback: ServiceCallback<void>): void;
deleteMethod(resourceGroupName: string, workspaceName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
/**
* Creates a new workspace.
*
* @param {object} parameters Parameters supplied to the create or update a
* workspace.
*
* @param {string} parameters.managedResourceGroupId The managed resource group
* Id.
*
* @param {object} [parameters.parameters] Name and value pairs that define the
* workspace parameters.
*
* @param {string} [parameters.uiDefinitionUri] The blob URI where the UI
* definition file is located.
*
* @param {array} [parameters.authorizations] The workspace provider
* authorizations.
*
* @param {object} [parameters.sku] The SKU of the resource.
*
* @param {string} parameters.sku.name The SKU name.
*
* @param {string} [parameters.sku.tier] The SKU tier.
*
* @param {object} [parameters.tags] Resource tags.
*
* @param {string} parameters.location The geo-location where the resource
* lives
*
* @param {string} resourceGroupName The name of the resource group. The name
* is case insensitive.
*
* @param {string} workspaceName The name of the workspace.
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @returns {Promise} A promise is returned
*
* @resolve {HttpOperationResponse<Workspace>} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*/
createOrUpdateWithHttpOperationResponse(parameters: models.Workspace, resourceGroupName: string, workspaceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.Workspace>>;
/**
* Creates a new workspace.
*
* @param {object} parameters Parameters supplied to the create or update a
* workspace.
*
* @param {string} parameters.managedResourceGroupId The managed resource group
* Id.
*
* @param {object} [parameters.parameters] Name and value pairs that define the
* workspace parameters.
*
* @param {string} [parameters.uiDefinitionUri] The blob URI where the UI
* definition file is located.
*
* @param {array} [parameters.authorizations] The workspace provider
* authorizations.
*
* @param {object} [parameters.sku] The SKU of the resource.
*
* @param {string} parameters.sku.name The SKU name.
*
* @param {string} [parameters.sku.tier] The SKU tier.
*
* @param {object} [parameters.tags] Resource tags.
*
* @param {string} parameters.location The geo-location where the resource
* lives
*
* @param {string} resourceGroupName The name of the resource group. The name
* is case insensitive.
*
* @param {string} workspaceName The name of the workspace.
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @param {ServiceCallback} [optionalCallback] - The optional callback.
*
* @returns {ServiceCallback|Promise} If a callback was passed as the last
* parameter then it returns the callback else returns a Promise.
*
* {Promise} A promise is returned.
*
* @resolve {Workspace} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*
* {ServiceCallback} optionalCallback(err, result, request, response)
*
* {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
*
* {Workspace} [result] - The deserialized result object if an error did not occur.
* See {@link Workspace} for more information.
*
* {WebResource} [request] - The HTTP Request object if an error did not occur.
*
* {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
*/
createOrUpdate(parameters: models.Workspace, resourceGroupName: string, workspaceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.Workspace>;
createOrUpdate(parameters: models.Workspace, resourceGroupName: string, workspaceName: string, callback: ServiceCallback<models.Workspace>): void;
createOrUpdate(parameters: models.Workspace, resourceGroupName: string, workspaceName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.Workspace>): void;
/**
* Updates a workspace.
*
* @param {object} parameters The update to the workspace.
*
* @param {object} [parameters.tags] Resource tags.
*
* @param {string} resourceGroupName The name of the resource group. The name
* is case insensitive.
*
* @param {string} workspaceName The name of the workspace.
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @returns {Promise} A promise is returned
*
* @resolve {HttpOperationResponse<Workspace>} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*/
updateWithHttpOperationResponse(parameters: models.WorkspaceUpdate, resourceGroupName: string, workspaceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.Workspace>>;
/**
* Updates a workspace.
*
* @param {object} parameters The update to the workspace.
*
* @param {object} [parameters.tags] Resource tags.
*
* @param {string} resourceGroupName The name of the resource group. The name
* is case insensitive.
*
* @param {string} workspaceName The name of the workspace.
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @param {ServiceCallback} [optionalCallback] - The optional callback.
*
* @returns {ServiceCallback|Promise} If a callback was passed as the last
* parameter then it returns the callback else returns a Promise.
*
* {Promise} A promise is returned.
*
* @resolve {Workspace} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*
* {ServiceCallback} optionalCallback(err, result, request, response)
*
* {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
*
* {Workspace} [result] - The deserialized result object if an error did not occur.
* See {@link Workspace} for more information.
*
* {WebResource} [request] - The HTTP Request object if an error did not occur.
*
* {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
*/
update(parameters: models.WorkspaceUpdate, resourceGroupName: string, workspaceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.Workspace>;
update(parameters: models.WorkspaceUpdate, resourceGroupName: string, workspaceName: string, callback: ServiceCallback<models.Workspace>): void;
update(parameters: models.WorkspaceUpdate, resourceGroupName: string, workspaceName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.Workspace>): void;
/**
* Gets all the workspaces within a resource group.
*
* @param {string} resourceGroupName The name of the resource group. The name
* is case insensitive.
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @returns {Promise} A promise is returned
*
* @resolve {HttpOperationResponse<WorkspaceListResult>} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*/
listByResourceGroupWithHttpOperationResponse(resourceGroupName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.WorkspaceListResult>>;
/**
* Gets all the workspaces within a resource group.
*
* @param {string} resourceGroupName The name of the resource group. The name
* is case insensitive.
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @param {ServiceCallback} [optionalCallback] - The optional callback.
*
* @returns {ServiceCallback|Promise} If a callback was passed as the last
* parameter then it returns the callback else returns a Promise.
*
* {Promise} A promise is returned.
*
* @resolve {WorkspaceListResult} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*
* {ServiceCallback} optionalCallback(err, result, request, response)
*
* {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
*
* {WorkspaceListResult} [result] - The deserialized result object if an error did not occur.
* See {@link WorkspaceListResult} for more information.
*
* {WebResource} [request] - The HTTP Request object if an error did not occur.
*
* {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
*/
listByResourceGroup(resourceGroupName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.WorkspaceListResult>;
listByResourceGroup(resourceGroupName: string, callback: ServiceCallback<models.WorkspaceListResult>): void;
listByResourceGroup(resourceGroupName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.WorkspaceListResult>): void;
/**
* Gets all the workspaces within a subscription.
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @returns {Promise} A promise is returned
*
* @resolve {HttpOperationResponse<WorkspaceListResult>} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*/
listBySubscriptionWithHttpOperationResponse(options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.WorkspaceListResult>>;
/**
* Gets all the workspaces within a subscription.
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @param {ServiceCallback} [optionalCallback] - The optional callback.
*
* @returns {ServiceCallback|Promise} If a callback was passed as the last
* parameter then it returns the callback else returns a Promise.
*
* {Promise} A promise is returned.
*
* @resolve {WorkspaceListResult} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*
* {ServiceCallback} optionalCallback(err, result, request, response)
*
* {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
*
* {WorkspaceListResult} [result] - The deserialized result object if an error did not occur.
* See {@link WorkspaceListResult} for more information.
*
* {WebResource} [request] - The HTTP Request object if an error did not occur.
*
* {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
*/
listBySubscription(options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.WorkspaceListResult>;
listBySubscription(callback: ServiceCallback<models.WorkspaceListResult>): void;
listBySubscription(options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.WorkspaceListResult>): void;
/**
* Deletes the workspace.
*
* @param {string} resourceGroupName The name of the resource group. The name
* is case insensitive.
*
* @param {string} workspaceName The name of the workspace.
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @returns {Promise} A promise is returned
*
* @resolve {HttpOperationResponse<null>} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*/
beginDeleteMethodWithHttpOperationResponse(resourceGroupName: string, workspaceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<void>>;
/**
* Deletes the workspace.
*
* @param {string} resourceGroupName The name of the resource group. The name
* is case insensitive.
*
* @param {string} workspaceName The name of the workspace.
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @param {ServiceCallback} [optionalCallback] - The optional callback.
*
* @returns {ServiceCallback|Promise} If a callback was passed as the last
* parameter then it returns the callback else returns a Promise.
*
* {Promise} A promise is returned.
*
* @resolve {null} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*
* {ServiceCallback} optionalCallback(err, result, request, response)
*
* {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
*
* {null} [result] - The deserialized result object if an error did not occur.
*
* {WebResource} [request] - The HTTP Request object if an error did not occur.
*
* {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
*/
beginDeleteMethod(resourceGroupName: string, workspaceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<void>;
beginDeleteMethod(resourceGroupName: string, workspaceName: string, callback: ServiceCallback<void>): void;
beginDeleteMethod(resourceGroupName: string, workspaceName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<void>): void;
/**
* Creates a new workspace.
*
* @param {object} parameters Parameters supplied to the create or update a
* workspace.
*
* @param {string} parameters.managedResourceGroupId The managed resource group
* Id.
*
* @param {object} [parameters.parameters] Name and value pairs that define the
* workspace parameters.
*
* @param {string} [parameters.uiDefinitionUri] The blob URI where the UI
* definition file is located.
*
* @param {array} [parameters.authorizations] The workspace provider
* authorizations.
*
* @param {object} [parameters.sku] The SKU of the resource.
*
* @param {string} parameters.sku.name The SKU name.
*
* @param {string} [parameters.sku.tier] The SKU tier.
*
* @param {object} [parameters.tags] Resource tags.
*
* @param {string} parameters.location The geo-location where the resource
* lives
*
* @param {string} resourceGroupName The name of the resource group. The name
* is case insensitive.
*
* @param {string} workspaceName The name of the workspace.
*
* @param {object} [options] Optional Parameters.
*
* @param {object} [options.customHeaders] Headers that will be added to the
* request
*
* @returns {Promise} A promise is returned
*
* @resolve {HttpOperationResponse<Workspace>} - The deserialized result object.
*
* @reject {Error|ServiceError} - The error object.
*/
beginCreateOrUpdateWithHttpOperationResponse(parameters: models.Workspace, resourceGroupName: string, workspaceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.Workspace>>;
/**
 * Creates a new workspace.
 *
 * @param {object} parameters Parameters supplied to the create or update a
 * workspace.
 *
 * @param {string} parameters.managedResourceGroupId The managed resource group
 * Id.
 *
 * @param {object} [parameters.parameters] Name and value pairs that define the
 * workspace parameters.
 *
 * @param {string} [parameters.uiDefinitionUri] The blob URI where the UI
 * definition file is located.
 *
 * @param {array} [parameters.authorizations] The workspace provider
 * authorizations.
 *
 * @param {object} [parameters.sku] The SKU of the resource.
 *
 * @param {string} parameters.sku.name The SKU name.
 *
 * @param {string} [parameters.sku.tier] The SKU tier.
 *
 * @param {object} [parameters.tags] Resource tags.
 *
 * @param {string} parameters.location The geo-location where the resource
 * lives
 *
 * @param {string} resourceGroupName The name of the resource group. The name
 * is case insensitive.
 *
 * @param {string} workspaceName The name of the workspace.
 *
 * @param {object} [options] Optional Parameters.
 *
 * @param {object} [options.customHeaders] Headers that will be added to the
 * request
 *
 * @param {ServiceCallback} [optionalCallback] - The optional callback.
 *
 * @returns {ServiceCallback|Promise} If a callback was passed as the last
 * parameter then it returns the callback else returns a Promise.
 *
 * {Promise} A promise is returned.
 *
 * @resolve {Workspace} - The deserialized result object.
 *
 * @reject {Error|ServiceError} - The error object.
 *
 * {ServiceCallback} optionalCallback(err, result, request, response)
 *
 * {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
 *
 * {Workspace} [result] - The deserialized result object if an error did not occur.
 * See {@link Workspace} for more information.
 *
 * {WebResource} [request] - The HTTP Request object if an error did not occur.
 *
 * {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
 */
beginCreateOrUpdate(parameters: models.Workspace, resourceGroupName: string, workspaceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.Workspace>;
// Node-style callback overloads of the promise form documented above.
beginCreateOrUpdate(parameters: models.Workspace, resourceGroupName: string, workspaceName: string, callback: ServiceCallback<models.Workspace>): void;
beginCreateOrUpdate(parameters: models.Workspace, resourceGroupName: string, workspaceName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.Workspace>): void;
/**
 * Updates a workspace.
 *
 * @param {object} parameters The update to the workspace.
 *
 * @param {object} [parameters.tags] Resource tags.
 *
 * @param {string} resourceGroupName The name of the resource group. The name
 * is case insensitive.
 *
 * @param {string} workspaceName The name of the workspace.
 *
 * @param {object} [options] Optional Parameters.
 *
 * @param {object} [options.customHeaders] Headers that will be added to the
 * request
 *
 * @returns {Promise} A promise is returned
 *
 * @resolve {HttpOperationResponse<Workspace>} - The deserialized result object.
 *
 * @reject {Error|ServiceError} - The error object.
 */
beginUpdateWithHttpOperationResponse(parameters: models.WorkspaceUpdate, resourceGroupName: string, workspaceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.Workspace>>;
/**
 * Updates a workspace.
 *
 * @param {object} parameters The update to the workspace.
 *
 * @param {object} [parameters.tags] Resource tags.
 *
 * @param {string} resourceGroupName The name of the resource group. The name
 * is case insensitive.
 *
 * @param {string} workspaceName The name of the workspace.
 *
 * @param {object} [options] Optional Parameters.
 *
 * @param {object} [options.customHeaders] Headers that will be added to the
 * request
 *
 * @param {ServiceCallback} [optionalCallback] - The optional callback.
 *
 * @returns {ServiceCallback|Promise} If a callback was passed as the last
 * parameter then it returns the callback else returns a Promise.
 *
 * {Promise} A promise is returned.
 *
 * @resolve {Workspace} - The deserialized result object.
 *
 * @reject {Error|ServiceError} - The error object.
 *
 * {ServiceCallback} optionalCallback(err, result, request, response)
 *
 * {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
 *
 * {Workspace} [result] - The deserialized result object if an error did not occur.
 * See {@link Workspace} for more information.
 *
 * {WebResource} [request] - The HTTP Request object if an error did not occur.
 *
 * {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
 */
beginUpdate(parameters: models.WorkspaceUpdate, resourceGroupName: string, workspaceName: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.Workspace>;
// Node-style callback overloads of the promise form documented above.
beginUpdate(parameters: models.WorkspaceUpdate, resourceGroupName: string, workspaceName: string, callback: ServiceCallback<models.Workspace>): void;
beginUpdate(parameters: models.WorkspaceUpdate, resourceGroupName: string, workspaceName: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.Workspace>): void;
/**
 * Gets all the workspaces within a resource group.
 *
 * @param {string} nextPageLink The NextLink from the previous successful call
 * to List operation.
 *
 * @param {object} [options] Optional Parameters.
 *
 * @param {object} [options.customHeaders] Headers that will be added to the
 * request
 *
 * @returns {Promise} A promise is returned
 *
 * @resolve {HttpOperationResponse<WorkspaceListResult>} - The deserialized result object.
 *
 * @reject {Error|ServiceError} - The error object.
 */
listByResourceGroupNextWithHttpOperationResponse(nextPageLink: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.WorkspaceListResult>>;
/**
 * Gets all the workspaces within a resource group.
 *
 * @param {string} nextPageLink The NextLink from the previous successful call
 * to List operation.
 *
 * @param {object} [options] Optional Parameters.
 *
 * @param {object} [options.customHeaders] Headers that will be added to the
 * request
 *
 * @param {ServiceCallback} [optionalCallback] - The optional callback.
 *
 * @returns {ServiceCallback|Promise} If a callback was passed as the last
 * parameter then it returns the callback else returns a Promise.
 *
 * {Promise} A promise is returned.
 *
 * @resolve {WorkspaceListResult} - The deserialized result object.
 *
 * @reject {Error|ServiceError} - The error object.
 *
 * {ServiceCallback} optionalCallback(err, result, request, response)
 *
 * {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
 *
 * {WorkspaceListResult} [result] - The deserialized result object if an error did not occur.
 * See {@link WorkspaceListResult} for more information.
 *
 * {WebResource} [request] - The HTTP Request object if an error did not occur.
 *
 * {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
 */
listByResourceGroupNext(nextPageLink: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.WorkspaceListResult>;
// Node-style callback overloads of the promise form documented above.
listByResourceGroupNext(nextPageLink: string, callback: ServiceCallback<models.WorkspaceListResult>): void;
listByResourceGroupNext(nextPageLink: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.WorkspaceListResult>): void;
/**
 * Gets all the workspaces within a subscription.
 *
 * @param {string} nextPageLink The NextLink from the previous successful call
 * to List operation.
 *
 * @param {object} [options] Optional Parameters.
 *
 * @param {object} [options.customHeaders] Headers that will be added to the
 * request
 *
 * @returns {Promise} A promise is returned
 *
 * @resolve {HttpOperationResponse<WorkspaceListResult>} - The deserialized result object.
 *
 * @reject {Error|ServiceError} - The error object.
 */
listBySubscriptionNextWithHttpOperationResponse(nextPageLink: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.WorkspaceListResult>>;
/**
 * Gets all the workspaces within a subscription.
 *
 * @param {string} nextPageLink The NextLink from the previous successful call
 * to List operation.
 *
 * @param {object} [options] Optional Parameters.
 *
 * @param {object} [options.customHeaders] Headers that will be added to the
 * request
 *
 * @param {ServiceCallback} [optionalCallback] - The optional callback.
 *
 * @returns {ServiceCallback|Promise} If a callback was passed as the last
 * parameter then it returns the callback else returns a Promise.
 *
 * {Promise} A promise is returned.
 *
 * @resolve {WorkspaceListResult} - The deserialized result object.
 *
 * @reject {Error|ServiceError} - The error object.
 *
 * {ServiceCallback} optionalCallback(err, result, request, response)
 *
 * {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
 *
 * {WorkspaceListResult} [result] - The deserialized result object if an error did not occur.
 * See {@link WorkspaceListResult} for more information.
 *
 * {WebResource} [request] - The HTTP Request object if an error did not occur.
 *
 * {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
 */
listBySubscriptionNext(nextPageLink: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.WorkspaceListResult>;
// Node-style callback overloads of the promise form documented above.
listBySubscriptionNext(nextPageLink: string, callback: ServiceCallback<models.WorkspaceListResult>): void;
listBySubscriptionNext(nextPageLink: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.WorkspaceListResult>): void;
}
/**
* @class
* Operations
* __NOTE__: An instance of this class is automatically created for an
* instance of the DatabricksClient.
*/
export interface Operations {
/**
 * Lists all of the available RP operations.
 *
 * @param {object} [options] Optional Parameters.
 *
 * @param {object} [options.customHeaders] Headers that will be added to the
 * request
 *
 * @returns {Promise} A promise is returned
 *
 * @resolve {HttpOperationResponse<OperationListResult>} - The deserialized result object.
 *
 * @reject {Error|ServiceError} - The error object.
 */
listWithHttpOperationResponse(options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.OperationListResult>>;
/**
 * Lists all of the available RP operations.
 *
 * @param {object} [options] Optional Parameters.
 *
 * @param {object} [options.customHeaders] Headers that will be added to the
 * request
 *
 * @param {ServiceCallback} [optionalCallback] - The optional callback.
 *
 * @returns {ServiceCallback|Promise} If a callback was passed as the last
 * parameter then it returns the callback else returns a Promise.
 *
 * {Promise} A promise is returned.
 *
 * @resolve {OperationListResult} - The deserialized result object.
 *
 * @reject {Error|ServiceError} - The error object.
 *
 * {ServiceCallback} optionalCallback(err, result, request, response)
 *
 * {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
 *
 * {OperationListResult} [result] - The deserialized result object if an error did not occur.
 * See {@link OperationListResult} for more information.
 *
 * {WebResource} [request] - The HTTP Request object if an error did not occur.
 *
 * {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
 */
list(options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.OperationListResult>;
// Node-style callback overloads of the promise form documented above.
list(callback: ServiceCallback<models.OperationListResult>): void;
list(options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.OperationListResult>): void;
/**
 * Lists all of the available RP operations.
 *
 * @param {string} nextPageLink The NextLink from the previous successful call
 * to List operation.
 *
 * @param {object} [options] Optional Parameters.
 *
 * @param {object} [options.customHeaders] Headers that will be added to the
 * request
 *
 * @returns {Promise} A promise is returned
 *
 * @resolve {HttpOperationResponse<OperationListResult>} - The deserialized result object.
 *
 * @reject {Error|ServiceError} - The error object.
 */
listNextWithHttpOperationResponse(nextPageLink: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<HttpOperationResponse<models.OperationListResult>>;
/**
 * Lists all of the available RP operations.
 *
 * @param {string} nextPageLink The NextLink from the previous successful call
 * to List operation.
 *
 * @param {object} [options] Optional Parameters.
 *
 * @param {object} [options.customHeaders] Headers that will be added to the
 * request
 *
 * @param {ServiceCallback} [optionalCallback] - The optional callback.
 *
 * @returns {ServiceCallback|Promise} If a callback was passed as the last
 * parameter then it returns the callback else returns a Promise.
 *
 * {Promise} A promise is returned.
 *
 * @resolve {OperationListResult} - The deserialized result object.
 *
 * @reject {Error|ServiceError} - The error object.
 *
 * {ServiceCallback} optionalCallback(err, result, request, response)
 *
 * {Error|ServiceError} err - The Error object if an error occurred, null otherwise.
 *
 * {OperationListResult} [result] - The deserialized result object if an error did not occur.
 * See {@link OperationListResult} for more information.
 *
 * {WebResource} [request] - The HTTP Request object if an error did not occur.
 *
 * {http.IncomingMessage} [response] - The HTTP Response stream if an error did not occur.
 */
listNext(nextPageLink: string, options?: { customHeaders? : { [headerName: string]: string; } }): Promise<models.OperationListResult>;
// Node-style callback overloads of the promise form documented above.
listNext(nextPageLink: string, callback: ServiceCallback<models.OperationListResult>): void;
listNext(nextPageLink: string, options: { customHeaders? : { [headerName: string]: string; } }, callback: ServiceCallback<models.OperationListResult>): void;
} | the_stack |
import * as asn1js from "asn1js";
import { bufferToHexCodes, stringToArrayBuffer } from "pvutils";
import { getAlgorithmParameters, getCrypto } from "pkijs/src/common";
import Certificate from "pkijs/src/Certificate";
import CertificateRevocationList from "pkijs/src/CertificateRevocationList";
import AttributeTypeAndValue from "pkijs/src/AttributeTypeAndValue";
import Extension from "pkijs/src/Extension";
import Attribute from "pkijs/src/Attribute";
import SignedData from "pkijs/src/SignedData";
import EncapsulatedContentInfo from "pkijs/src/EncapsulatedContentInfo";
import SignerInfo from "pkijs/src/SignerInfo";
import IssuerAndSerialNumber from "pkijs/src/IssuerAndSerialNumber";
import SignedAndUnsignedAttributes from "pkijs/src/SignedAndUnsignedAttributes";
import ContentInfo from "pkijs/src/ContentInfo";
import RelativeDistinguishedNames from "pkijs/src/RelativeDistinguishedNames";
import OCSPRequest from "pkijs/src/OCSPRequest";
import DistributionPoint from "pkijs/src/DistributionPoint";
import GeneralName from "pkijs/src/GeneralName";
// *********************************************************************************
let cmsSignedBuffer = new ArrayBuffer(0); // ArrayBuffer with loaded or created CMS_Signed; written by createCMSSigned() and handleParsingFile(), read by parseCMSSigned() and verifyCMSSigned()
const trustedCertificates: Certificate[] = []; // Array of root certificates from "CA Bundle"; filled by parseCAbundle(), consumed by verifyCMSSigned()
// *********************************************************************************
// region Auxiliary functions
// *********************************************************************************
// Re-wraps a base64 string into PEM body lines: a CRLF is inserted after
// every 64 characters, with no trailing line break.
function formatPEM(pemString: string) {
    const chunks: string[] = [];
    for (let offset = 0; offset < pemString.length; offset += 64)
        chunks.push(pemString.substring(offset, offset + 64));
    return chunks.join("\r\n");
}
// *********************************************************************************
// endregion
// *********************************************************************************
// region Parse "CA Bundle" file
// *********************************************************************************
// Character-level state machine over the raw bytes of a PEM "CA bundle" file:
// finds each "-----BEGIN CERTIFICATE-----" block, accumulates its base64 body,
// decodes it, and pushes the resulting Certificate into `trustedCertificates`.
// Aborts the whole parse (with an alert) on the first malformed certificate.
function parseCAbundle(buffer: ArrayBuffer) {
    // region Initial variables
    const base64Chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";
    const startChars = "-----BEGIN CERTIFICATE-----";
    const endChars = "-----END CERTIFICATE-----";
    const endLineChars = "\r\n";
    const view = new Uint8Array(buffer);
    let waitForStart = false; // consuming the "-----BEGIN CERTIFICATE-----" header line
    let middleStage = true; // between PEM blocks, scanning for a '-' at the start of a line
    let waitForEnd = false; // consuming the "-----END CERTIFICATE-----" footer line
    let waitForEndLine = false; // skipping ahead to the end of the current header/footer line
    let started = false; // inside the base64-encoded certificate body
    let certBodyEncoded = ""; // base64 text accumulated for the current certificate
    // endregion
    for (let i = 0; i < view.length; i++) {
        if (started === true) {
            // Accumulate base64 body characters; anything else ends the body.
            if (base64Chars.indexOf(String.fromCharCode(view[i])) !== (-1))
                certBodyEncoded = certBodyEncoded + String.fromCharCode(view[i]);
            else {
                if (String.fromCharCode(view[i]) === "-") {
                    // region Decoded trustedCertificates
                    // '-' marks the start of the END line: decode the collected base64 into a Certificate.
                    const asn1 = asn1js.fromBER(stringToArrayBuffer(window.atob(certBodyEncoded)));
                    try {
                        trustedCertificates.push(new Certificate({ schema: asn1.result }));
                    }
                    catch (ex) {
                        // Bad DER/schema surfaces as a throw from the Certificate constructor.
                        alert("Wrong certificate format");
                        return;
                    }
                    // endregion
                    // region Set all "flag variables"
                    certBodyEncoded = "";
                    started = false;
                    waitForEnd = true;
                    // endregion
                }
            }
        }
        else {
            if (waitForEndLine === true) {
                // Skip CR/LF characters; the first non-EOL character decides the next state.
                if (endLineChars.indexOf(String.fromCharCode(view[i])) === (-1)) {
                    waitForEndLine = false;
                    if (waitForEnd === true) {
                        // Finished the END line: resume scanning for the next block.
                        waitForEnd = false;
                        middleStage = true;
                    }
                    else {
                        if (waitForStart === true) {
                            // Finished the BEGIN line: this character is the first of the base64 body.
                            waitForStart = false;
                            started = true;
                            certBodyEncoded = certBodyEncoded + String.fromCharCode(view[i]);
                        }
                        else
                            middleStage = true;
                    }
                }
            }
            else {
                if (middleStage === true) {
                    if (String.fromCharCode(view[i]) === "-") {
                        // Only a '-' at the very start of a line begins a BEGIN header.
                        if (i === 0 || String.fromCharCode(view[i - 1]) === "\r" || String.fromCharCode(view[i - 1]) === "\n") {
                            middleStage = false;
                            waitForStart = true;
                        }
                    }
                }
                else {
                    if (waitForStart === true) {
                        // Consume header characters until one falls outside the BEGIN-line character set.
                        if (startChars.indexOf(String.fromCharCode(view[i])) === (-1))
                            waitForEndLine = true;
                    }
                    else {
                        if (waitForEnd === true) {
                            // Consume footer characters until one falls outside the END-line character set.
                            if (endChars.indexOf(String.fromCharCode(view[i])) === (-1))
                                waitForEndLine = true;
                        }
                    }
                }
            }
        }
    }
}
// *********************************************************************************
// endregion
// *********************************************************************************
// region Parse existing CMS_Signed
// *********************************************************************************
/**
 * Parses the CMS SignedData currently held in `cmsSignedBuffer` and renders
 * its digest algorithms, encapsulated content type, embedded certificates,
 * embedded CRLs and signer count into the corresponding DOM elements.
 */
export function parseCMSSigned() {
    // region Initial check
    if (cmsSignedBuffer.byteLength === 0) {
        alert("Nothing to parse!");
        return;
    }
    // endregion
    // region Initial activities
    document.getElementById("cms-dgst-algos").innerHTML = "";
    document.getElementById("cms-certs").style.display = "none";
    document.getElementById("cms-crls").style.display = "none";
    const certificatesTable = document.getElementById("cms-certificates") as HTMLTableElement;
    while (certificatesTable.rows.length > 1)
        certificatesTable.deleteRow(certificatesTable.rows.length - 1);
    const crlsTable = document.getElementById("cms-rev-lists") as HTMLTableElement;
    while (crlsTable.rows.length > 1)
        crlsTable.deleteRow(crlsTable.rows.length - 1);
    // endregion
    // region Decode existing CMS Signed Data
    const asn1 = asn1js.fromBER(cmsSignedBuffer);
    const cmsContentSimpl = new ContentInfo({ schema: asn1.result });
    const cmsSignedSimpl = new SignedData({ schema: cmsContentSimpl.content });
    // endregion
    // region Put information about digest algorithms in the CMS Signed Data
    const dgstmap: { [oid: string]: string } = {
        "1.3.14.3.2.26": "SHA-1",
        "2.16.840.1.101.3.4.2.1": "SHA-256",
        "2.16.840.1.101.3.4.2.2": "SHA-384",
        "2.16.840.1.101.3.4.2.3": "SHA-512"
    };
    for (let i = 0; i < cmsSignedSimpl.digestAlgorithms.length; i++) {
        // Fall back to the raw OID string for algorithms not in the map.
        let typeval = dgstmap[cmsSignedSimpl.digestAlgorithms[i].algorithmId];
        if (typeof typeval === "undefined")
            typeval = cmsSignedSimpl.digestAlgorithms[i].algorithmId;
        const ulrow = `<li><p><span>${typeval}</span></p></li>`;
        document.getElementById("cms-dgst-algos").innerHTML = document.getElementById("cms-dgst-algos").innerHTML + ulrow;
    }
    // endregion
    // region Put information about encapsulated content type
    const contypemap: { [oid: string]: string } = {
        "1.3.6.1.4.1.311.2.1.4": "Authenticode signing information",
        "1.2.840.113549.1.7.1": "Data content"
    };
    let eContentType = contypemap[cmsSignedSimpl.encapContentInfo.eContentType];
    if (typeof eContentType === "undefined")
        eContentType = cmsSignedSimpl.encapContentInfo.eContentType;
    document.getElementById("cms-encap-type").innerHTML = eContentType;
    // endregion
    // region Put information about included certificates
    // Map of X.500 attribute-type OIDs to short RDN labels (RFC 4519 / X.520).
    const rdnmap: { [oid: string]: string } = {
        "2.5.4.6": "C",
        "2.5.4.10": "O", // BUGFIX: 2.5.4.10 is organizationName (was labeled "OU")
        "2.5.4.11": "OU", // BUGFIX: 2.5.4.11 is organizationalUnitName (was labeled "O")
        "2.5.4.3": "CN",
        "2.5.4.7": "L",
        "2.5.4.8": "S",
        "2.5.4.12": "T",
        "2.5.4.42": "GN",
        "2.5.4.43": "I",
        "2.5.4.4": "SN",
        "1.2.840.113549.1.9.1": "E-mail"
    };
    if ("certificates" in cmsSignedSimpl) {
        for (let cert of cmsSignedSimpl.certificates) {
            if (cert instanceof Certificate) {
                let ul = "<ul>";
                for (let i = 0; i < cert.issuer.typesAndValues.length; i++) {
                    let typeval = rdnmap[cert.issuer.typesAndValues[i].type.toString()];
                    if (typeof typeval === "undefined")
                        typeval = cert.issuer.typesAndValues[i].type.toString();
                    const subjval = cert.issuer.typesAndValues[i].value.valueBlock.value;
                    const ulrow = `<li><p><span>${typeval}</span> ${subjval}</p></li>`;
                    ul = ul + ulrow;
                }
                ul = `${ul}</ul>`;
                const row = certificatesTable.insertRow(certificatesTable.rows.length);
                const cell0 = row.insertCell(0);
                cell0.innerHTML = bufferToHexCodes(cert.serialNumber.valueBlock.valueHex);
                const cell1 = row.insertCell(1);
                cell1.innerHTML = ul;
            }
        }
        document.getElementById("cms-certs").style.display = "block";
    }
    // endregion
    // region Put information about included CRLs
    if ("crls" in cmsSignedSimpl) {
        for (let crl of cmsSignedSimpl.crls) {
            if (crl instanceof CertificateRevocationList) {
                let ul = "<ul>";
                for (let i = 0; i < crl.issuer.typesAndValues.length; i++) {
                    let typeval = rdnmap[crl.issuer.typesAndValues[i].type.toString()];
                    if (typeof typeval === "undefined")
                        typeval = crl.issuer.typesAndValues[i].type.toString();
                    const subjval = crl.issuer.typesAndValues[i].value.valueBlock.value;
                    const ulrow = `<li><p><span>${typeval}</span> ${subjval}</p></li>`;
                    ul = ul + ulrow;
                }
                ul = `${ul}</ul>`;
                // BUGFIX: append at the end of the CRL table itself; the original
                // indexed with certificatesTable.rows.length, which can exceed the
                // CRL table's row count and make insertRow throw an IndexSizeError.
                const row = crlsTable.insertRow(crlsTable.rows.length);
                const cell = row.insertCell(0);
                cell.innerHTML = ul;
            }
        }
        // BUGFIX: reveal the CRL section; the original re-showed "cms-certs",
        // leaving the CRL list permanently hidden after being hidden above.
        document.getElementById("cms-crls").style.display = "block";
    }
    // endregion
    // region Put information about number of signers
    document.getElementById("cms-signs").innerHTML = cmsSignedSimpl.signerInfos.length.toString();
    // endregion
    document.getElementById("cms-signed-data-block").style.display = "block";
}
// *********************************************************************************
// endregion
// *********************************************************************************
// region Create CMS_Signed
// *********************************************************************************
/**
 * Generates a throw-away key pair and self-signed certificate, then builds a
 * CMS SignedData over `buffer` (attached or detached, per the
 * "detached_signature" checkbox) and renders the PEM-encoded certificate,
 * private key and CMS structure into the "new_signed_data" element.
 * Hash and signature algorithms are read from the "hash_alg" and "sign_alg"
 * form controls. The encoded CMS is also stored in `cmsSignedBuffer`.
 */
export function createCMSSigned(buffer: ArrayBuffer) {
    // region Initial variables
    let sequence = Promise.resolve(null);
    const certSimpl = new Certificate();
    let cmsSignedSimpl: SignedData;
    let publicKey: CryptoKey;
    let privateKey: CryptoKey;
    let hashAlgorithm: string;
    const hashOption = (document.getElementById("hash_alg") as HTMLInputElement).value;
    switch (hashOption) {
        case "alg_SHA1":
            hashAlgorithm = "sha-1";
            break;
        case "alg_SHA256":
            hashAlgorithm = "sha-256";
            break;
        case "alg_SHA384":
            hashAlgorithm = "sha-384";
            break;
        case "alg_SHA512":
            hashAlgorithm = "sha-512";
            break;
        default:
    }
    let signatureAlgorithmName: string;
    const signOption = (document.getElementById("sign_alg") as HTMLInputElement).value;
    switch (signOption) {
        case "alg_RSA15":
            signatureAlgorithmName = "RSASSA-PKCS1-V1_5";
            break;
        case "alg_RSA2":
            signatureAlgorithmName = "RSA-PSS";
            break;
        case "alg_ECDSA":
            signatureAlgorithmName = "ECDSA";
            break;
        default:
    }
    // endregion
    // region Get a "crypto" extension
    const crypto = getCrypto();
    if (typeof crypto === "undefined") {
        alert("No WebCrypto extension found");
        return;
    }
    // endregion
    // region Put a static values
    certSimpl.version = 2;
    certSimpl.serialNumber = new asn1js.Integer({ value: 1 });
    certSimpl.issuer.typesAndValues.push(new AttributeTypeAndValue({
        type: "2.5.4.6", // Country name
        value: new asn1js.PrintableString({ value: "RU" })
    }));
    certSimpl.issuer.typesAndValues.push(new AttributeTypeAndValue({
        type: "2.5.4.3", // Common name
        value: new asn1js.BmpString({ value: "Test" })
    }));
    certSimpl.subject.typesAndValues.push(new AttributeTypeAndValue({
        type: "2.5.4.6", // Country name
        value: new asn1js.PrintableString({ value: "RU" })
    }));
    certSimpl.subject.typesAndValues.push(new AttributeTypeAndValue({
        type: "2.5.4.3", // Common name
        value: new asn1js.BmpString({ value: "Test" })
    }));
    certSimpl.notBefore.value = new Date(2016, 1, 1);
    certSimpl.notAfter.value = new Date(2019, 1, 1);
    certSimpl.extensions = []; // Extensions are not a part of certificate by default, it's an optional array
    // region "KeyUsage" extension
    const bitArray = new ArrayBuffer(1);
    const bitView = new Uint8Array(bitArray);
    bitView[0] = bitView[0] | 0x02; // Key usage "cRLSign" flag
    // bitView[0] = bitView[0] | 0x04; // Key usage "keyCertSign" flag
    const keyUsage = new asn1js.BitString({ valueHex: bitArray });
    certSimpl.extensions.push(new Extension({
        extnID: "2.5.29.15",
        critical: false,
        extnValue: keyUsage.toBER(false),
        parsedValue: keyUsage // Parsed value for well-known extensions
    }));
    // endregion
    // endregion
    // region Create a new key pair
    sequence = sequence.then(
        () => {
            // region Get default algorithm parameters for key generation
            const algorithm = getAlgorithmParameters(signatureAlgorithmName, "generatekey");
            if ("hash" in algorithm.algorithm)
                (algorithm.algorithm as any).hash.name = hashAlgorithm;
            // endregion
            return crypto.generateKey(algorithm.algorithm as any, true, algorithm.usages);
        }
    );
    // endregion
    // region Store new key in an interim variables
    sequence = sequence.then(
        (keyPair: CryptoKeyPair) => {
            publicKey = keyPair.publicKey;
            privateKey = keyPair.privateKey;
        },
        (error: Error) => alert(`Error during key generation: ${error}`)
    );
    // endregion
    // region Exporting public key into "subjectPublicKeyInfo" value of certificate
    sequence = sequence.then(
        () => certSimpl.subjectPublicKeyInfo.importKey(publicKey)
    );
    // endregion
    // region Signing final certificate
    sequence = sequence.then(
        () => certSimpl.sign(privateKey, hashAlgorithm),
        // NOTE: this rejection handler fires for failures of the PREVIOUS step
        // (public key export) — standard promise-chain semantics.
        error => alert(`Error during exporting public key: ${error}`)
    );
    // endregion
    // region Encode and store certificate
    sequence = sequence.then(
        () => {
            const certSimplEncoded = certSimpl.toSchema(true).toBER(false);
            const certSimplString = String.fromCharCode.apply(null, new Uint8Array(certSimplEncoded));
            let resultString = "-----BEGIN CERTIFICATE-----\r\n";
            resultString = resultString + formatPEM(window.btoa(certSimplString));
            resultString = `${resultString}\r\n-----END CERTIFICATE-----\r\n`;
            document.getElementById("new_signed_data").innerHTML = resultString;
            alert("Certificate created successfully!");
        },
        error => alert(`Error during signing: ${error}`)
    );
    // endregion
    // region Exporting private key
    sequence = sequence.then(
        () => crypto.exportKey("pkcs8", privateKey)
    );
    // endregion
    // region Store exported key on Web page
    sequence = sequence.then(
        result => {
            const privateKeyString = String.fromCharCode.apply(null, new Uint8Array(result));
            let resultString = document.getElementById("new_signed_data").innerHTML;
            resultString = `${resultString}\r\n-----BEGIN PRIVATE KEY-----\r\n`;
            resultString = resultString + formatPEM(window.btoa(privateKeyString));
            resultString = `${resultString}\r\n-----END PRIVATE KEY-----\r\n`;
            document.getElementById("new_signed_data").innerHTML = resultString;
            alert("Private key exported successfully!");
        },
        error => alert(`Error during exporting of private key: ${error}`)
    );
    // endregion
    // region Check if user wants us to include signed extensions
    if ((document.getElementById("add_ext") as HTMLInputElement).checked) {
        // region Create a message digest
        sequence = sequence.then(
            () => crypto.digest({ name: hashAlgorithm }, new Uint8Array(buffer))
        );
        // endregion
        // region Combine all signed extensions
        sequence = sequence.then(
            result => {
                const signedAttr: Attribute[] = [];
                signedAttr.push(new Attribute({
                    type: "1.2.840.113549.1.9.3",
                    values: [
                        new asn1js.ObjectIdentifier({ value: "1.2.840.113549.1.7.1" })
                    ]
                })); // contentType
                signedAttr.push(new Attribute({
                    type: "1.2.840.113549.1.9.5",
                    values: [
                        new asn1js.UTCTime({ valueDate: new Date() })
                    ]
                })); // signingTime
                signedAttr.push(new Attribute({
                    type: "1.2.840.113549.1.9.4",
                    values: [
                        new asn1js.OctetString({ valueHex: result })
                    ]
                })); // messageDigest
                return signedAttr;
            }
        );
        // endregion
    }
    // endregion
    // region Initialize CMS Signed Data structures and sign it
    sequence = sequence.then(
        result => {
            cmsSignedSimpl = new SignedData({
                version: 1,
                encapContentInfo: new EncapsulatedContentInfo({
                    eContentType: "1.2.840.113549.1.7.1" // "data" content type
                }),
                signerInfos: [
                    new SignerInfo({
                        version: 1,
                        sid: new IssuerAndSerialNumber({
                            issuer: certSimpl.issuer,
                            serialNumber: certSimpl.serialNumber
                        })
                    })
                ],
                certificates: [certSimpl]
            });
            if ((document.getElementById("add_ext") as HTMLInputElement).checked) {
                cmsSignedSimpl.signerInfos[0].signedAttrs = new SignedAndUnsignedAttributes({
                    type: 0,
                    attributes: result
                });
            }
            if ((document.getElementById("detached_signature") as HTMLInputElement).checked === false) {
                // Attached signature: embed the payload into the SignedData itself.
                const contentInfo = new EncapsulatedContentInfo({
                    eContent: new asn1js.OctetString({ valueHex: buffer })
                });
                cmsSignedSimpl.encapContentInfo.eContent = contentInfo.eContent;
                return cmsSignedSimpl.sign(privateKey, 0, hashAlgorithm);
            }
            // Detached signature: the payload is passed separately to sign().
            return cmsSignedSimpl.sign(privateKey, 0, hashAlgorithm, buffer);
        }
    );
    // endregion
    // region Create final result
    sequence.then(
        () => {
            const cmsSignedSchema = cmsSignedSimpl.toSchema(true);
            const cmsContentSimp = new ContentInfo({
                contentType: "1.2.840.113549.1.7.2",
                content: cmsSignedSchema
            });
            const _cmsSignedSchema = cmsContentSimp.toSchema();
            // region Make length of some elements in "indefinite form"
            _cmsSignedSchema.lenBlock.isIndefiniteForm = true;
            const block1 = _cmsSignedSchema.valueBlock.value[1];
            block1.lenBlock.isIndefiniteForm = true;
            const block2 = block1.valueBlock.value[0];
            block2.lenBlock.isIndefiniteForm = true;
            if ((document.getElementById("detached_signature") as HTMLInputElement).checked === false) {
                const block3 = block2.valueBlock.value[2];
                block3.lenBlock.isIndefiniteForm = true;
                block3.valueBlock.value[1].lenBlock.isIndefiniteForm = true;
                block3.valueBlock.value[1].valueBlock.value[0].lenBlock.isIndefiniteForm = true;
            }
            // endregion
            cmsSignedBuffer = _cmsSignedSchema.toBER(false);
            // region Convert ArrayBuffer to String
            let signedDataString = "";
            const view = new Uint8Array(cmsSignedBuffer);
            for (let i = 0; i < view.length; i++)
                signedDataString = signedDataString + String.fromCharCode(view[i]);
            // endregion
            let resultString = document.getElementById("new_signed_data").innerHTML;
            resultString = `${resultString}\r\n-----BEGIN CMS-----\r\n`;
            resultString = resultString + formatPEM(window.btoa(signedDataString));
            resultString = `${resultString}\r\n-----END CMS-----\r\n\r\n`;
            document.getElementById("new_signed_data").innerHTML = resultString;
            parseCMSSigned();
            alert("CMS Signed Data created successfully!");
        },
        error =>
            // BUGFIX: corrected user-visible typo "Erorr" -> "Error".
            alert(`Error during signing of CMS Signed Data: ${error}`)
    );
    // endregion
}
// *********************************************************************************
// endregion
// *********************************************************************************
// region Verify existing CMS_Signed
// *********************************************************************************
function verifyCMSSigned() {
// region Initial check
if (cmsSignedBuffer.byteLength === 0) {
alert("Nothing to verify!");
return;
}
// endregion
// region Decode existing CMS_Signed
const asn1 = asn1js.fromBER(cmsSignedBuffer);
const cmsContentSimpl = new ContentInfo({ schema: asn1.result });
const cmsSignedSimpl = new SignedData({ schema: cmsContentSimpl.content });
// endregion
// region Verify CMS_Signed
cmsSignedSimpl.verify({ signer: 0, trustedCerts: trustedCertificates }).
then(
result => alert(`Verification result: ${result}`),
error => alert(`Error during verification: ${error}`)
);
// endregion
}
// File-input handler for the "create" flow: reads the first selected file and
// builds a CMS SignedData from its raw bytes.
function handleFileBrowse(evt: Event) {
    const selectedFiles = (evt.target as any).files;
    const fileReader = new FileReader();
    fileReader.onload = loadEvent => createCMSSigned((loadEvent.target as any).result);
    fileReader.readAsArrayBuffer(selectedFiles[0]);
}
// File-input handler for the "parse" flow: loads the first selected file into
// the module-level `cmsSignedBuffer` and re-renders the parsed view.
function handleParsingFile(evt: Event) {
    const selectedFiles = (evt.target as any).files;
    const fileReader = new FileReader();
    fileReader.onload = loadEvent => {
        cmsSignedBuffer = (loadEvent.target as any).result;
        parseCMSSigned();
    };
    fileReader.readAsArrayBuffer(selectedFiles[0]);
}
// File-input handler for the CA bundle: reads the first selected file and
// feeds its bytes to the PEM bundle parser.
function handleCABundle(evt: Event) {
    const selectedFiles = (evt.target as any).files;
    const fileReader = new FileReader();
    fileReader.onload = loadEvent => parseCAbundle((loadEvent.target as any).result);
    fileReader.readAsArrayBuffer(selectedFiles[0]);
}
// dtslint type-level test: asserts that RelativeDistinguishedNames.isEqual()
// is typed to return boolean for both a RelativeDistinguishedNames argument
// and a raw ArrayBuffer. The "$ExpectType" comments are checked by the dtslint
// tooling and must remain on the same line as the tested expression.
function typetest_RelativeDN_isEqual() {
    const rdn1 = new RelativeDistinguishedNames();
    const rdn2 = new RelativeDistinguishedNames();
    const arraybuf = new ArrayBuffer(1);
    rdn1.isEqual(rdn2); // $ExpectType boolean
    rdn1.isEqual(arraybuf); // $ExpectType boolean
}
/**
 * Builds a throw-away self-signed certificate and creates an OCSP request for
 * it, using the certificate as its own issuer. All async steps are chained on
 * the `sequence` promise; each step reports failures of the previous one via
 * the rejection handler of its `.then`.
 */
function ocspRequestFromCert() {
    // region Initial variables
    let sequence = Promise.resolve(null);
    const certSimpl = new Certificate();
    let publicKey: CryptoKey;
    let privateKey: CryptoKey;
    let hashAlgorithm: string;
    // Hash algorithm is taken from the page's <select>/<input> element.
    const hashOption = (document.getElementById("hash_alg") as HTMLInputElement).value;
    switch (hashOption) {
        case "alg_SHA1":
            hashAlgorithm = "sha-1";
            break;
        case "alg_SHA256":
            hashAlgorithm = "sha-256";
            break;
        case "alg_SHA384":
            hashAlgorithm = "sha-384";
            break;
        case "alg_SHA512":
            hashAlgorithm = "sha-512";
            break;
        default:
            // NOTE(review): an unexpected option value leaves `hashAlgorithm`
            // unassigned — confirm the UI cannot produce one.
    }
    let signatureAlgorithmName: string;
    const signOption = (document.getElementById("sign_alg") as HTMLInputElement).value;
    switch (signOption) {
        case "alg_RSA15":
            signatureAlgorithmName = "RSASSA-PKCS1-V1_5";
            break;
        case "alg_RSA2":
            signatureAlgorithmName = "RSA-PSS";
            break;
        case "alg_ECDSA":
            signatureAlgorithmName = "ECDSA";
            break;
        default:
            // NOTE(review): same caveat — `signatureAlgorithmName` may remain
            // unassigned for unexpected option values.
    }
    // endregion
    // region Get a "crypto" extension
    const crypto = getCrypto();
    if (typeof crypto === "undefined") {
        alert("No WebCrypto extension found");
        return;
    }
    // endregion
    // region Put a static values
    certSimpl.version = 2; // encoded version 2 == X.509 v3
    certSimpl.serialNumber = new asn1js.Integer({ value: 1 });
    // Issuer and subject are identical: the certificate is self-issued.
    certSimpl.issuer.typesAndValues.push(new AttributeTypeAndValue({
        type: "2.5.4.6", // Country name
        value: new asn1js.PrintableString({ value: "RU" })
    }));
    certSimpl.issuer.typesAndValues.push(new AttributeTypeAndValue({
        type: "2.5.4.3", // Common name
        value: new asn1js.BmpString({ value: "Test" })
    }));
    certSimpl.subject.typesAndValues.push(new AttributeTypeAndValue({
        type: "2.5.4.6", // Country name
        value: new asn1js.PrintableString({ value: "RU" })
    }));
    certSimpl.subject.typesAndValues.push(new AttributeTypeAndValue({
        type: "2.5.4.3", // Common name
        value: new asn1js.BmpString({ value: "Test" })
    }));
    certSimpl.notBefore.value = new Date(2016, 1, 1);
    certSimpl.notAfter.value = new Date(2019, 1, 1);
    certSimpl.extensions = []; // Extensions are not a part of certificate by default, it's an optional array
    // region "KeyUsage" extension
    const bitArray = new ArrayBuffer(1);
    const bitView = new Uint8Array(bitArray);
    bitView[0] = bitView[0] | 0x02; // Key usage "cRLSign" flag
    // bitView[0] = bitView[0] | 0x04; // Key usage "keyCertSign" flag
    const keyUsage = new asn1js.BitString({ valueHex: bitArray });
    certSimpl.extensions.push(new Extension({
        extnID: "2.5.29.15",
        critical: false,
        extnValue: keyUsage.toBER(false),
        parsedValue: keyUsage // Parsed value for well-known extensions
    }));
    // endregion
    // endregion
    // region Create a new key pair
    sequence = sequence.then(
        () => {
            // region Get default algorithm parameters for key generation
            const algorithm = getAlgorithmParameters(signatureAlgorithmName, "generatekey");
            if ("hash" in algorithm.algorithm)
                (algorithm.algorithm as any).hash.name = hashAlgorithm;
            // endregion
            return crypto.generateKey(algorithm.algorithm as any, true, algorithm.usages);
        }
    );
    // endregion
    // region Store new key in an interim variables
    sequence = sequence.then(
        (keyPair: CryptoKeyPair) => {
            publicKey = keyPair.publicKey;
            privateKey = keyPair.privateKey;
        },
        (error: Error) => alert(`Error during key generation: ${error}`)
    );
    // endregion
    // region Exporting public key into "subjectPublicKeyInfo" value of certificate
    sequence = sequence.then(
        () => certSimpl.subjectPublicKeyInfo.importKey(publicKey)
    );
    // endregion
    // region Add CRL distribution point extension
    sequence = sequence.then(
        () => {
            certSimpl.extensions.push(
                new Extension({
                    extnID: "2.5.29.31", // CRL distribution points
                    critical: false,
                    parsedValue: {
                        // NOTE(review): key is spelled "distributionsPoints" —
                        // verify against the pkijs CRLDistributionPoints schema.
                        distributionsPoints: [
                            new DistributionPoint({
                                distributionPoint: [
                                    new GeneralName({
                                        type: 6, // uniformResourceIdentifier
                                        value: "http://example.com"
                                    })
                                ]
                            })
                        ]
                    }
                })
            );
        },
        error => alert(`Error during exporting public key: ${error}`)
    );
    // endregion
    // region Signing final certificate
    sequence = sequence.then(
        () => certSimpl.sign(privateKey, hashAlgorithm)
    );
    // endregion
    // region Create OCSPRequest
    const ocspReq = new OCSPRequest();
    // endregion
    // region Create OCSP for certificate
    sequence = sequence.then(
        () => {
            // The certificate doubles as its own issuer; SHA-384 is used for the CertID hash.
            return ocspReq.createForCertificate(certSimpl, {hashAlgorithm: "SHA-384", issuerCertificate: certSimpl});
        },
        error => alert(`Error during signing certificate: ${error}`)
    );
    // endregion
    // region Ensure all is good
    sequence.then(
        () => {},
        error => alert(`Error during create OCSP for cert: ${error}`)
    );
    // endregion
}
import {
tz,
} from 'moment-timezone';
import {
INodeProperties,
} from 'n8n-workflow';
// The single "Operation" selector shown for the appointment resource.
// The options list is generated from a compact [name, value, description] table.
export const appointmentOperations: INodeProperties[] = [
	{
		displayName: 'Operation',
		name: 'operation',
		type: 'options',
		displayOptions: {
			show: {
				resource: [
					'appointment',
				],
			},
		},
		options: ([
			['Create', 'create', 'Create an appointment'],
			['Delete', 'delete', 'Delete an appointment'],
			['Get', 'get', 'Retrieve an appointment'],
			['Get All', 'getAll', 'Retrieve all appointments'],
			['Update', 'update', 'Update an appointment'],
		] as Array<[string, string, string]>).map(([name, value, description]) => ({ name, value, description })),
		default: 'create',
	},
];
export const appointmentFields: INodeProperties[] = [
// ----------------------------------------
// appointment: create
// ----------------------------------------
{
displayName: 'Title',
name: 'title',
description: 'Title of the appointment',
type: 'string',
required: true,
default: '',
displayOptions: {
show: {
resource: [
'appointment',
],
operation: [
'create',
],
},
},
},
{
displayName: 'Start Date',
name: 'fromDate',
description: 'Timestamp that denotes the start of appointment. Start date if this is an all-day appointment.',
type: 'dateTime',
required: true,
default: '',
displayOptions: {
show: {
resource: [
'appointment',
],
operation: [
'create',
],
},
},
},
{
displayName: 'End Date',
name: 'endDate',
description: 'Timestamp that denotes the end of appointment. End date if this is an all-day appointment.',
type: 'dateTime',
required: true,
default: '',
displayOptions: {
show: {
resource: [
'appointment',
],
operation: [
'create',
],
},
},
},
{
displayName: 'Attendees',
name: 'attendees',
type: 'fixedCollection',
typeOptions: {
multipleValues: true,
},
displayOptions: {
show: {
resource: [
'appointment',
],
operation: [
'create',
],
},
},
placeholder: 'Add Attendee',
default: {},
options: [
{
name: 'attendee',
displayName: 'Attendee',
values: [
{
displayName: 'Type',
name: 'type',
type: 'options',
options: [
{
name: 'Contact',
value: 'contact',
},
{
name: 'User',
value: 'user',
},
],
default: 'contact',
},
{
displayName: 'User ID',
name: 'userId',
type: 'options',
displayOptions: {
show: {
type: [
'user',
],
},
},
typeOptions: {
loadOptionsMethod: 'getUsers',
},
default: '',
},
{
displayName: 'Contact ID',
name: 'contactId',
displayOptions: {
show: {
type: [
'contact',
],
},
},
type: 'string',
default: '',
},
],
},
],
},
{
displayName: 'Additional Fields',
name: 'additionalFields',
type: 'collection',
placeholder: 'Add Field',
default: {},
displayOptions: {
show: {
resource: [
'appointment',
],
operation: [
'create',
],
},
},
options: [
{
displayName: 'Creator ID',
name: 'creater_id',
type: 'options',
default: '',
typeOptions: {
loadOptionsMethod: 'getUsers',
},
description: 'ID of the user who created the appointment',
},
{
displayName: 'Is All-Day',
name: 'is_allday',
type: 'boolean',
default: false,
description: 'Whether it is an all-day appointment or not',
},
{
displayName: 'Latitude',
name: 'latitude',
type: 'string',
default: '',
description: 'Latitude of the location when you check in for an appointment',
},
{
displayName: 'Location',
name: 'location',
type: 'string',
default: '',
description: 'Location of the appointment',
},
{
displayName: 'Longitude',
name: 'longitude',
type: 'string',
default: '',
description: 'Longitude of the location when you check in for an appointment',
},
{
displayName: 'Outcome ID',
name: 'outcome_id',
type: 'options',
default: '',
typeOptions: {
loadOptionsMethod: 'getOutcomes',
},
description: 'ID of outcome of Appointment sales activity type',
},
{
displayName: 'Target ID',
name: 'targetable_id',
type: 'string',
default: '',
description: 'ID of contact/account against whom appointment is created',
},
{
displayName: 'Target Type',
name: 'targetable_type',
type: 'options',
default: 'Contact',
options: [
{
name: 'Contact',
value: 'Contact',
},
{
name: 'Deal',
value: 'Deal',
},
{
name: 'SalesAccount',
value: 'SalesAccount',
},
],
},
{
displayName: 'Time Zone',
name: 'time_zone',
type: 'options',
default: '',
description: 'Timezone that the appointment is scheduled in',
options: tz.names().map(tz => ({ name: tz, value: tz })),
},
],
},
// ----------------------------------------
// appointment: delete
// ----------------------------------------
{
displayName: 'Appointment ID',
name: 'appointmentId',
description: 'ID of the appointment to delete',
type: 'string',
required: true,
default: '',
displayOptions: {
show: {
resource: [
'appointment',
],
operation: [
'delete',
],
},
},
},
// ----------------------------------------
// appointment: get
// ----------------------------------------
{
displayName: 'Appointment ID',
name: 'appointmentId',
description: 'ID of the appointment to retrieve',
type: 'string',
required: true,
default: '',
displayOptions: {
show: {
resource: [
'appointment',
],
operation: [
'get',
],
},
},
},
// ----------------------------------------
// appointment: getAll
// ----------------------------------------
{
displayName: 'Return All',
name: 'returnAll',
type: 'boolean',
default: false,
description: 'Whether to return all results or only up to a given limit',
displayOptions: {
show: {
resource: [
'appointment',
],
operation: [
'getAll',
],
},
},
},
{
displayName: 'Limit',
name: 'limit',
type: 'number',
default: 50,
description: 'How many results to return',
typeOptions: {
minValue: 1,
},
displayOptions: {
show: {
resource: [
'appointment',
],
operation: [
'getAll',
],
returnAll: [
false,
],
},
},
},
{
displayName: 'Filters',
name: 'filters',
type: 'collection',
default: '',
placeholder: 'Add Filter',
displayOptions: {
show: {
resource: [
'appointment',
],
operation: [
'getAll',
],
},
},
options: [
{
displayName: 'Include',
name: 'include',
type: 'options',
default: 'creater',
options: [
{
name: 'Appointment Attendees',
value: 'appointment_attendees',
},
{
name: 'Creator',
value: 'creater',
},
{
name: 'Target',
value: 'targetable',
},
],
},
{
displayName: 'Time',
name: 'filter',
type: 'options',
default: 'upcoming',
options: [
{
name: 'Past',
value: 'past',
},
{
name: 'Upcoming',
value: 'upcoming',
},
],
},
],
},
// ----------------------------------------
// appointment: update
// ----------------------------------------
{
displayName: 'Appointment ID',
name: 'appointmentId',
description: 'ID of the appointment to update',
type: 'string',
required: true,
default: '',
displayOptions: {
show: {
resource: [
'appointment',
],
operation: [
'update',
],
},
},
},
{
displayName: 'Update Fields',
name: 'updateFields',
type: 'collection',
placeholder: 'Add Field',
default: {},
displayOptions: {
show: {
resource: [
'appointment',
],
operation: [
'update',
],
},
},
options: [
{
displayName: 'Attendees',
name: 'attendees',
type: 'fixedCollection',
typeOptions: {
multipleValues: true,
},
placeholder: 'Add Attendee',
default: {},
options: [
{
name: 'attendee',
displayName: 'Attendee',
values: [
{
displayName: 'Type',
name: 'type',
type: 'options',
options: [
{
name: 'Contact',
value: 'contact',
},
{
name: 'User',
value: 'user',
},
],
default: 'contact',
},
{
displayName: 'User ID',
name: 'userId',
type: 'options',
displayOptions: {
show: {
type: [
'user',
],
},
},
typeOptions: {
loadOptionsMethod: 'getUsers',
},
default: '',
},
{
displayName: 'Contact ID',
name: 'contactId',
displayOptions: {
show: {
type: [
'contact',
],
},
},
type: 'string',
default: '',
},
],
},
],
},
{
displayName: 'Creator ID',
name: 'creater_id',
type: 'options',
default: [],
typeOptions: {
loadOptionsMethod: 'getUsers',
},
description: 'ID of the user who created the appointment',
},
{
displayName: 'End Date',
name: 'endDate',
description: 'Timestamp that denotes the end of appointment. End date if this is an all-day appointment.',
type: 'dateTime',
default: '',
},
{
displayName: 'Is All-Day',
name: 'is_allday',
type: 'boolean',
default: false,
description: 'Whether it is an all-day appointment or not',
},
{
displayName: 'Latitude',
name: 'latitude',
type: 'string',
default: '',
description: 'Latitude of the location when you check in for an appointment',
},
{
displayName: 'Location',
name: 'location',
type: 'string',
default: '',
description: 'Location of the appointment',
},
{
displayName: 'Longitude',
name: 'longitude',
type: 'string',
default: '',
description: 'Longitude of the location when you check in for an appointment',
},
{
displayName: 'Outcome ID',
name: 'outcome_id',
type: 'options',
default: '',
typeOptions: {
loadOptionsMethod: 'getOutcomes',
},
description: 'ID of outcome of Appointment sales activity type',
},
{
displayName: 'Start Date',
name: 'fromDate',
description: 'Timestamp that denotes the start of appointment. Start date if this is an all-day appointment.',
type: 'dateTime',
default: '',
},
{
displayName: 'Target ID',
name: 'targetable_id',
type: 'string',
default: '',
description: 'ID of contact/account against whom appointment is created',
},
{
displayName: 'Target Type',
name: 'targetable_type',
type: 'options',
default: 'Contact',
options: [
{
name: 'Contact',
value: 'Contact',
},
{
name: 'Deal',
value: 'Deal',
},
{
name: 'SalesAccount',
value: 'SalesAccount',
},
],
},
{
displayName: 'Time Zone',
name: 'time_zone',
type: 'options',
default: '',
description: 'Timezone that the appointment is scheduled in',
options: tz.names().map(tz => ({ name: tz, value: tz })),
},
{
displayName: 'Title',
name: 'title',
type: 'string',
default: '',
description: 'Title of the appointment',
},
],
},
]; | the_stack |
import test from 'tape';
import {
addressLabels,
assertCombinedVault,
assertDeactivatedVault,
getAccount,
initVault,
killStuckProcess,
logDebug,
spokSameBignum,
verifyTokenBalance,
} from './utils';
import { Connection, Keypair, PublicKey, Transaction } from '@solana/web3.js';
import {
assertConfirmedTransaction,
assertError,
assertTransactionSummary,
PayerTransactionHandler,
TokenBalances,
} from '@metaplex-foundation/amman';
import {
activateVault,
addTokenToInactiveVault,
combineVault,
CombineVaultSetup,
SafetyDepositBoxVaultMismatchError,
SafetyDepositSetup,
setupWithdrawFromSafetyDestinationAccount,
StoreLessThanAmountError,
withdrawTokenFromSafetyDepositBox,
WithdrawTokenFromSafetyDepositBoxAccounts,
} from '../src/mpl-token-vault';
import spok from 'spok';
import { cusper } from '../src/errors';
// Presumably tears down lingering handles/connections so the test process can
// exit once all tapes finish — TODO confirm against ./utils.
killStuckProcess();
// Withdrawing the full store amount empties the store, credits the destination
// and leaves the vault in the Deactivated state.
test('combined vault: with one safety deposit, withdraw all tokens', async (t) => {
  const TOKEN_AMOUNT = 2;
  const { transactionHandler, connection, safetyDeposit, initVaultAccounts } =
    await combinedVaultWithOneDeposit(t, TOKEN_AMOUNT);
  const {
    payer,
    vault,
    authority: vaultAuthority,
    vaultAuthorityPair,
    fractionMint,
  } = initVaultAccounts;
  // Create the token account the withdrawn tokens will be sent to.
  const [setupDestinationIxs, setupDestinationSigners, { destination }] =
    await setupWithdrawFromSafetyDestinationAccount(connection, {
      payer,
      mint: safetyDeposit.tokenMint,
    });
  addressLabels.addLabels({ destination });
  const accounts: WithdrawTokenFromSafetyDepositBoxAccounts = {
    destination,
    fractionMint,
    vault,
    vaultAuthority,
    store: safetyDeposit.store,
    safetyDeposit: safetyDeposit.safetyDeposit,
  };
  const withdrawIx = await withdrawTokenFromSafetyDepositBox(accounts, TOKEN_AMOUNT);
  const signers = [...setupDestinationSigners, vaultAuthorityPair];
  const tx = new Transaction().add(...setupDestinationIxs).add(withdrawIx);
  const res = await transactionHandler.sendAndConfirmTransaction(tx, signers);
  assertConfirmedTransaction(t, res.txConfirmed);
  assertTransactionSummary(t, res.txSummary, {
    msgRx: [/Withdraw Token from Safety Deposit Box/i, /Transfer/i, /success/i],
  });
  const tokens = await TokenBalances.forTransaction(
    connection,
    res.txSignature,
    addressLabels,
  ).dump(logDebug);
  // Store drained 2 -> 0, destination credited 0 -> 2.
  await verifyTokenBalance(t, tokens, safetyDeposit.store, safetyDeposit.tokenMint, 2, 0);
  await verifyTokenBalance(t, tokens, destination, safetyDeposit.tokenMint, 0, 2);
  await assertDeactivatedVault(t, connection, initVaultAccounts, {
    allowFurtherShareCreation: true,
  });
});
// Withdrawing only part of the store leaves the vault Combined with the
// remaining tokens still counted as one token type.
test('combined vault: with one safety deposit, withdraw half of all tokens', async (t) => {
  const TOKEN_AMOUNT = 2;
  const { transactionHandler, connection, safetyDeposit, initVaultAccounts } =
    await combinedVaultWithOneDeposit(t, TOKEN_AMOUNT);
  const {
    payer,
    vault,
    authority: vaultAuthority,
    vaultAuthorityPair,
    fractionMint,
  } = initVaultAccounts;
  const [setupDestinationIxs, setupDestinationSigners, { destination }] =
    await setupWithdrawFromSafetyDestinationAccount(connection, {
      payer,
      mint: safetyDeposit.tokenMint,
    });
  addressLabels.addLabels({ destination });
  const accounts: WithdrawTokenFromSafetyDepositBoxAccounts = {
    destination,
    fractionMint,
    vault,
    vaultAuthority,
    store: safetyDeposit.store,
    safetyDeposit: safetyDeposit.safetyDeposit,
  };
  // Only half of the deposited amount is withdrawn.
  const withdrawIx = await withdrawTokenFromSafetyDepositBox(accounts, TOKEN_AMOUNT / 2);
  const signers = [...setupDestinationSigners, vaultAuthorityPair];
  const tx = new Transaction().add(...setupDestinationIxs).add(withdrawIx);
  const res = await transactionHandler.sendAndConfirmTransaction(tx, signers);
  assertConfirmedTransaction(t, res.txConfirmed);
  assertTransactionSummary(t, res.txSummary, {
    msgRx: [/Withdraw Token from Safety Deposit Box/i, /Transfer/i, /success/i],
  });
  const tokens = await TokenBalances.forTransaction(
    connection,
    res.txSignature,
    addressLabels,
  ).dump(logDebug);
  // Store 2 -> 1, destination 0 -> 1; one token remains in the vault.
  await verifyTokenBalance(t, tokens, safetyDeposit.store, safetyDeposit.tokenMint, 2, 1);
  await verifyTokenBalance(t, tokens, destination, safetyDeposit.tokenMint, 0, 1);
  await assertCombinedVault(t, connection, initVaultAccounts, {
    allowFurtherShareCreation: true,
    tokenTypeCount: 1,
  });
});
// Drains a vault holding two deposits across three withdrawals and checks the
// tokenTypeCount drops (2 -> 1 -> 0) until the vault deactivates.
test('combined vault: with two safety deposits, repeatedly withdrawing tokens until all are gone', async (t) => {
  const FIRST_TOKEN_AMOUNT = 2;
  const SECOND_TOKEN_AMOUNT = 3;
  const {
    transactionHandler,
    connection,
    firstSafetyDeposit,
    secondSafetyDeposit,
    initVaultAccounts,
  } = await combinedVaultWithTwoDeposits(t, FIRST_TOKEN_AMOUNT, SECOND_TOKEN_AMOUNT);
  const {
    payer,
    vault,
    authority: vaultAuthority,
    vaultAuthorityPair,
    fractionMint,
  } = initVaultAccounts;
  // Account sets shared by all withdraw instructions; only `destination` varies.
  const firstDepositAccounts: Omit<WithdrawTokenFromSafetyDepositBoxAccounts, 'destination'> = {
    fractionMint,
    vault,
    vaultAuthority,
    store: firstSafetyDeposit.store,
    safetyDeposit: firstSafetyDeposit.safetyDeposit,
  };
  const secondDepositAccounts: Omit<WithdrawTokenFromSafetyDepositBoxAccounts, 'destination'> = {
    fractionMint,
    vault,
    vaultAuthority,
    store: secondSafetyDeposit.store,
    safetyDeposit: secondSafetyDeposit.safetyDeposit,
  };
  {
    t.comment('+++ Withdrawing half of first deposit');
    const [setupDestinationIxs, setupDestinationSigners, { destination }] =
      await setupWithdrawFromSafetyDestinationAccount(connection, {
        payer,
        mint: firstSafetyDeposit.tokenMint,
      });
    const withdrawIx = await withdrawTokenFromSafetyDepositBox(
      { ...firstDepositAccounts, destination },
      FIRST_TOKEN_AMOUNT / 2,
    );
    const signers = [...setupDestinationSigners, vaultAuthorityPair];
    const tx = new Transaction().add(...setupDestinationIxs).add(withdrawIx);
    const res = await transactionHandler.sendAndConfirmTransaction(tx, signers);
    assertConfirmedTransaction(t, res.txConfirmed);
    assertTransactionSummary(t, res.txSummary, {
      msgRx: [/Withdraw Token from Safety Deposit Box/i, /Transfer/i, /success/i],
    });
    const tokens = await TokenBalances.forTransaction(
      connection,
      res.txSignature,
      addressLabels,
    ).dump(logDebug);
    await verifyTokenBalance(
      t,
      tokens,
      firstSafetyDeposit.store,
      firstSafetyDeposit.tokenMint,
      2,
      1,
    );
    await verifyTokenBalance(t, tokens, destination, firstSafetyDeposit.tokenMint, 0, 1);
    const storeAccount = await getAccount(connection, firstSafetyDeposit.store);
    spok(t, storeAccount, {
      $topic: 'firstSafetyDeposit: store account',
      mint: firstSafetyDeposit.tokenMint,
      amount: spokSameBignum(1),
    });
    // First store not yet empty: both token types still present.
    await assertCombinedVault(t, connection, initVaultAccounts, {
      allowFurtherShareCreation: true,
      tokenTypeCount: 2,
    });
  }
  {
    t.comment('+++ Withdrawing all of second deposit');
    const [setupDestinationIxs, setupDestinationSigners, { destination }] =
      await setupWithdrawFromSafetyDestinationAccount(connection, {
        payer,
        mint: secondSafetyDeposit.tokenMint,
      });
    const withdrawIx = await withdrawTokenFromSafetyDepositBox(
      { ...secondDepositAccounts, destination },
      SECOND_TOKEN_AMOUNT,
    );
    const signers = [...setupDestinationSigners, vaultAuthorityPair];
    const tx = new Transaction().add(...setupDestinationIxs).add(withdrawIx);
    const res = await transactionHandler.sendAndConfirmTransaction(tx, signers);
    assertConfirmedTransaction(t, res.txConfirmed);
    assertTransactionSummary(t, res.txSummary, {
      msgRx: [/Withdraw Token from Safety Deposit Box/i, /Transfer/i, /success/i],
    });
    const tokens = await TokenBalances.forTransaction(
      connection,
      res.txSignature,
      addressLabels,
    ).dump(logDebug);
    await verifyTokenBalance(
      t,
      tokens,
      secondSafetyDeposit.store,
      secondSafetyDeposit.tokenMint,
      SECOND_TOKEN_AMOUNT,
      0,
    );
    await verifyTokenBalance(
      t,
      tokens,
      destination,
      secondSafetyDeposit.tokenMint,
      0,
      SECOND_TOKEN_AMOUNT,
    );
    const storeAccount = await getAccount(connection, secondSafetyDeposit.store);
    spok(t, storeAccount, {
      $topic: 'secondSafetyDeposit: store account',
      mint: secondSafetyDeposit.tokenMint,
      amount: spokSameBignum(0),
    });
    // Second store fully drained: only the first token type remains.
    await assertCombinedVault(t, connection, initVaultAccounts, {
      allowFurtherShareCreation: true,
      tokenTypeCount: 1,
    });
  }
  {
    t.comment('+++ Withdrawing remaining tokens of first deposit');
    const [setupDestinationIxs, setupDestinationSigners, { destination }] =
      await setupWithdrawFromSafetyDestinationAccount(connection, {
        payer,
        mint: firstSafetyDeposit.tokenMint,
      });
    const withdrawIx = await withdrawTokenFromSafetyDepositBox(
      { ...firstDepositAccounts, destination },
      FIRST_TOKEN_AMOUNT / 2,
    );
    const signers = [...setupDestinationSigners, vaultAuthorityPair];
    const tx = new Transaction().add(...setupDestinationIxs).add(withdrawIx);
    const res = await transactionHandler.sendAndConfirmTransaction(tx, signers);
    assertConfirmedTransaction(t, res.txConfirmed);
    assertTransactionSummary(t, res.txSummary, {
      msgRx: [/Withdraw Token from Safety Deposit Box/i, /Transfer/i, /success/i],
    });
    const tokens = await TokenBalances.forTransaction(
      connection,
      res.txSignature,
      addressLabels,
    ).dump(logDebug);
    await verifyTokenBalance(
      t,
      tokens,
      firstSafetyDeposit.store,
      firstSafetyDeposit.tokenMint,
      1,
      0,
    );
    await verifyTokenBalance(t, tokens, destination, firstSafetyDeposit.tokenMint, 0, 1);
    // All stores empty: the vault deactivates.
    await assertDeactivatedVault(t, connection, initVaultAccounts, {
      allowFurtherShareCreation: true,
      tokenTypeCount: 0,
    });
  }
});
// -----------------
// Failure cases
// -----------------
// Requesting more tokens than the store holds must fail with
// StoreLessThanAmountError and leave the store balance untouched.
test('combined vault: with one safety deposit, withdraw more tokens than it contains, fails', async (t) => {
  const TOKEN_AMOUNT = 2;
  const { transactionHandler, connection, safetyDeposit, initVaultAccounts } =
    await combinedVaultWithOneDeposit(t, TOKEN_AMOUNT);
  const {
    payer,
    vault,
    authority: vaultAuthority,
    vaultAuthorityPair,
    fractionMint,
  } = initVaultAccounts;
  const [setupDestinationIxs, setupDestinationSigners, { destination }] =
    await setupWithdrawFromSafetyDestinationAccount(connection, {
      payer,
      mint: safetyDeposit.tokenMint,
    });
  addressLabels.addLabels({ destination });
  const accounts: WithdrawTokenFromSafetyDepositBoxAccounts = {
    destination,
    fractionMint,
    vault,
    vaultAuthority,
    store: safetyDeposit.store,
    safetyDeposit: safetyDeposit.safetyDeposit,
  };
  // One more token than the store actually contains.
  const withdrawIx = await withdrawTokenFromSafetyDepositBox(accounts, TOKEN_AMOUNT + 1);
  const signers = [...setupDestinationSigners, vaultAuthorityPair];
  const tx = new Transaction().add(...setupDestinationIxs).add(withdrawIx);
  try {
    await transactionHandler.sendAndConfirmTransaction(tx, signers);
  } catch (err) {
    assertError(t, err, [/Withdraw Token from Safety Deposit Box/i, /Store has less than amount/i]);
    const cusperError = cusper.errorFromProgramLogs(err.logs);
    t.ok(
      cusperError instanceof StoreLessThanAmountError,
      'cusper identifies as StoreLessThanAmountError',
    );
  }
  await assertCombinedVault(t, connection, initVaultAccounts, {
    allowFurtherShareCreation: true,
    tokenTypeCount: 1,
  });
  // Store should not have been debited
  const storeAccount = await getAccount(connection, safetyDeposit.store);
  spok(t, storeAccount, {
    $topic: 'safetyDeposit: store account',
    mint: safetyDeposit.tokenMint,
    amount: spokSameBignum(TOKEN_AMOUNT),
  });
});
// The safety-deposit accounts exist but were never registered with the vault
// (`forgetToMakeDeposit`), so the program has no deposit entry to read and the
// withdraw must fail.
test('combined vault: with no safety deposit passing uninitialized one, withdraw tokens, fails', async (t) => {
  const TOKEN_AMOUNT = 2;
  const { transactionHandler, connection, safetyDeposit, initVaultAccounts } =
    await combinedVaultWithOneDeposit(t, TOKEN_AMOUNT, { forgetToMakeDeposit: true });
  const {
    payer,
    vault,
    authority: vaultAuthority,
    vaultAuthorityPair,
    fractionMint,
  } = initVaultAccounts;
  const [setupDestinationIxs, setupDestinationSigners, { destination }] =
    await setupWithdrawFromSafetyDestinationAccount(connection, {
      payer,
      mint: safetyDeposit.tokenMint,
    });
  addressLabels.addLabels({ destination });
  const accounts: WithdrawTokenFromSafetyDepositBoxAccounts = {
    destination,
    fractionMint,
    vault,
    vaultAuthority,
    store: safetyDeposit.store,
    safetyDeposit: safetyDeposit.safetyDeposit,
  };
  const withdrawIx = await withdrawTokenFromSafetyDepositBox(accounts, TOKEN_AMOUNT);
  const signers = [...setupDestinationSigners, vaultAuthorityPair];
  const tx = new Transaction().add(...setupDestinationIxs).add(withdrawIx);
  try {
    await transactionHandler.sendAndConfirmTransaction(tx, signers);
    // Previously this path only logged the signature via a leftover
    // console.log; an unexpectedly successful withdraw must fail the test.
    t.fail('withdraw against an uninitialized safety deposit should have failed');
  } catch (err) {
    assertError(t, err, [/Withdraw Token from Safety Deposit Box/i, /index out of bounds/i]);
  }
  await assertCombinedVault(t, connection, initVaultAccounts, {
    allowFurtherShareCreation: true,
    tokenTypeCount: 0,
  });
});
// Passing a safety deposit that belongs to a different vault must fail with
// SafetyDepositBoxVaultMismatchError and not debit the other vault's store.
test('combined vault: with no safety deposit passing one from different vault, withdraw tokens, fails', async (t) => {
  const TOKEN_AMOUNT = 2;
  // A fully set up vault whose deposit we will try to steal from below.
  const { safetyDeposit: otherVaultSafetyDeposit } = await combinedVaultWithOneDeposit(
    t,
    TOKEN_AMOUNT,
  );
  // The vault under test has no deposit of its own.
  const { transactionHandler, connection, initVaultAccounts } = await combinedVaultWithOneDeposit(
    t,
    TOKEN_AMOUNT,
    { forgetToMakeDeposit: true },
  );
  const {
    payer,
    vault,
    authority: vaultAuthority,
    vaultAuthorityPair,
    fractionMint,
  } = initVaultAccounts;
  const [setupDestinationIxs, setupDestinationSigners, { destination }] =
    await setupWithdrawFromSafetyDestinationAccount(connection, {
      payer,
      mint: otherVaultSafetyDeposit.tokenMint,
    });
  addressLabels.addLabels({ destination });
  const accounts: WithdrawTokenFromSafetyDepositBoxAccounts = {
    destination,
    fractionMint,
    vault,
    vaultAuthority,
    store: otherVaultSafetyDeposit.store,
    safetyDeposit: otherVaultSafetyDeposit.safetyDeposit,
  };
  const withdrawIx = await withdrawTokenFromSafetyDepositBox(accounts, TOKEN_AMOUNT);
  const signers = [...setupDestinationSigners, vaultAuthorityPair];
  const tx = new Transaction().add(...setupDestinationIxs).add(withdrawIx);
  try {
    await transactionHandler.sendAndConfirmTransaction(tx, signers);
  } catch (err) {
    assertError(t, err, [
      /Withdraw Token from Safety Deposit Box/i,
      /safety deposit.+does not belong to this vault/i,
    ]);
    const cusperError = cusper.errorFromProgramLogs(err.logs);
    t.ok(
      cusperError instanceof SafetyDepositBoxVaultMismatchError,
      'is SafetyDepositBoxVaultMismatch error',
    );
  }
  await assertCombinedVault(t, connection, initVaultAccounts, {
    allowFurtherShareCreation: true,
    tokenTypeCount: 0,
  });
  // Store of other vault should not have been debited
  const storeAccount = await getAccount(connection, otherVaultSafetyDeposit.store);
  spok(t, storeAccount, {
    $topic: 'safetyDeposit: store account',
    mint: otherVaultSafetyDeposit.tokenMint,
    amount: spokSameBignum(TOKEN_AMOUNT),
  });
});
// -----------------
// Helpers
// -----------------
// Sets up a safety deposit (mint, token account, store) for the given vault
// and — unless `opts.forgetToMakeDeposit` is set — registers it with the
// inactive vault. Returns the setup so tests can reference its accounts.
async function addSafetyDeposit(
  t: test.Test,
  transactionHandler: PayerTransactionHandler,
  connection: Connection,
  vaultAuthorityPair: Keypair,
  accounts: {
    vault: PublicKey;
    payer: PublicKey;
  },
  mintAmount: number,
  opts: { forgetToMakeDeposit?: boolean } = {},
) {
  const safetyDepositSetup = await SafetyDepositSetup.create(connection, {
    payer: accounts.payer,
    vault: accounts.vault,
    mintAmount,
  });
  addressLabels.addLabels(safetyDepositSetup);
  const addTokenIx = await addTokenToInactiveVault(safetyDepositSetup, {
    payer: accounts.payer,
    vaultAuthority: vaultAuthorityPair.publicKey,
  });
  const tx = new Transaction().add(...safetyDepositSetup.instructions);
  const signers = [...safetyDepositSetup.signers];
  // The setup instructions always run; the actual deposit may be deliberately
  // skipped to create invalid scenarios for the failure tests.
  if (opts.forgetToMakeDeposit !== true) {
    tx.add(addTokenIx);
    signers.push(safetyDepositSetup.transferAuthorityPair, vaultAuthorityPair);
  }
  const res = await transactionHandler.sendAndConfirmTransaction(tx, signers);
  assertConfirmedTransaction(t, res.txConfirmed);
  return safetyDepositSetup;
}
// Activates the vault (with zero fraction shares) and immediately combines it,
// performing both steps in a single transaction.
async function activateAndCombineVault(
  t: test.Test,
  transactionHandler: PayerTransactionHandler,
  connection: Connection,
  vaultAuthorityPair: Keypair,
  accounts: {
    vault: PublicKey;
    fractionMint: PublicKey;
    fractionTreasury: PublicKey;
    redeemTreasury: PublicKey;
    priceMint: PublicKey;
    pricingLookupAddress: PublicKey;
    payer: PublicKey;
  },
) {
  const activateVaultIx = await activateVault(
    accounts.vault,
    {
      vault: accounts.vault,
      vaultAuthority: vaultAuthorityPair.publicKey,
      fractionMint: accounts.fractionMint,
      fractionTreasury: accounts.fractionTreasury,
    },
    0,
  );
  // Assemble every account/instruction required for combining the vault.
  const combineSetup: CombineVaultSetup = await CombineVaultSetup.create(connection, {
    vault: accounts.vault,
    vaultAuthority: vaultAuthorityPair.publicKey,
    fractionMint: accounts.fractionMint,
    fractionTreasury: accounts.fractionTreasury,
    redeemTreasury: accounts.redeemTreasury,
    priceMint: accounts.priceMint,
    externalPricing: accounts.pricingLookupAddress,
  });
  await combineSetup.createOutstandingShares(accounts.payer);
  await combineSetup.createPayment(accounts.payer);
  combineSetup.approveTransfers(accounts.payer);
  combineSetup.assertComplete();
  addressLabels.addLabels(combineSetup);
  const combineIx = await combineVault(combineSetup);
  const tx = new Transaction()
    .add(activateVaultIx)
    .add(...combineSetup.instructions)
    .add(combineIx);
  const res = await transactionHandler.sendAndConfirmTransaction(tx, [
    ...combineSetup.signers,
    combineSetup.transferAuthorityPair,
    vaultAuthorityPair,
  ]);
  assertConfirmedTransaction(t, res.txConfirmed);
}
// Creates a vault with a single safety deposit (or a deliberately skipped one,
// see `opts`) and brings it into the Combined state.
async function combinedVaultWithOneDeposit(
  t: test.Test,
  tokenAmount: number,
  opts: { forgetToMakeDeposit?: boolean } = {},
) {
  const vaultInit = await initVault(t, { allowFurtherShareCreation: true });
  const { transactionHandler, connection, accounts: initVaultAccounts } = vaultInit;
  const safetyDeposit = await addSafetyDeposit(
    t,
    transactionHandler,
    connection,
    initVaultAccounts.vaultAuthorityPair,
    initVaultAccounts,
    tokenAmount,
    opts,
  );
  await activateAndCombineVault(
    t,
    transactionHandler,
    connection,
    initVaultAccounts.vaultAuthorityPair,
    initVaultAccounts,
  );
  return { transactionHandler, connection, safetyDeposit, initVaultAccounts };
}
async function combinedVaultWithTwoDeposits(
t: test.Test,
firstTokenAmount: number,
secondTokenAmount: number,
) {
const {
transactionHandler,
connection,
accounts: initVaultAccounts,
} = await initVault(t, { allowFurtherShareCreation: true });
const { vaultAuthorityPair } = initVaultAccounts;
const firstSafetyDeposit = await addSafetyDeposit(
t,
transactionHandler,
connection,
vaultAuthorityPair,
initVaultAccounts,
firstTokenAmount,
);
const secondSafetyDeposit = await addSafetyDeposit(
t,
transactionHandler,
connection,
vaultAuthorityPair,
initVaultAccounts,
secondTokenAmount,
);
await activateAndCombineVault(
t,
transactionHandler,
connection,
vaultAuthorityPair,
initVaultAccounts,
);
return {
transactionHandler,
connection,
firstSafetyDeposit,
secondSafetyDeposit,
initVaultAccounts,
};
} | the_stack |
import * as storage from 'localforage'
import _ from 'lodash'
import { EmoteSets } from 'twitch-js'
import RequestMethod from 'constants/requestMethod'
import { getRandomString } from 'utils/crypto'
import { subDays, subMonths } from 'utils/date'
/**
* Twitch various APIs.
*/
enum TwitchApi {
Auth = 'https://id.twitch.tv/oauth2',
Badges = 'https://badges.twitch.tv/v1/badges',
Helix = 'https://api.twitch.tv/helix',
Kraken = 'https://api.twitch.tv/kraken',
Tmi = 'https://tmi.twitch.tv',
}
/**
* Twitch broadcast type.
*/
export enum BroadcastType {
All = 'all',
Archive = 'archive',
Highlight = 'highlight',
Upload = 'upload',
}
/**
* Twitch clip discovery period.
*/
export enum ClipPeriod {
All = 'all',
Day = 'day',
Month = 'month',
Week = 'week',
}
/**
* Twitch status.
*/
export enum Status {
Disrupted,
Online,
Unknown,
}
/**
* Twitch commercial durations.
*/
export type CommercialDuration = 30 | 60 | 90 | 120 | 150 | 180
/**
* CORS proxy URL.
*/
const ProxyURL = 'https://cors.bridged.cc/'
/**
* The key used to persist a state used while authenticating with Twitch.
*/
const AuthStorageKey = 'persist:YaTA:auth'
/**
* Twitch class.
*/
export default class Twitch {
/**
* Sets the Twitch token and user id to use for authenticated calls.
* @param userId - The user id or null to invalidate.
* @param token - The token or null to invalidate.
*/
public static setAuthDetails(userId: string | null, token: string | null = null) {
Twitch.userId = userId
Twitch.token = token
}
/**
* Returns the Twitch authentication URL.
* @param redirect - The optional channel to use when redirecting the user after authentication.
* @return The auth URL.
*/
public static getAuthURL(redirect: Optional<string>) {
  const { REACT_APP_TWITCH_CLIENT_ID, REACT_APP_TWITCH_REDIRECT_URI } = process.env
  // Random token lets getAuthTokens verify the response came from this session.
  const state: AuthState = {
    token: getRandomString(),
    redirect,
  }
  // NOTE(review): localforage.setItem is async but this method is synchronous,
  // so the state may not be persisted yet when the caller navigates to the
  // returned URL — getAuthTokens throws if the state is missing. `void` marks
  // the promise as intentionally not awaited; confirm whether callers can await.
  void storage.setItem(AuthStorageKey, state)
  const params = {
    client_id: REACT_APP_TWITCH_CLIENT_ID,
    redirect_uri: REACT_APP_TWITCH_REDIRECT_URI,
    response_type: 'token id_token',
    scope:
      'openid chat:read chat:edit channel:moderate whispers:read whispers:edit user_blocks_edit clips:edit user:edit:follows user:edit:broadcast channel:edit:commercial user_subscriptions moderator:manage:automod',
    state: encodeURIComponent(JSON.stringify(state)),
  }
  return Twitch.getUrl(TwitchApi.Auth, '/authorize', params)
}
/**
* Returns the auth response token.
* @param hash - The URL hash to parse
* @return The parsed tokens.
*/
public static async getAuthTokens(hash: string) {
const params = new URLSearchParams(hash.substring(1))
if (!params.has('access_token') || !params.has('id_token') || !params.has('state')) {
throw new Error('Invalid auth response.')
}
const persistedState = await storage.getItem<AuthState>(AuthStorageKey)
storage.removeItem(AuthStorageKey)
if (_.isNil(persistedState)) {
throw new Error('No persisted state in storage.')
}
const stateStr = params.get('state')
if (_.isNil(stateStr)) {
throw new Error('No state in response.')
}
let state: AuthState
try {
state = JSON.parse(decodeURIComponent(stateStr))
} catch (error) {
throw new Error('Unable to parse state from response.')
}
if (state.token !== persistedState.token) {
throw new Error('Invalid state from response.')
}
return {
access: params.get('access_token') as string,
id: params.get('id_token') as string,
redirect: state.redirect,
}
}
/**
* Validates an ID token.
* @param token - The ID token received during authentication.
* @return The verified ID token.
*/
public static async verifyIdToken(token: string) {
const jwk = await Twitch.fetchJWK()
const jose = await import('node-jose')
const keystore = await jose.JWK.asKeyStore(jwk)
const jws = await jose.JWS.createVerify(keystore).verify(token)
const idToken = JSON.parse(jws.payload.toString()) as IdToken
if (_.get(idToken, 'aud') !== process.env.REACT_APP_TWITCH_CLIENT_ID || _.get(idToken, 'iss') !== TwitchApi.Auth) {
throw new Error('Unable to verify ID token.')
}
return idToken
}
/**
* Sanitizes the name of a channel (remove the extra # at the beginning if present).
* @param channel - The channel name to sanitize.
* @return The sanitized name.
*/
public static sanitizeChannel(channel: string) {
  // `String.prototype.substr` is deprecated — use startsWith + slice instead.
  return channel.startsWith('#') ? channel.slice(1) : channel
}
/**
* Returns the URL to use to embed Twitch content.
* @param url - The Twitch URL to embed.
* @return The parent parameter.
*/
public static getTwitchEmbedUrl(url: string): string {
return url.concat('&parent=yata.vercel.app')
}
/**
* Returns an URL based on a URL template returned by Twitch.
* @param templateUrl - The URL template.
* @param params - An object describing the key & associated values for each template segments.
* @return The transformed URL.
*/
public static getTwitchTemplatedUrl(templateUrl: string, params: Record<string, string>): string {
  // Substitute every `%{key}` placeholder with its value, globally.
  let result = templateUrl
  for (const [key, value] of Object.entries(params)) {
    result = result.replace(new RegExp(`%{${key}}`, 'g'), value)
  }
  return result
}
/**
* Opens a specific Twitch channel.
* @param channel - The channel.
*/
public static openChannel(channel: string) {
window.open(`https://www.twitch.tv/${channel}`)
}
/**
* Opens the viewer card of a specific user.
* @param channel - The channel.
* @param username - The user.
*/
public static openViewerCard(channel: string, username: string) {
window.open(
`https://www.twitch.tv/popout/${channel}/viewercard/${username}?popout=`,
'twitchToolsPopupWindow',
'height=600,width=500'
)
}
/**
* Opens the Twitch rewards queue of a specific channel.
* @param channel - The channel.
*/
public static openRewardsQueue(channel: string) {
window.open(
`https://www.twitch.tv/popout/${channel}/reward-queue`,
'twitchRewardsQueuePopupWindow',
'height=500,width=800'
)
}
/**
* Opens the Twitch mod view of a specific channel.
* @param channel - The channel.
*/
public static openModView(channel: string) {
window.open(`https://www.twitch.tv/moderator/${channel}`, 'twitchModViewPopupWindow', 'height=800,width=1200')
}
/**
* Opens the Twitch stream manager of a specific channel.
* @param channel - The channel.
*/
public static openStreamManager(channel: string) {
window.open(
`https://dashboard.twitch.tv/u/${channel}/stream-manager`,
'twitchStreamManagerPopupWindow',
'height=800,width=1200'
)
}
/**
* Opens the Twitch stream summary of a specific channel.
* @param channel - The channel.
*/
public static openStreamSummary(channel: string) {
window.open(
`https://dashboard.twitch.tv/u/${channel}/stream-summary`,
'twitchStreamSummaryPopupWindow',
'height=800,width=1200'
)
}
/**
* Opens the Twitch stream infos of a specific channel.
* @param channel - The channel.
*/
public static openStreamInfos(channel: string) {
window.open(
`https://www.twitch.tv/popout/${channel}/dashboard/live/stream-info`,
'twitchStreamInfosPopupWindow',
'height=600,width=500'
)
}
/**
* Opens the Twitch activity feed of a specific channel.
* @param channel - The channel.
*/
public static openActivityFeed(channel: string) {
window.open(
`https://www.twitch.tv/popout/${channel}/dashboard/live/activity-feed`,
'twitchActivityFeedPopupWindow',
'height=800,width=500'
)
}
/**
* Opens the video player of a specific channel.
* @param channel - The channel.
*/
public static openVideoPlayer(channel: string) {
window.open(
Twitch.getTwitchEmbedUrl(`https://player.twitch.tv/?muted=false&channel=${channel}`),
'videoPopupWindow',
'height=360,width=600'
)
}
/**
* Fetches Twitch badges.
* @param channelId - The id of the channel.
* @return The badges.
*/
public static async fetchBadges(channelId: string): Promise<RawBadges> {
const response = await Promise.all([
(await Twitch.fetch(TwitchApi.Badges, '/global/display')).json(),
(await Twitch.fetch(TwitchApi.Badges, `/channels/${channelId}/display`)).json(),
])
const [globalBadges, channelBadges] = response
return { ...globalBadges.badge_sets, ...channelBadges.badge_sets }
}
/**
* Fetches details about a specific user using its name.
* @param name - The user name.
* @return The user details.
*/
public static async fetchUserByName(name: string): Promise<Optional<RawHelixUser>> {
const response = await Twitch.fetch(TwitchApi.Helix, '/users', { login: name }, true, RequestMethod.Get)
const { data: users }: { data: RawHelixUser[] } = await response.json()
if (users.length === 1) {
return _.first(users)
}
return
}
/**
* Fetches informations about a channel.
* @param channelId - The channel id.
* @return The channel informations.
*/
public static async fetchChannelInformations(channelId: string): Promise<RawChannelInformations> {
const response = await Twitch.fetch(
TwitchApi.Helix,
'/channels',
{ broadcaster_id: channelId },
true,
RequestMethod.Get
)
return (await response.json()).data[0]
}
/**
* Updates a channel informations.
* @param channelId - The id of the channel to update.
* @param title - The channel status / title.
* @param categoryId - The channel category ID.
* @return The updated channel.
*/
public static async updateChannelInformations(channelId: string, title: string, categoryId?: string) {
const params: Record<string, string> = { broadcaster_id: channelId, title }
if (categoryId) {
params['game_id'] = categoryId
}
return Twitch.fetch(TwitchApi.Helix, '/channels', params, true, RequestMethod.Patch)
}
/**
* Starts a commercial on a channel.
* @param channelId - The id of the channel.
* @param duration - The commercial duration.
*/
public static async startCommercial(channelId: string, duration: CommercialDuration) {
const response = await Twitch.fetch(TwitchApi.Helix, '/channels/commercial', undefined, true, RequestMethod.Post, {
broadcaster_id: channelId,
length: duration,
})
return response.json()
}
/**
* Creates a stream marker.
* @param channelId - The id of the channel.
* @param description - A description of the marker.
*/
public static async createMarker(channelId: string, description: Optional<string>) {
const response = await Twitch.fetch(TwitchApi.Helix, '/streams/markers', undefined, true, RequestMethod.Post, {
user_id: channelId,
description,
})
return response.json()
}
/**
* Fetches user emotes.
* @param channelId - The id of the channel.
* @return The channel live notification.
*/
public static async fetchUserEmotes(): Promise<{ emoticon_sets: EmoteSets }> {
if (_.isNil(Twitch.userId)) {
throw new Error('Missing user id for emotes fetching.')
}
const response = await Twitch.fetch(
TwitchApi.Kraken,
`/users/${Twitch.userId}/emotes`,
undefined,
true,
RequestMethod.Get
)
return response.json()
}
/**
* Approves or denies a message rejected by AutoMod.
* @param messageId - The ID of the rejected message.
* @param action - The action to either allow or deny the rejected message.
*/
public static manageAutoModMessage(messageId: string, action: 'allow' | 'deny') {
if (_.isNil(Twitch.userId)) {
throw new Error('Missing source user id for relationship fetching.')
}
return Twitch.fetch(
TwitchApi.Helix,
'/moderation/automod/message',
{
action: action.toUpperCase(),
msg_id: messageId,
user_id: Twitch.userId,
},
true,
RequestMethod.Post
)
}
/**
* Returns the top clips for a specific channel.
* @param channelId - The ID of the channel.
* @param period - The period to include.
* @param [limit=10] - The number of clips to return.
* @return The top clips.
*/
public static async fetchTopClips(channelId: string, period: ClipPeriod, limit = 10): Promise<RawClip[]> {
const params: Record<string, string> = {
broadcaster_id: channelId,
first: limit.toString(),
}
if (period !== ClipPeriod.All) {
const now = new Date()
params['ended_at'] = now.toISOString()
if (period === ClipPeriod.Day) {
params['started_at'] = subDays(now, 1).toISOString()
} else if (period === ClipPeriod.Week) {
params['started_at'] = subDays(now, 7).toISOString()
} else if (period === ClipPeriod.Month) {
params['started_at'] = subMonths(now, 1).toISOString()
}
}
const response = await Twitch.fetch(TwitchApi.Helix, '/clips', params, true, RequestMethod.Get)
return (await response.json()).data
}
/**
* Fetches details about a stream.
* @param channelId - The channel id.
* @return The stream details.
*/
public static async fetchStream(channelId: string): Promise<{ stream: RawStream | null }> {
const response = await Twitch.fetch(TwitchApi.Kraken, `/streams/${channelId}`)
return response.json()
}
/**
* Fetches videos for a channel.
* @param channelId - The channel id.
* @param [limit=10] - Number of videos to return.
* @param [type=BroadcastType.Archive] - Type of videos to return.
* @return The channel videos.
*/
public static async fetchChannelVideos(
channelId: string,
limit = 10,
type = BroadcastType.Archive
): Promise<RawVideo[]> {
const response = await Twitch.fetch(
TwitchApi.Helix,
'/videos',
{
first: limit.toString(),
type,
user_id: channelId,
},
true,
RequestMethod.Get
)
return (await response.json()).data
}
/**
* Creates a clip.
* @param channelId - The channel id.
* @param [withDelay=false] - Add a delay before capturing the clip.
* @return The new clip details.
*/
public static async createClip(channelId: string, withDelay = false): Promise<RawNewClips> {
const params = {
broadcaster_id: channelId,
has_delay: withDelay.toString(),
}
const response = await Twitch.fetch(TwitchApi.Helix, '/clips', params, true, RequestMethod.Post)
return response.json()
}
/**
* Fetches cheermotes.
* @param channelId - The id of the channel.
* @return The cheermotes.
*/
public static async fetchCheermotes(channelId: string): Promise<{ data: RawCheermote[] }> {
const response = await Twitch.fetch(
TwitchApi.Helix,
'/bits/cheermotes',
{
broadcaster_id: channelId,
},
true,
RequestMethod.Get
)
return response.json()
}
/**
* Fetches details about a clip.
* @param clipId - The clip ID.
* @return The clip details.
*/
public static async fetchClip(clipId: string): Promise<RawClip> {
const response = await Twitch.fetch(
TwitchApi.Helix,
'/clips',
{
id: clipId,
},
true,
RequestMethod.Get
)
return (await response.json()).data[0]
}
/**
* Fetches details about a video.
* @param videoId - The video id.
* @return The video details.
*/
public static async fetchVideo(videoId: string): Promise<RawVideo> {
const response = await Twitch.fetch(
TwitchApi.Helix,
'/videos',
{
id: videoId,
},
true,
RequestMethod.Get
)
return (await response.json()).data[0]
}
/**
* Fetches chatters of a specific channel.
* @param channel - The channel.
* @return The chatter.
*/
public static async fetchChatters(channel: string): Promise<RawChattersDetails> {
const response = await fetch(Twitch.getUrl(TwitchApi.Tmi, `/group/user/${channel}/chatters`, undefined, true), {
headers: {
'X-Requested-With': '',
},
})
return response.json()
}
/**
* Fetches follows for the current user which consist of online streams, offline channels and its own online stream if
* streaming.
* @return The streams and channels.
*/
public static async fetchFollows(): Promise<Followers> {
  // The two queries are independent — run them in parallel.
  const [follows, streams] = await Promise.all([
    Twitch.fetchAuthenticatedUserFollows(),
    Twitch.fetchAuthenticatedUserStreams(),
  ])
  // A followed channel is offline when no live stream carries its name.
  // Set lookup avoids the O(follows × streams) scan of the previous version.
  const liveChannelNames = new Set(streams.map((stream) => stream.channel.name))
  const offline = follows
    .map((follow) => follow.channel)
    .filter((channel) => !liveChannelNames.has(channel.name))
  // Include the authenticated user's own stream when they are live.
  // (Uses `Twitch.userId` for consistency with the other static methods.)
  let own: RawStream | null = null
  if (!_.isNil(Twitch.userId)) {
    const { stream } = await Twitch.fetchStream(Twitch.userId)
    if (!_.isNil(stream)) {
      own = stream
    }
  }
  return { offline, online: streams, own }
}
/**
* Fetches the follow relationship between the current user and another user.
* @param targetId - The target user id.
* @return The follow relationship if any.
*/
public static async fetchRelationship(targetId: string) {
if (_.isNil(Twitch.userId)) {
throw new Error('Missing source user id for relationship fetching.')
}
const params = {
from_id: Twitch.userId,
to_id: targetId,
}
const response = await Twitch.fetch(TwitchApi.Helix, '/users/follows', params)
const relationships = (await response.json()) as RawRelationships
if (relationships.total === 1) {
const relationship = _.head(relationships.data)
if (!_.isNil(relationship)) {
return relationship
}
}
return null
}
/**
* Fetches the total number of followers of a specific user.
* @param targetId - The target user id.
* @return The total number of followers.
*/
public static async fetchFollowersCount(targetId: string) {
const response = await Twitch.fetch(TwitchApi.Helix, '/users/follows', { to_id: targetId })
const relationships = (await response.json()) as RawRelationships
return relationships.total
}
/**
* Fetches all follows for the current authenticated user.
* @param [offset=0] - The offset to use while fetching follows.
* @param [limit=100] - The number of follows to fetch per query.
* @return The follows.
*/
public static async fetchAuthenticatedUserFollows(offset = 0, limit = 100): Promise<RawFollow[]> {
const params = {
limit: limit.toString(),
offset: offset.toString(),
sortby: 'last_broadcast',
}
const response = await Twitch.fetch(TwitchApi.Kraken, `/users/${Twitch.userId}/follows/channels`, params, true)
const { follows } = (await response.json()) as RawFollows
let allFollows = [...follows]
if (follows.length === limit) {
const nextFollows = await Twitch.fetchAuthenticatedUserFollows(offset + limit, limit)
allFollows = [...allFollows, ...nextFollows]
}
return allFollows
}
/**
* Fetches all online followed streams for the current authenticated user.
* @param [offset=0] - The offset to use while fetching streams.
* @param [limit=100] - The number of streams to fetch per query.
* @return The streams.
*/
public static async fetchAuthenticatedUserStreams(offset = 0, limit = 100): Promise<RawStream[]> {
  const params = {
    limit: limit.toString(),
    offset: offset.toString(),
    stream_type: 'live',
  }
  const response = await Twitch.fetch(TwitchApi.Kraken, '/streams/followed', params, true)
  const { streams } = (await response.json()) as RawStreams
  let allStreams = [...streams]
  // A full page means there may be more results — recurse for the next page.
  // Checks the page size (`streams`), not the accumulator, mirroring
  // fetchAuthenticatedUserFollows and staying correct if accumulation changes.
  if (streams.length === limit) {
    const nextStreams = await Twitch.fetchAuthenticatedUserStreams(offset + limit, limit)
    allStreams = [...allStreams, ...nextStreams]
  }
  return allStreams
}
/**
* Blocks a user.
* @param targetId - The id of the user to block.
*/
public static blockUser(targetId: string) {
return Twitch.fetch(
TwitchApi.Helix,
'/users/blocks',
{ target_user_id: targetId, source_context: 'chat' },
true,
RequestMethod.Put
)
}
/**
* Unblocks a user.
* @param targetId - The id of the user to unblock.
*/
public static unblockUser(targetId: string) {
return Twitch.fetch(TwitchApi.Helix, '/users/blocks', { target_user_id: targetId }, true, RequestMethod.Delete)
}
/**
* Fetches the schedule of a specific channel.
* @param channelId - The id of the channel.
* @return The schedule of a channel if any.
*/
public static async fetchSchedule(channelId: string) {
const response = await Twitch.fetch(TwitchApi.Helix, '/schedule', {
broadcaster_id: channelId,
first: '25',
utc_offset: new Date().getTimezoneOffset().toString(),
})
const schedule = (await response.json()) as RawSchedule
return schedule.data
}
/**
* Defines if an object is either a stream or a channel.
* @param streamOrChannel - The stream or channel to identify.
* @return `true` of the parameter is a stream.
*/
public static isStream(streamOrChannel: RawStream | RawChannel): streamOrChannel is RawStream {
return !_.isNil(_.get(streamOrChannel, 'stream_type'))
}
private static token: string | null
private static userId: string | null
/**
* Returns the ID of the current authenticated user.
* @return The user ID.
*/
public static getAuthenticatedUserId(): string {
if (_.isNil(Twitch.userId)) {
throw new Error('No user ID found.')
}
return Twitch.userId
}
/**
* Returns the token of the current authenticated user.
* @return The token.
*/
public static getAuthenticatedUserToken(): string {
if (_.isNil(Twitch.token)) {
throw new Error('No token found.')
}
return Twitch.token
}
/**
* Returns the URL for a request.
* @param api - The Twitch API to use.
* @param endpoint - The endpoint to fetch.
* @param [searchParams] - Additional search parameters.
* @param [proxy=false] - `true` to use a CORS proxy.
* @return The URL.
*/
private static getUrl(api: TwitchApi, endpoint: string, searchParams: Record<string, string> = {}, proxy = false) {
const url = new URL(`${proxy ? ProxyURL : ''}${api}${endpoint}`)
_.forEach(searchParams, (value, key) => url.searchParams.set(key, value))
return url.toString()
}
/**
* Fetches an URL.
* @param api - The Twitch API to use.
* @param endpoint - The endpoint to fetch.
* @param [searchParams={}] - Additional search parameters.
* @param [authenticated=false] - Defines if the endpoint requires authentication or not.
* @param [method=RequestMethod.Get] - The request method.
* @param [body] - The request body.
* @param [signal] - A signal to abort the query.
* @return The response.
*/
private static async fetch(
  api: TwitchApi,
  endpoint: string,
  searchParams: Record<string, string> = {},
  authenticated = false,
  method = RequestMethod.Get,
  body?: object,
  signal?: AbortSignal
) {
  const url = Twitch.getUrl(api, endpoint, searchParams)
  // The v5 Accept header is what Kraken expects; other APIs ignore it.
  const headers = new Headers({
    Accept: 'application/vnd.twitchtv.v5+json',
    'Client-ID': process.env.REACT_APP_TWITCH_CLIENT_ID,
    'Content-Type': 'application/json; charset=UTF-8',
  })
  // Helix endpoints always get an auth header, even when the caller did not
  // ask for an authenticated request.
  if (authenticated || api === TwitchApi.Helix) {
    const authHeader = Twitch.getAuthHeader(api)
    _.forEach(authHeader, (value, name) => {
      headers.append(name, value)
    })
  }
  const init: RequestInit = { headers, method }
  if (!_.isNil(body)) {
    init.body = JSON.stringify(body)
  }
  if (!_.isNil(signal)) {
    init.signal = signal
  }
  const request = new Request(url, init)
  const response = await fetch(request)
  if (response.status >= 400) {
    const json = await response.json()
    const originalMessage = _.get(json, 'message', 'Something went wrong.')
    // Twitch sometimes returns a JSON-encoded error object inside `message`;
    // when it starts with '{', parse it once more to extract the real message.
    const message =
      originalMessage.charAt(0) === '{'
        ? _.get(JSON.parse(originalMessage), 'message', 'Something went wrong.')
        : originalMessage
    throw new Error(message)
  }
  return response
}
/**
* Fetches Twitch public JWK.
* @return The JWK.
*/
private static async fetchJWK() {
const jwkReponse = await fetch(Twitch.getUrl(TwitchApi.Auth, '/keys'))
const jwk = await jwkReponse.json()
return jwk as JsonWebKey
}
/**
* Returns an auth header that can be used for authenticated request.
* @param api - The API to get an auth token for.
* @return The header.
*/
private static getAuthHeader(api: TwitchApi) {
if (_.isNil(Twitch.token)) {
throw new Error('Missing token for authenticated request.')
}
return { Authorization: `${api === TwitchApi.Helix ? 'Bearer' : 'OAuth'} ${Twitch.token}` }
}
}
/**
* ID token.
*/
export type IdToken = {
aud: string
azp: string
exp: number
iat: number
iss: string
preferred_username: string
sub: string
}
/**
* Twitch badges.
*/
export type RawBadges = Record<string, { versions: Record<string, RawBadge> }>
/**
* Twitch badge.
*/
export type RawBadge = {
click_action: string
click_url: string
description: string
image_url_1x: string
image_url_2x: string
image_url_4x: string
title: string
}
/**
* Twitch user details returned by the Helix API.
*/
export type RawHelixUser = {
broadcaster_type: 'partner' | 'affiliate' | ''
created_at: string
description: string
display_name: string
email?: string
id: string
login: string
offline_image_url: string
profile_image_url: string
type: 'staff' | 'admin' | 'global_mod' | ''
view_count: number
}
/**
* Twitch channel details.
*/
export type RawChannel = {
mature: boolean
status: string | null
broadcaster_language: string
display_name: string
game: string | null
language: string
_id: string
name: string
created_at: string
updated_at: string
partner: boolean
logo: string
video_banner: string | null
profile_banner: string | null
profile_banner_background_color: string | null
url: string
views: number
followers: number
broadcaster_type: string
description: string
private_video: boolean
privacy_options_enabled: boolean
}
/**
* Twitch channel informations.
*/
export type RawChannelInformations = {
broadcaster_id: string
broadcaster_name: string
broadcaster_language: string
game_id: string
game_name: string
title: string
}
/**
* Twitch chatters details.
*/
type RawChattersDetails = {
chatter_count: number
chatters: RawChatters
}
/**
* Twitch chatters.
*/
export type RawChatters = {
admins: string[]
broadcaster: string[]
global_mods: string[]
moderators: string[]
staff: string[]
viewers: string[]
vips: string[]
}
/**
* Twitch clip.
*/
export type RawClip = {
broadcaster_id: string
broadcaster_name: string
created_at: string
creator_id: string
creator_name: string
embed_url: string
game_id: string
id: string
language: string
thumbnail_url: string
title: string
url: string
video_id: string
view_count: number
}
/**
* Twitch follows.
*/
export type RawFollows = {
follows: RawFollow[]
_total: number
}
/**
* Twitch follow.
*/
export type RawFollow = { created_at: string; notifications: true; channel: RawChannel }
/**
* Twitch streams.
*/
export type RawStreams = {
streams: RawStream[]
_total: number
}
/**
* Twitch stream.
*/
export type RawStream = {
average_fps: number
broadcast_platform: string
channel: RawChannel
community_id: string
community_ids: string[]
created_at: string
delay: number
game: number
is_playlist: boolean
preview: RawPreview
stream_type: string
video_height: number
viewers: number
_id: string
}
/**
* Twitch Cheermote.
*/
export type RawCheermote = {
is_charitable: boolean
last_updated: string
order: number
prefix: string
tiers: RawCheermoteTier[]
type: 'global_first_party' | 'global_third_party' | 'channel_custom' | 'display_only' | 'sponsored'
}
/**
* Twitch Cheermote tier.
*/
type RawCheermoteTier = {
can_cheer: boolean
color: string
id: string
images: Record<CheermoteImageBackground, RawCheermoteImages>
min_bits: number
show_in_bits_card: boolean
}
/**
* Twitch video.
*/
export type RawVideo = {
id: string
user_id: string
user_name: string
title: string
description: string
created_at: string
published_at: string
url: string
thumbnail_url: string
viewable: 'public' | 'private'
view_count: number
language: string
type: Exclude<BroadcastType, BroadcastType.All>
duration: string
}
/**
* Twitch preview.
*/
type RawPreview = {
large: string
medium: string
small: string
template: string
}
/**
* Twitch new clip.
*/
type RawNewClips = {
data: Array<{ edit_url: string; id: string }>
}
/**
* Twitch follow relationships.
*/
type RawRelationships = {
data: RawRelationship[]
total: number
pagination: {
cursor: string
}
}
/**
* Twitch follow relationship.
*/
export type RawRelationship = {
from_id: string
to_id: string
followed_at: string
}
/**
* Twitch host.
*/
export type RawHost = {
  host_id: string
  // NOTE(review): `targer_id` looks like a typo for `target_id` — but this is
  // an exported type, so renaming the field would break consumers; confirm
  // against the actual API payload and all usages before changing it.
  targer_id: string
  host_login: string
  target_login: string
  host_display_name: string
  target_display_name: string
}
/**
* Twitch schedule.
*/
export type RawSchedule = {
  data: {
    segments: RawScheduleSegment[]
    broadcaster_id: string
    // Was the boxed wrapper type `String`; primitive `string` is correct here.
    broadcaster_name: string
    broadcaster_login: string
    // Vacation window during which no segments are scheduled, when set.
    vacation: null | { start_time: string; end_time: string }
  }
  pagination: {
    cursor: string
  }
}
/**
* Twitch schedule segment.
*/
export type RawScheduleSegment = {
id: string
start_time: string
end_time: string
title: string
canceled_until: string | null
category: null | {
id: string
name: string
}
is_recurring: boolean
}
/**
* Twitch Cheermote images.
*/
type RawCheermoteImages = Record<CheermoteImageType, RawCheermoteImage>
/**
* Twitch Cheermote image.
*/
export type RawCheermoteImage = Record<CheermoteImageScales, string>
/**
* Cheermotes related types.
*/
export type CheermoteImageBackground = 'dark' | 'light'
type CheermoteImageType = 'static' | 'animated'
type CheermoteImageScales = '1' | '1.5' | '2' | '3' | '4'
/**
* Online stream, offline channel and own stream if online.
*/
export type Followers = {
offline: RawChannel[]
online: RawStream[]
own: RawStream | null
}
/**
* Online stream or offline channel.
*/
export type Follower = RawStream | RawChannel
/**
* State send to Twitch while authenticating.
*/
interface AuthState {
token: string
redirect?: string
} | the_stack |
/// <reference path="../metricsPlugin.ts"/>
module HawkularMetrics {
export interface IMultiDataPoint {
key: string;
color: string;
values: IChartDataPoint[];
}
/**
 * Enum-like descriptor for a JVM metrics tab. Each static instance ties
 * together the WildFly metric name, an optional display key, an optional
 * chart color and an optional key used to store aggregate statistics.
 */
class JVMMetricsTabType {
  private _key: string;
  private _metricName: string;
  private _color: IColor;
  private _statisticsKey: string;

  public static HEAP_COMMITTED = new JVMMetricsTabType('Heap Committed', 'Heap Committed', '#515252');
  public static HEAP_USED = new JVMMetricsTabType('Heap Used', 'Heap Used', '#1884c7', 'heapUsage');
  public static HEAP_MAX = new JVMMetricsTabType('Heap Max', 'Heap Max', '#f57f20', 'heapMax');
  public static NON_HEAP_COMMITTED = new JVMMetricsTabType('NonHeap Committed');
  public static NON_HEAP_USED = new JVMMetricsTabType('NonHeap Used');
  public static ACCUMULATED_GC_DURATION = new JVMMetricsTabType('Accumulated GC Duration', undefined, undefined,
    'accGCDuration');

  constructor(metricName: string, key?: string, color?: IColor, statisticsKey?: string) {
    this._metricName = metricName;
    this._statisticsKey = statisticsKey;
    this._color = color;
    this._key = key;
  }

  /** Display key; falls back to the metric name when no explicit key was set. */
  public getKey() {
    return this._key ? this._key : this._metricName;
  }

  /** Bare metric name. */
  public getMetricName() {
    return this._metricName;
  }

  /** Fully qualified WildFly memory metric name. */
  public getWildflyFullMetricName() {
    return 'WildFly Memory Metrics~' + this._metricName;
  }

  /** Chart color, if one was assigned. */
  public getColor() {
    return this._color;
  }

  /** Controller property name for aggregate statistics, if one was assigned. */
  public getStatisticsKey() {
    return this._statisticsKey;
  }
}
export class AppServerJvmDetailsController implements IRefreshable {
public static MAX_HEAP = 1024 * 1024 * 1024;
public static BYTES2MB = 1 / 1024 / 1024;
public math = Math;
public alertList: any[] = [];
public chartHeapData: IMultiDataPoint[];
public chartNonHeapData: IMultiDataPoint[];
public startTimeStamp: TimestampInMillis;
public endTimeStamp: TimestampInMillis;
public chartGCDurationData: IChartDataPoint[];
public contextChartHeapUsedData: IContextChartDataPoint[];
public contextChartNonHeapUsedData: IContextChartDataPoint[];
public contextChartGCDurationData: IContextChartDataPoint[];
// will contain in the format: 'metric name' : true | false
public skipChartData = {};
private feedId: FeedId;
private resourceId: ResourceId;
constructor(private $scope: any,
private $rootScope: IHawkularRootScope,
private $interval: ng.IIntervalService,
private $routeParams: any,
private $log: ng.ILogService,
private HawkularNav: any,
private HawkularAlertRouterManager: IHawkularAlertRouterManager,
private MetricsService: IMetricsService,
private $q: ng.IQService) {
$scope.vm = this;
this.feedId = this.$routeParams.feedId;
this.resourceId = this.$routeParams.resourceId + '~~';
this.startTimeStamp = +moment().subtract(($routeParams.timeOffset || 3600000), 'milliseconds');
this.endTimeStamp = +moment();
this.chartHeapData = [];
this.chartNonHeapData = [];
if ($rootScope.currentPersona) {
this.refresh();
} else {
// currentPersona hasn't been injected to the rootScope yet, wait for it..
$rootScope.$watch('currentPersona',
(currentPersona) => currentPersona && this.refresh());
}
// handle drag ranges on charts to change the time range
this.$scope.$on(EventNames.CHART_TIMERANGE_CHANGED, (event, data: Date[]) => {
this.changeTimeRange(data);
});
// handle drag ranges on charts to change the time range
this.$scope.$on('ContextChartTimeRangeChanged', (event, data: Date[]) => {
this.$log.debug('Received ContextChartTimeRangeChanged event' + data);
this.changeTimeRange(data);
});
this.HawkularAlertRouterManager.registerForAlerts(
this.$routeParams.feedId + '/' + this.$routeParams.resourceId,
'jvm',
_.bind(this.filterAlerts, this)
);
this.autoRefresh(20);
}
private changeTimeRange(data: Date[]): void {
this.startTimeStamp = data[0].getTime();
this.endTimeStamp = data[1].getTime();
this.HawkularNav.setTimestampStartEnd(this.startTimeStamp, this.endTimeStamp);
this.refresh();
}
private autoRefreshPromise: ng.IPromise<number>;
private autoRefresh(intervalInSeconds: number): void {
this.autoRefreshPromise = this.$interval(() => {
this.refresh();
}, intervalInSeconds * 1000);
this.$scope.$on('$destroy', () => {
this.$interval.cancel(this.autoRefreshPromise);
});
}
/**
 * Recomputes the visible time window from the route (falling back to
 * "last hour, ending now") and reloads charts, aggregates and alerts.
 */
public refresh(): void {
  const windowEnd = this.$routeParams.endTime || +moment();
  const windowOffset = this.$routeParams.timeOffset || 3600000;
  this.endTimeStamp = windowEnd;
  this.startTimeStamp = windowEnd - windowOffset;
  this.getJvmContextChartData();
  this.getJvmData();
  this.getAlerts();
  this.$rootScope.lastUpdateTimestamp = new Date();
}
/**
 * Keeps only JVM-related alerts (heap, non-heap, garbage collection),
 * stamping each kept alert with its context's alertType. The incoming
 * list is filtered in place (via _.remove) and stored on the controller.
 */
public filterAlerts(alertData: IHawkularAlertQueryResult) {
  const kept = alertData.alertList;
  _.remove(kept, (alert: IAlert) => {
    const type = alert.context.alertType;
    if (type === 'PHEAP' || type === 'NHEAP' || type === 'GARBA') {
      alert.alertType = type;
      return false;
    }
    return true;
  });
  this.alertList = kept;
}
/**
 * Requests the alerts for the current resource within the active time
 * window; results come back through filterAlerts via the alert router.
 */
private getAlerts(): void {
  this.HawkularAlertRouterManager.getAlertsForCurrentResource(this.startTimeStamp, this.endTimeStamp);
}
/**
 * Loads everything shown on the JVM tab: the detailed charts and the
 * aggregate statistics row (both fetch asynchronously and independently).
 */
public getJvmData(): void {
  this.getJvmChartData();
  this.getJvmAggregateStatistics();
}
// Fetches single-bucket (whole-window) aggregates for the statistics row:
// heap used, heap max and accumulated GC duration.
private getJvmAggregateStatistics(): void {
this.MetricsService.retrieveGaugeMetrics(this.$rootScope.currentPersona.id,
MetricsService.getMetricId('M', this.feedId, this.resourceId,
JVMMetricsTabType.HEAP_USED.getWildflyFullMetricName()),
this.startTimeStamp, this.endTimeStamp, 1).then((resource: IChartDataPoint[]) => {
if (resource.length) {
// Stored on the controller under the tab-type specific statistics key.
this[JVMMetricsTabType.HEAP_USED.getStatisticsKey()] = resource[0];
}
});
this.MetricsService.retrieveGaugeMetrics(this.$rootScope.currentPersona.id,
MetricsService.getMetricId('M', this.feedId, this.resourceId,
JVMMetricsTabType.HEAP_MAX.getWildflyFullMetricName()),
this.startTimeStamp, this.endTimeStamp, 1).then((resource) => {
if (resource.length) {
this[JVMMetricsTabType.HEAP_MAX.getStatisticsKey()] = resource[0];
// Remember max heap so other chart code can scale against it.
AppServerJvmDetailsController.MAX_HEAP = resource[0].max;
}
});
this.MetricsService.retrieveCounterMetrics(this.$rootScope.currentPersona.id,
MetricsService.getMetricId('M', this.feedId, this.resourceId,
JVMMetricsTabType.ACCUMULATED_GC_DURATION.getWildflyFullMetricName()),
this.startTimeStamp, this.endTimeStamp, 1).then((resource) => {
if (resource.length) {
// GC duration is a monotonically growing counter; the window's total is max - min.
this[JVMMetricsTabType.ACCUMULATED_GC_DURATION.getStatisticsKey()] = (resource[0].max - resource[0].min);
}
});
}
// Loads the low-granularity "context" (overview) charts. They span a much
// longer period (globalContextChartTimePeriod back from the window end),
// hence the larger number of data points.
private getJvmContextChartData(): void {
// because the time range is so much greater here we need more points of granularity
const contextStartTimestamp = +moment(this.endTimeStamp).subtract(1, globalContextChartTimePeriod);
this.MetricsService.retrieveGaugeMetrics(this.$rootScope.currentPersona.id,
MetricsService.getMetricId('M', this.feedId, this.resourceId,
JVMMetricsTabType.HEAP_USED.getWildflyFullMetricName()),
contextStartTimestamp, this.endTimeStamp, globalNumberOfContextChartDataPoints).then((contextData) => {
this.contextChartHeapUsedData = MetricsService.formatContextChartOutput(contextData);
});
this.MetricsService.retrieveGaugeMetrics(this.$rootScope.currentPersona.id,
MetricsService.getMetricId('M', this.feedId, this.resourceId,
JVMMetricsTabType.NON_HEAP_USED.getWildflyFullMetricName()),
contextStartTimestamp, this.endTimeStamp, globalNumberOfContextChartDataPoints).then((contextData) => {
this.contextChartNonHeapUsedData = MetricsService.formatContextChartOutput(contextData);
});
// GC duration is a counter, so it is charted as a rate.
this.MetricsService.retrieveCounterRateMetrics(this.$rootScope.currentPersona.id,
MetricsService.getMetricId('M', this.feedId, this.resourceId,
JVMMetricsTabType.ACCUMULATED_GC_DURATION.getWildflyFullMetricName()),
contextStartTimestamp, this.endTimeStamp, globalNumberOfContextChartDataPoints).then((contextData) => {
this.contextChartGCDurationData = MetricsService.formatContextChartOutput(contextData);
});
}
private getJvmChartData(): void {
let tmpChartHeapData = [];
let heapPromises = [];
let tmpChartNonHeapData = [];
let nonHeapPromises = [];
const heapCommitted = JVMMetricsTabType.HEAP_COMMITTED;
if (!this.skipChartData[heapCommitted.getKey()]) {
let hCommPromise = this.MetricsService.retrieveGaugeMetrics(this.$rootScope.currentPersona.id,
MetricsService.getMetricId('M', this.feedId, this.resourceId,
heapCommitted.getWildflyFullMetricName()),
this.startTimeStamp, this.endTimeStamp, 60);
heapPromises.push(hCommPromise);
hCommPromise.then((data) => {
tmpChartHeapData[tmpChartHeapData.length] = {
key: heapCommitted.getKey(),
color: heapCommitted.getColor(),
values: MetricsService.formatBucketedChartOutput(data, AppServerJvmDetailsController.BYTES2MB)
};
});
}
const heapUsed = JVMMetricsTabType.HEAP_USED;
if (!this.skipChartData[heapUsed.getKey()]) {
let hUsedPromise = this.MetricsService.retrieveGaugeMetrics(this.$rootScope.currentPersona.id,
MetricsService.getMetricId('M', this.feedId, this.resourceId,
heapUsed.getWildflyFullMetricName()),
this.startTimeStamp, this.endTimeStamp, 60);
heapPromises.push(hUsedPromise);
hUsedPromise.then((data) => {
let chartHeapDataUsed = MetricsService.formatBucketedChartOutput(data,
AppServerJvmDetailsController.BYTES2MB);
tmpChartHeapData[tmpChartHeapData.length] = {
key: heapUsed.getKey(),
color: heapUsed.getColor(),
values: chartHeapDataUsed
};
});
}
const heapMax = JVMMetricsTabType.HEAP_MAX;
if (!this.skipChartData[heapMax.getKey()]) {
let hMaxPromise = this.MetricsService.retrieveGaugeMetrics(this.$rootScope.currentPersona.id,
MetricsService.getMetricId('M', this.feedId, this.resourceId,
heapMax.getWildflyFullMetricName()),
this.startTimeStamp, this.endTimeStamp, 60);
heapPromises.push(hMaxPromise);
hMaxPromise.then((data) => {
tmpChartHeapData[tmpChartHeapData.length] = {
key: heapMax.getKey(),
color: heapMax.getColor(),
values: MetricsService.formatBucketedChartOutput(data, AppServerJvmDetailsController.BYTES2MB)
};
});
}
this.$q.all(heapPromises).finally(() => {
this.chartHeapData = tmpChartHeapData;
});
const nonHeapCommitted = JVMMetricsTabType.NON_HEAP_COMMITTED;
if (!this.skipChartData[nonHeapCommitted.getKey()]) {
let nhCommPromise = this.MetricsService.retrieveGaugeMetrics(this.$rootScope.currentPersona.id,
MetricsService.getMetricId('M', this.feedId, this.resourceId,
nonHeapCommitted.getWildflyFullMetricName()),
this.startTimeStamp, this.endTimeStamp, 60);
nonHeapPromises.push(nhCommPromise);
nhCommPromise.then((data) => {
tmpChartNonHeapData[tmpChartNonHeapData.length] = {
key: nonHeapCommitted.getKey(),
color: nonHeapCommitted.getColor(),
values: MetricsService.formatBucketedChartOutput(data, AppServerJvmDetailsController.BYTES2MB)
};
});
}
const nonHeapUsed = JVMMetricsTabType.NON_HEAP_USED;
if (!this.skipChartData[nonHeapUsed.getKey()]) {
let nhUsedPromise = this.MetricsService.retrieveGaugeMetrics(this.$rootScope.currentPersona.id,
MetricsService.getMetricId('M', this.feedId, this.resourceId,
nonHeapUsed.getWildflyFullMetricName()),
this.startTimeStamp, this.endTimeStamp, 60);
nonHeapPromises.push(nhUsedPromise);
nhUsedPromise.then((data) => {
tmpChartNonHeapData[tmpChartNonHeapData.length] = {
key: nonHeapUsed.getKey(),
color: nonHeapUsed.getColor(),
values: MetricsService.formatBucketedChartOutput(data, AppServerJvmDetailsController.BYTES2MB)
};
});
}
this.$q.all(nonHeapPromises).finally(() => {
this.chartNonHeapData = tmpChartNonHeapData;
});
const accumulatedGCDuration = JVMMetricsTabType.ACCUMULATED_GC_DURATION;
this.MetricsService.retrieveCounterRateMetrics(this.$rootScope.currentPersona.id,
MetricsService.getMetricId('M', this.feedId, this.resourceId,
accumulatedGCDuration.getWildflyFullMetricName()),
this.startTimeStamp, this.endTimeStamp, 60).then((resource) => {
if (resource.length) {
this.chartGCDurationData = MetricsService.formatBucketedChartOutput(resource);
}
});
}
/**
 * Toggles visibility of one chart series and refetches the chart data so
 * the hidden series is skipped on the next load.
 * @param name series key as produced by JVMMetricsTabType#getKey()
 *             (was an implicit any; typed as string)
 */
public toggleChartData(name: string): void {
  this.skipChartData[name] = !this.skipChartData[name];
  this.getJvmChartData();
}
}
// Register the controller on the Angular module so routes/templates can resolve it by name.
_module.controller('AppServerJvmDetailsController', AppServerJvmDetailsController);
}
import {
logger,
Logger,
InitializedEvent,
Response,
Event,
TerminatedEvent,
BreakpointEvent,
Thread,
ThreadEvent,
CapabilitiesEvent,
OutputEvent,
DebugSession as SessionImpl,
ProgressStartEvent,
ProgressEndEvent
} from "vscode-debugadapter";
import { DebugProtocol as P } from "./protocol";
import { toThread, Handler as CommsHandler } from "./comms";
import { Message } from "vscode-debugadapter/lib/messages";
import { spawn, ChildProcess } from "child_process";
import * as path from "path";
import subscribe from "./wscomms";
import { totalmem } from "os";
// Cap node's old-space for spawned debuggees at half the machine's physical memory (in MB).
const MAX_OLD_SPACE = Math.floor(totalmem() / (2 * 1024 * 1024));
// One debuggee connection; additionally tracks the data breakpoints installed on it.
interface Handler extends CommsHandler {
dataBreakpoints?: Set<P.Breakpoint>;
}
/**
 * On win32, upper-cases the drive letter of a path ("c:\x" -> "C:\x") so
 * path comparisons are canonical; a no-op on other platforms or for
 * strings without a drive prefix.
 */
const normalizeDrive =
  typeof process !== "undefined" && process.platform === "win32"
    ? function normalizeDrive(p: string) {
        const hasDrivePrefix = p && p.length > 2 && p[1] === ":";
        return hasDrivePrefix ? p[0].toUpperCase() + p.substring(1) : p;
      }
    : function (p: string) {
        return p;
      };
/**
 * Extracts the package part of a require-style specifier:
 * "@scope/pkg/sub" -> "@scope/pkg", "pkg/sub" -> "pkg".
 * Relative/absolute/drive-letter paths collapse to their first character.
 */
function packageBase(name: string) {
  const first = name[0];
  if (first === "@") return name.split("/", 2).join("/");
  const looksLikePath =
    first === "." || first === "/" || first === "~" || name[1] === ":";
  if (looksLikePath) return first;
  return name.split("/")[0];
}
// Reusable child processes keyed by command line + env (see the "reuse" launch option).
const runningCommands: Map<string, ChildProcess> = new Map();
// How long to wait for the client's runInTerminal response (ms).
const RUNINTERMINAL_TIMEOUT = 5000;
// How long launchRequest waits for configurationDone before proceeding anyway (ms).
const CONFIGURATION_DONE_REQUEST_TIMEOUT = 1000;
// Book-keeping for one client breakpoint, shared between all debuggee threads.
interface BreakpointInfo {
id: number;
source: P.Source;
// Per-remote (thread id) verification results for this breakpoint.
remotes: Map<number, P.Breakpoint>;
// What the client requested.
request: P.SourceBreakpoint;
// What we last reported back to the client.
response: P.Breakpoint;
}
// Monotonic counter used to mint unique progress-report ids.
let progressCnt = 0;
// Shown when automatic installation of the runtime fails.
// (Identifier spelling "INSTAL" kept as-is; it is internal only.)
const INSTAL_INSTRUCTION = `
Please, install "@effectful/debugger" manually:
$ npm install -g @effectful/debugger
And link it to your project:
$ npm link @effectful/debugger
WARNING: Installing it as your project local dependency won't work.
The runtime and the project dependencies shouldn't be deduped together.
`;
// Shown when a "browser"/"next" preset is requested (no longer supported).
const BROWSERS_ZERO_CONFIG_NOT_SUPPORTED = `
Unfortunately, I had to remove zero-config for nextjs/browser.
I don't have time to cope with all the breaking changes in the
dependencies. Maybe I'll restore it in the future.
If you can help, please let me know.
Meanwhile you still can use the debugger with nextjs and browser but
with a simple configuration (see README).
`;
export class DebugSession extends SessionImpl {
// Connected debuggees keyed by thread id.
private remotes: Map<number, Handler> = new Map();
// Resolves launchRequest's wait for the first debuggee connection.
private connectCb?: (h?: Handler) => void;
public progressHandler?: (n: string) => () => void;
public showError?: (n: string) => void;
// Child spawned for non-"reuse" launches; killed on shutdown.
private childProcess: ChildProcess | undefined;
private stopped: boolean = false;
private supportsRunInTerminalRequest = false;
// Grace period (ms) to await a debuggee reconnection; undefined = none, negative = forever.
private awaitReconnect: number | undefined;
private stopComms?: () => void;
private launchArgs: P.LaunchRequestArguments | undefined;
private launched = false;
// Last setExceptionBreakpoints arguments, replayed to late-joining threads.
private exceptionArgs: P.SetExceptionBreakpointsArguments | undefined;
private exitCode: number | undefined;
// Breakpoint book-keeping: by source (path or reference) and by breakpoint id.
private breakpointsSrcs: Map<string | number, BreakpointInfo[]> = new Map();
private breakpointsIds: Map<number, BreakpointInfo> = new Map();
// Remotes whose childSetBreakpoints acknowledgement is still pending.
private breakpointsResponseRemotes?: Set<number>;
private breakpointsCount = 0;
private breakpointsResponse?: P.SetBreakpointsResponse;
// Default destination for requests carrying no thread reference.
private lastThread = 0;
private supportsProgress = false;
// Used to de-duplicate thread display names ("Thread", "Thread[1]", ...).
private knownThreadNames: { [name: string]: number } = {};
/**
* Creates a new debug adapter that is used for one debug session.
* We configure the default implementation of a debug adapter here.
*/
public constructor() {
super(true);
// super("effectful-debug.log");
// this.obsolete_logFilePath = obsolete_logFilePath;
this.on("error", event => {
logger.error(event.body);
});
// Both sides of this adapter use 0-based lines/columns.
this.setDebuggerLinesStartAt1(false);
this.setDebuggerColumnsStartAt1(false);
}
/**
 * Starts the session on the given streams and wires the logger so its
 * output is forwarded to the client as events.
 */
start(inStream: NodeJS.ReadableStream, outStream: NodeJS.WritableStream) {
  super.start(inStream, outStream);
  const forward = e => {
    this.sendEvent(e);
  };
  logger.init(forward, "effectful-debug.log", this._isServer);
}
sendEvent(event: P.Event): void {
if (event.event !== "output")
logger.verbose(`sendEvent: ${JSON.stringify(event)}`);
super.sendEvent(event);
}
/** Sends a request to the client, tracing command, arguments and timeout. */
sendRequest(
  command: string,
  args: any,
  timeout: number,
  cb: (response: P.Response) => void
) {
  const commandDump = JSON.stringify(command);
  const argsDump = JSON.stringify(args);
  logger.verbose(`sendRequest: ${commandDump}(${argsDump}), timeout: ${timeout}`);
  super.sendRequest(command, args, timeout, cb);
}
/** Sends a response to the client, tracing its full payload first. */
sendResponse(response: P.Response) {
  const dump = JSON.stringify(response);
  logger.verbose(`sendResponse: ${dump}`);
  super.sendResponse(response);
}
// Removes a disconnected debuggee: un-verifies its breakpoints, emits a
// thread-exit event and, once no remotes remain (after an optional
// reconnect grace period), terminates the whole session.
private async closeRemote(remoteId: number) {
const remote = this.remotes.get(remoteId);
this.remotes.delete(remoteId);
for (const i of this.breakpointsIds.values()) {
i.remotes.delete(remoteId);
// No thread verifies this breakpoint any more -> report it unverified.
if (!i.remotes.size) {
i.response.verified = false;
this.sendEvent(new BreakpointEvent("changed", i.response));
}
}
if (remote) {
// Data breakpoints live on a single thread; they die with it.
if (remote.dataBreakpoints) {
for (const i of remote.dataBreakpoints) {
i.verified = false;
this.sendEvent(
new Event("breakpoint", {
reason: "changed",
breakpoint: { id: i.id, verified: false }
})
);
}
}
this.sendEvent(new ThreadEvent("exited", remoteId));
}
if (!this.remotes.size) {
// awaitReconnect < 0 means "wait forever"; otherwise wait that many ms
// for a debuggee to reconnect before giving up.
const reconnect = this.awaitReconnect || 0;
if (reconnect < 0) return;
await new Promise(i => setTimeout(i, reconnect));
if (this.remotes.size) return;
this.terminate(
this.exitCode
? `the main command exited with exit code ${this.exitCode}`
: "all threads are finished"
);
}
}
/**
* The 'initialize' request is the first request called by the frontend
* to interrogate the features the debug adapter provides.
*/
protected initializeRequest(
response: P.InitializeResponse,
args: P.InitializeRequestArguments
): void {
// Capture the client capabilities this adapter relies on later.
this.supportsRunInTerminalRequest = !!args.supportsRunInTerminalRequest;
this.supportsProgress = !!args.supportsProgressReporting;
response.body = response.body || {};
// Advertise this adapter's capabilities (time-travel dependent ones are
// refined later via a CapabilitiesEvent from launchRequest).
response.body.supportsConfigurationDoneRequest = true;
response.body.supportsStepBack = true;
response.body.supportsSetVariable = false;
response.body.supportsSetExpression = true;
response.body.supportsTerminateRequest = true;
response.body.supportTerminateDebuggee = true;
response.body.supportsLogPoints = true;
response.body.supportsHitConditionalBreakpoints = true;
response.body.supportsConditionalBreakpoints = true;
response.body.supportsFunctionBreakpoints = false;
response.body.supportsEvaluateForHovers = false;
response.body.supportsCompletionsRequest = false;
response.body.supportsRestartRequest = false;
response.body.supportsRestartFrame = false;
response.body.supportsExceptionOptions = true;
response.body.supportsExceptionInfoRequest = false;
response.body.supportsValueFormattingOptions = false;
response.body.supportsTerminateThreadsRequest = true;
response.body.supportsDataBreakpoints = false;
response.body.supportsReadMemoryRequest = false;
response.body.supportsDisassembleRequest = false;
response.body.supportsCancelRequest = false;
response.body.supportsBreakpointLocationsRequest = true;
response.body.supportsStepInTargetsRequest = false;
response.body.exceptionBreakpointFilters = [
{ filter: "all", label: "All Exceptions", default: false },
{ filter: "uncaught", label: "Uncaught Exceptions", default: true }
];
this.sendResponse(response);
// Tell the client it may now send configuration (breakpoints etc.).
this.sendEvent(new InitializedEvent());
}
/** Broadcasts a request to every connected debuggee (no-op once stopped). */
private async sendAll(request: P.Request) {
  if (this.stopped) return;
  for (const remote of this.remotes.values()) remote.send(request);
}
/**
 * Routes a request to the debuggee owning `threadId`. Returns false (and
 * reaps the stale thread) when no such remote is connected; otherwise the
 * thread becomes the default target for thread-less requests.
 */
protected sendToThread(threadId: number, msg: P.Request) {
  const remote = this.remotes.get(threadId);
  if (remote) {
    this.lastThread = threadId;
    remote.send(msg);
    return true;
  }
  logger.verbose(`no remote ${threadId}`);
  this.closeRemote(threadId);
  return false;
}
/**
 * Routes client requests: broadcasts (restart, exception breakpoints,
 * terminate), per-thread forwarding (source, stepping, evaluation),
 * local handling (breakpoints, data breakpoints) or the base
 * implementation for everything else. Requests forwarded to debuggees
 * are renamed to their child* counterparts so children can tell them apart.
 */
protected async dispatchRequest(request: P.Request) {
  logger.verbose(`dispatchRequest: ${JSON.stringify(request)}`);
  if (this.stopped) return;
  switch (request.command) {
    case "restart":
      this.sendAll({ ...request, command: "childRestart" });
      this.sendResponse(new Response(request));
      return;
    case "setExceptionBreakpoints":
      this.sendResponse(new Response(request));
      // Remembered so threads connecting later get the same settings.
      this.exceptionArgs = request.arguments;
      this.sendAll({ ...request, command: "childSetExceptionBreakpoints" });
      return;
    case "breakpointLocations":
      this.doBreakpointsLocations(<P.BreakpointLocationsRequest>request);
      return;
    case "setBreakpoints":
      this.doSetBreakpoints(<P.SetBreakpointsRequest>request);
      return;
    case "terminateThreads":
      const threadIds = request.arguments.threadIds;
      if (threadIds)
        for (const i of threadIds)
          this.sendToThread(i, { ...request, command: "childTerminate" });
      this.sendResponse(new Response(request));
      break;
    case "source":
      {
        const args: any = request.arguments;
        if (
          args.sourceReference != null &&
          this.sendToThread(toThread(args.sourceReference), request)
        )
          return;
      }
      break;
    case "setDataBreakpoints":
      {
        const args: any = request.arguments;
        const byThread = new Map<number, P.DataBreakpoint[]>();
        const responseBreakpoints: P.Breakpoint[] = [];
        for (const i of args.breakpoints) {
          if (i.enabled === false) continue;
          // The owning thread id is encoded as the dataId's leading digits.
          const threadId: number = +(i.dataId?.match(/^(\d+)/) || [])[1];
          if (isNaN(threadId)) continue;
          const remote = this.remotes.get(threadId);
          if (!remote) {
            responseBreakpoints.push({ id: i.id, verified: false });
            this.sendEvent(
              new Event("breakpoint", {
                reason: "removed",
                breakpoint: { id: i.id }
              })
            );
            continue;
          }
          let bps = byThread.get(threadId);
          if (!bps) {
            bps = [];
            byThread.set(threadId, bps);
          }
          bps.push(i);
          (
            remote.dataBreakpoints || (remote.dataBreakpoints = new Set())
          ).add(i);
          responseBreakpoints.push({ id: i.id, verified: true });
        }
        // Every remote gets exactly its own subset (possibly empty).
        for (const threadId of this.remotes.keys()) {
          args.breakpoints = byThread.get(threadId) || [];
          this.sendToThread(threadId, request);
        }
        const response = <P.SetDataBreakpointsResponse>new Response(request);
        response.body = { breakpoints: responseBreakpoints };
        this.sendResponse(response);
      }
      break;
    case "continue":
    case "next":
    case "stackTrace":
    case "stepIn":
    case "stepOut":
    case "stepBack":
    case "goto":
    case "pause":
    case "exceptionInfo":
    // FIX: a duplicate (dead) "stackTrace" case label was removed here.
    case "scopes":
    case "variables":
    case "evaluate":
    case "setExpression":
    case "reverseContinue":
    case "dataBreakpointInfo":
      // Forward to whichever thread the request references.
      const args: any = request.arguments;
      if (args.threadId != null) {
        if (this.sendToThread(args.threadId, request)) return;
        break;
      }
      if (args.frameId != null) {
        if (this.sendToThread(toThread(args.frameId), request)) return;
        break;
      }
      if (args.variablesReference) {
        if (this.sendToThread(toThread(args.variablesReference), request))
          return;
        break;
      }
      if (request.command === "evaluate") {
        // REPL evaluations may carry no reference: use the last active thread.
        this.sendToThread(this.lastThread, request);
        return;
      }
      logger.error("no thread's destination");
      break;
    case "terminate":
      this.sendAll({ ...request, command: "childTerminate" });
      this.sendResponse(new Response(request));
      this.terminate();
      return;
    case "disconnect":
      this.shutdown();
      this.sendResponse(new Response(request));
      return;
    default:
      super.dispatchRequest(request);
  }
}
/** Asks the client to end the session (unless we are already stopped). */
private terminate(reason?: string) {
  if (reason) logger.verbose(`termination request: ${reason}`);
  if (this.stopped) return;
  this.sendEvent(new TerminatedEvent());
}
// Handles a message arriving FROM a debuggee: events are stamped with the
// originating thread id and re-emitted to the client; responses are either
// merged (breakpoints), swallowed (child* plumbing) or forwarded verbatim.
private dispatchResponse(thread: Handler, data: Message) {
if ((<any>data).event !== "output")
logger.verbose(`response: ${JSON.stringify(data)}`);
if (data.type === "event") {
let ev = <Event>data;
switch (ev.event) {
case "loadedSources":
const lsev = <any>ev;
// Breakpoint updates piggy-back on loadedSources; merge them, then
// strip them so the client sees a plain event.
if (lsev.body.breakpoints)
this.mergeResponseBreakpoints(lsev.body.breakpoints, thread.id);
delete lsev.body.breakpoints;
break;
case "continued":
case "stopped":
case "thread":
// Children don't know their global thread id; patch it in.
(<any>ev).body.threadId = thread.id;
this.lastThread = thread.id;
(<any>ev).body.allThreadsContinued = false;
}
this.sendEvent(ev);
} else if (data.type === "response") {
const response = <P.Response>data;
switch (response.command) {
case "continue":
(response.body || (response.body = {})).allThreadsContinued = false;
break;
case "breakpointLocations":
// Resolve the matching waiter in doBreakpointsLocations.
const cb = this.breakpointLocationsCb?.get(response.request_seq);
if (cb) cb([thread.id, <P.BreakpointLocationsResponse>response]);
return;
case "childSetExceptionBreakpoints":
case "childTerminate":
case "childRestart":
case "setDataBreakpoints":
// Pure child plumbing; the client already got its response.
return;
case "childLaunch":
if (response.body) {
if (!thread.name) {
// De-duplicate display names: "Thread", "Thread[1]", ...
let threadName = response.body.name || "Thread";
const count = this.knownThreadNames[threadName] || 0;
this.knownThreadNames[threadName] = count + 1;
if (count !== 0) threadName += `[${count}]`;
thread.name = threadName;
}
for (const i of response.body.breakpoints)
this.mergeResponseBreakpoints(i.breakpoints, thread.id);
}
this.sendEvent(new ThreadEvent("started", thread.id));
return;
case "childSetBreakpoints":
if (!this.breakpointsResponseRemotes) return;
this.breakpointsResponseRemotes.delete(thread.id);
if (response.body && response.body.breakpoints)
this.mergeResponseBreakpoints(
response.body.breakpoints,
thread.id,
true
);
// Only answer the client once every remote has acknowledged.
// NOTE(review): the early return below makes the size === 0 test in
// the following condition always true; one of the two is redundant.
if (this.breakpointsResponseRemotes.size !== 0) return;
if (
this.breakpointsResponseRemotes.size === 0 &&
this.breakpointsResponse
) {
this.sendResponse(this.breakpointsResponse);
this.breakpointsResponse = void 0;
}
return;
}
this.sendResponse(response);
}
}
/**
 * Tears the session down exactly once: unblocks a pending launch wait,
 * closes all debuggee connections, stops the comms server and kills the
 * spawned child process.
 */
public shutdown() {
  if (this.stopped) return;
  this.stopped = true;
  if (this.connectCb) this.connectCb();
  for (const remote of this.remotes.values()) remote.close();
  if (this.stopComms) this.stopComms();
  if (this.childProcess) this.childProcess.kill();
  super.shutdown();
}
// Resolves launchRequest's wait for the configuration phase to finish.
private configurationCb?: (args?: any) => void;
private configurationDone = false;
protected sendErrorResponse(
response: P.Response,
code: number,
msg: string
): void {
super.sendErrorResponse(response, code, msg);
// TODO: check why VS doesn't show this itself
// if (this.showError && msg) this.showError(msg);
}
/**
* Called at the end of the configuration sequence.
* Indicates that all breakpoints etc. have been sent to the DA and that the 'launch' can start.
*/
protected configurationDoneRequest(
response: P.ConfigurationDoneResponse,
args: P.ConfigurationDoneArguments
): void {
super.configurationDoneRequest(response, args);
// Unblock launchRequest if it is waiting for configuration to finish.
if (this.configurationCb) this.configurationCb();
this.configurationDone = true;
}
/**
 * Handles the "launch" request: resolves (installing on demand) the
 * "@effectful/debugger" runtime, starts the websocket listener debuggees
 * connect back to, optionally spawns/reuses the debuggee process (in a
 * terminal or as a child process), then waits for the first connection
 * and for the configuration phase before launching every connected child.
 * Fixes a user-facing typo ("Awating" -> "Awaiting").
 */
protected async launchRequest(
  response: P.LaunchResponse,
  args: P.LaunchRequestArguments
) {
  logger.setup(
    args.verbose
      ? Logger.LogLevel.Verbose
      : args.verbose === false
      ? Logger.LogLevel.Stop
      : Logger.LogLevel.Log,
    false
  );
  let cwd = args.cwd;
  let progressId = this.supportsProgress && `LAUNCH$${progressCnt++}`;
  const preset = args.preset || "node";
  if (preset === "browser" || preset === "next") {
    this.sendErrorResponse(
      response,
      1009,
      BROWSERS_ZERO_CONFIG_NOT_SUPPORTED
    );
    return;
  }
  const isNode = preset === "node";
  // "listener" preset: only await incoming connections; spawn nothing.
  const needsLaunch = preset !== "listener";
  if (!cwd) {
    cwd = args.cwd = process.cwd();
  }
  const runtime = args.runtime || "@effectful/debugger";
  const runtimeBase = packageBase(runtime);
  let debuggerImpl: string;
  const resolvePaths: string[] = require.resolve.paths && [
    ...new Set(
      [cwd].concat(
        <any>require.resolve.paths(cwd),
        <any>require.resolve.paths(__dirname)
      )
    )
  ];
  logger.log(`Searching ${runtimeBase} in ${resolvePaths}`);
  try {
    debuggerImpl = resolvePaths
      ? require.resolve(runtimeBase, { paths: resolvePaths })
      : require.resolve(runtimeBase);
  } catch (e) {
    if (e.code !== "MODULE_NOT_FOUND") {
      this.sendErrorResponse(
        response,
        1002,
        `Couldn't resolve the debuggers runtime - ${e}`
      );
      return;
    }
    // The runtime isn't available: install it next to the extension.
    logger.log(
      `couldn't find "${runtimeBase}" runtime, installing it....(please wait, this may take a few minutes)`
    );
    let cb: (b: boolean) => void;
    if (progressId)
      this.sendEvent(
        new ProgressStartEvent(
          `i$${progressId}`,
          "Installing runtime (please wait, this may take a few minutes)"
        )
      );
    const env = { ...process.env };
    const child =
      process.platform === "win32"
        ? spawn(
            "npm",
            [
              "install",
              "--no-package-lock",
              "--no-save",
              "--global-style",
              "--no-audit",
              runtimeBase
            ],
            { shell: true, cwd: path.join(__dirname, ".."), env }
          )
        : spawn(
            process.env.SHELL || "bash",
            [
              "-ilc",
              `"npm install --no-package-lock --no-save --global-style --no-audit ${runtimeBase}"`
            ],
            { shell: true, cwd: path.join(__dirname, ".."), env }
          );
    child.on("error", data => {
      this.sendErrorResponse(
        response,
        1003,
        `Cannot install ${runtimeBase} (${data.message}). ${INSTAL_INSTRUCTION}`
      );
      this.terminate("install error: " + data.message);
      cb(true);
    });
    child.stdout.on("data", data => {
      logger.log("install: " + String(data));
    });
    child.stderr.on("data", data => {
      logger.log("install: " + String(data));
    });
    child.on("exit", code => {
      if (progressId) this.sendEvent(new ProgressEndEvent(`i$${progressId}`));
      if (!code) return cb(false);
      this.sendErrorResponse(
        response,
        1003,
        `Cannot install ${runtimeBase} (Exit code: ${code}). ${INSTAL_INSTRUCTION}`
      );
      cb(true);
    });
    // cb(true) signals a failed install; the error response was already sent.
    if (await new Promise(i => (cb = i))) return;
    debuggerImpl = path.resolve(
      path.join(__dirname, "..", "node_modules", runtimeBase, "vscode.js")
    );
  }
  logger.log(`Using ${runtime} from ${debuggerImpl}`);
  debuggerImpl = path.dirname(normalizeDrive(debuggerImpl));
  const runJs = path.join(debuggerImpl, "config", preset, "run.js");
  const debuggerDeps =
    process.env["EFFECTFUL_DEBUGGER_DEPS"] ||
    (args.env && args.env["EFFECTFUL_DEBUGGER_DEPS"]) ||
    path.resolve(path.join(debuggerImpl, "..", ".."));
  // Start accepting debuggee connections before anything is spawned.
  this.stopComms = subscribe(
    (remote: Handler) => {
      logger.verbose(`new debuggee: ${remote.id}`);
      this.remotes.set(remote.id, remote);
      remote.onclose = () => this.closeRemote(remote.id);
      remote.onmessage = data => this.dispatchResponse(remote, <Message>data);
      remote.onerror = reason => logger.error(reason);
      if (this.launched) this.launchChild(remote);
      if (this.connectCb) this.connectCb();
    },
    args.debuggerHost || "localhost",
    args.debuggerPort || 20011
  );
  this.launchArgs = args;
  if (args.verbose) logger.verbose(`launch request ${JSON.stringify(args)}`);
  this.sendEvent(
    new CapabilitiesEvent({
      supportsStepBack: !!args.timeTravel,
      supportsRestartFrame: false,
      supportsRestartRequest: !!args.fastRestart || args.preset !== "node",
      supportsEvaluateForHovers: !!args.timeTravel,
      supportsDataBreakpoints: !!args.timeTravel
    })
  );
  let errMessage: string | undefined;
  if (args.reconnectTimeout)
    this.awaitReconnect = args.reconnectTimeout * 1000;
  if (needsLaunch) {
    const env: { [name: string]: string | null } = <any>{};
    const host =
      !args.debuggerHost ||
      args.debuggerHost === "::" ||
      args.debuggerHost === "0.0.0.0"
        ? "localhost"
        : args.debuggerHost;
    if (process.env["EFFECTFUL_DEBUGGER_VERBOSE"] == null)
      env["EFFECTFUL_DEBUGGER_VERBOSE"] = args.verbose
        ? String(args.verbose)
        : "0";
    if (process.env["EFFECTFUL_DEBUGGER_URL"] == null)
      env["EFFECTFUL_DEBUGGER_URL"] = `ws://${host}:${
        args.debuggerPort || 20011
      }`;
    if (runtime) env["EFFECTFUL_DEBUGGER_RUNTIME"] = runtime;
    env["EFFECTFUL_DEBUGGER_OPEN"] = args.open ? String(args.open) : "0";
    env["EFFECTFUL_DEBUGGER_TIME_TRAVEL"] = args.timeTravel
      ? String(!!args.timeTravel)
      : "0";
    if (args.srcRoot) env["EFFECTFUL_DEBUGGER_SRC_ROOT"] = args.srcRoot;
    if (args.env) Object.assign(env, args.env);
    let term = this.supportsRunInTerminalRequest
      ? args.console
      : "internalConsole";
    if (term === true) term = "externalTerminal";
    else if (!term) term = "internalConsole";
    // Child processes can only be reused when we own them (internal console).
    const reuse = args.reuse && term === "internalConsole";
    if (
      !(
        "EFFECTFUL_DEBUGGER_RUNTIME_PACKAGES" in env ||
        "EFFECTFUL_DEBUGGER_RUNTIME_PACKAGES" in process.env
      )
    )
      env["EFFECTFUL_DEBUGGER_RUNTIME_PACKAGES"] = debuggerDeps;
    if (args.include) env["EFFECTFUL_DEBUGGER_INCLUDE"] = args.include;
    if (args.blackbox) env["EFFECTFUL_DEBUGGER_BLACKBOX"] = args.blackbox;
    if (args.exclude) env["EFFECTFUL_DEBUGGER_EXCLUDE"] = args.exclude;
    if (isNode) {
      const node_path = [debuggerDeps];
      if (env.NODE_PATH) node_path.push(env.NODE_PATH);
      env.NODE_PATH = node_path.join(path.delimiter);
    }
    const launchArgs = [`--max-old-space-size=${MAX_OLD_SPACE}`, runJs];
    if (typeof env["NODE_ARGS"] === "string")
      launchArgs.unshift(env["NODE_ARGS"]);
    if (args.command) launchArgs.push(args.command);
    if (args.args) launchArgs.push(...args.args);
    if (term === "externalTerminal" || term === "integratedTerminal") {
      const termArgs: P.RunInTerminalRequestArguments = {
        kind: term === "integratedTerminal" ? "integrated" : "external",
        title: "Effectful Debug Console",
        cwd,
        args: ["node", ...launchArgs],
        env
      };
      this.runInTerminalRequest(
        termArgs,
        RUNINTERMINAL_TIMEOUT,
        runResponse => {
          if (!runResponse.success) {
            this.sendErrorResponse(
              response,
              1001,
              `Cannot launch debug target in terminal (${runResponse.message}).`
            );
            this.terminate("terminal error: " + runResponse.message);
          }
        }
      );
    } else {
      let child: ChildProcess | undefined;
      const cmdline = launchArgs.slice(1).join(" ");
      let key = cmdline;
      const timeTravel = !!args.timeTravel;
      if (reuse) {
        key = `${cmdline}@${cwd}/${timeTravel}/${JSON.stringify(env)}`;
        child = runningCommands.get(key);
      }
      let startBuf: string[] = [];
      let progressPrefix: string | null = null;
      if (progressId)
        progressPrefix = env[
          "EFFECTFUL_PROGRESS_ID"
        ] = `@progress@${progressId}:`;
      if (!child) {
        const spawnArgs: any = {
          cwd,
          env: { ...process.env, ...env },
          shell: true
        };
        if (args.argv0) spawnArgs.argv0 = args.argv0;
        child = spawn("node", launchArgs, spawnArgs);
        let lastPercentage = 0;
        let message = "";
        logger.verbose(
          `SPAWN: node ${cmdline} ${JSON.stringify({
            ...spawnArgs,
            env
          })}`
        );
        child.on("error", data => {
          this.sendErrorResponse(
            response,
            1001,
            `Cannot launch debug target in terminal (${data.message}).`
          );
          this.terminate("spawn error: " + data.message);
        });
        child.stdout.on("data", data => {
          const txt = String(data);
          if (args.verbose) logger.verbose(txt);
          // Keep pre-connection output so it can be reported on failure.
          if (!this.launched) startBuf.push(txt);
        });
        child.stderr.on("data", data => {
          const txt = String(data);
          if (args.verbose) logger.error(txt);
          if (!this.launched) startBuf.push(txt);
        });
        child.on("exit", code => {
          if (!this.launched && startBuf.length) {
            errMessage = startBuf.join("");
          }
          logger.verbose(`command "${cmdline}" exited with ${code}`);
          if (args.reuse && key) runningCommands.delete(key);
          this.closeRemote(0);
        });
        if (reuse && key) runningCommands.set(key, child);
        else this.childProcess = child;
      }
    }
  }
  if (!this.remotes.size) {
    logger.log("Awaiting a debuggee to connect back");
    if (progressId)
      this.sendEvent(
        // FIX: message read "Awating a debuggee"
        new ProgressStartEvent(`s$${progressId}`, "Awaiting a debuggee")
      );
    await new Promise<Handler | undefined>(i => (this.connectCb = i));
    logger.verbose("first connection");
    this.connectCb = undefined;
  }
  if (this.remotes.size && !this.stopped) {
    // wait until configuration has finished (and configurationDoneRequest has been called)
    if (!this.configurationDone) {
      await Promise.race([
        new Promise(i => (this.configurationCb = i)),
        new Promise(i => setTimeout(i, CONFIGURATION_DONE_REQUEST_TIMEOUT))
      ]);
    }
    logger.verbose("config done");
    for (const remote of this.remotes.values()) this.launchChild(remote);
  }
  if (progressId) this.sendEvent(new ProgressEndEvent(`s$${progressId}`));
  if (this.stopped) {
    response.success = false;
    this.sendErrorResponse(
      response,
      1002,
      errMessage || "The application has stopped"
    );
    return;
  }
  this.launched = true;
  this.sendResponse(response);
}
/**
 * Sends the initial "childLaunch" to a freshly connected debuggee: its
 * assigned thread id, the replayed exception-breakpoint settings and
 * every breakpoint registered so far.
 */
private launchChild(remote: Handler): void {
  const args = this.launchArgs || {};
  // FIX: the template placeholder was missing its "$" ("launching {remote.id}...")
  // and logged the text literally instead of the thread id.
  logger.verbose(`launching ${remote.id}...`);
  remote.send({
    command: "childLaunch",
    arguments: {
      threadId: remote.id,
      noDebug: args.noDebug,
      restart: args.__restart,
      stopOnEntry: args.stopOnEntry,
      stopOnExit: args.stopOnExit,
      dirSep: path.sep,
      exceptions: this.exceptionArgs,
      fastRestart: args.fastRestart,
      timeTravelDisabled: args.timeTravelDisabled,
      onChange: args.onChange,
      breakpoints: [...this.breakpointsSrcs].map(
        ([srcPath, breakpoints]) => ({
          breakpoints: breakpoints.map(i => i.response),
          source:
            typeof srcPath === "number"
              ? { sourceReference: srcPath }
              : { path: normalizeDrive(srcPath) }
        })
      )
    }
  });
}
// Pending breakpointLocations requests: request seq -> resolver, fed with
// [remote thread id, that remote's response] by dispatchResponse.
private breakpointLocationsCb: Map<
number,
(arg: [number, P.BreakpointLocationsResponse]) => void
> = new Map();
// Broadcasts a breakpointLocations request to all debuggees and answers
// with the first non-empty result (remaining remotes are not awaited).
private async doBreakpointsLocations(req: P.BreakpointLocationsRequest) {
const resp = <P.BreakpointLocationsResponse>new Response(req);
resp.body = { breakpoints: [] };
const awaiting: Set<number> = new Set(this.remotes.keys());
const args = <P.SetBreakpointsArguments>req.arguments;
if (args.source.path) args.source.path = normalizeDrive(args.source.path);
this.sendAll(req);
while (awaiting.size) {
// Resolved per child response via dispatchResponse.
const [remote, clientResp] = await new Promise(i =>
this.breakpointLocationsCb.set(req.seq, i)
);
awaiting.delete(remote);
if (clientResp.body.breakpoints.length) {
resp.body.breakpoints.push(...clientResp.body.breakpoints);
break;
}
}
this.breakpointLocationsCb.delete(req.seq);
this.sendResponse(resp);
}
/**
 * Replaces all breakpoints of one source. Assigns fresh ids, records them
 * in the by-source and by-id maps and forwards them to every connected
 * debuggee; the client response is held back until all remotes answered
 * (see dispatchResponse / childSetBreakpoints) or sent immediately when
 * nobody is connected yet.
 * Fix: the per-breakpoint record no longer shadows the outer `response`.
 */
private doSetBreakpoints(req: P.SetBreakpointsRequest): void {
  const args = req.arguments;
  const srcPath: string | number =
    args.source.sourceReference || args.source.path || 0;
  if (args.source.path) args.source.path = normalizeDrive(args.source.path);
  const response = <P.SetBreakpointsResponse>new Response(req);
  const bps: BreakpointInfo[] = [];
  if (args.breakpoints) {
    for (const i of args.breakpoints) {
      const id = ++this.breakpointsCount;
      // Reported unverified until some debuggee confirms the breakpoint.
      const bpResponse = {
        ...i,
        id,
        verified: false,
        source: args.source
      };
      const bpi: BreakpointInfo = {
        id,
        remotes: new Map(),
        source: args.source,
        request: i,
        response: bpResponse
      };
      bps.push(bpi);
      this.breakpointsIds.set(id, bpi);
    }
  }
  // Clear all previously registered breakpoints for this file.
  const old = this.breakpointsSrcs.get(srcPath);
  if (old) {
    for (const i of old) this.breakpointsIds.delete(i.id);
  }
  if (bps.length) {
    this.breakpointsSrcs.set(srcPath, bps);
  } else if (old) {
    this.breakpointsSrcs.delete(srcPath);
  }
  const breakpoints: P.Breakpoint[] = bps.map(i => i.response);
  response.body = { breakpoints };
  if (this.remotes.size) {
    // Defer the client response until every remote acknowledged.
    this.breakpointsResponse = response;
    this.breakpointsResponseRemotes = new Set(this.remotes.keys());
    for (const remote of this.remotes.values()) {
      remote.send({
        command: "childSetBreakpoints",
        seq: req.seq,
        arguments: {
          breakpoints,
          source: args.source,
          sourceModified: args.sourceModified
        }
      });
    }
  } else {
    this.sendResponse(response);
  }
}
  /**
   * Merges breakpoints reported by one remote into the shared per-breakpoint
   * state (bpi.remotes) and reconciles breakpoints that different remotes
   * moved to different lines.
   * @param bodyBreakpoints breakpoints from the remote's response or event
   * @param remoteId        id of the remote that reported them
   * @param isResponse      true when merging a direct response; suppresses the
   *                        "changed" BreakpointEvent emitted for events
   */
  private mergeResponseBreakpoints(
    bodyBreakpoints: P.BreakpointInfo[],
    remoteId: number,
    isResponse?: boolean
  ) {
    for (const i of bodyBreakpoints) {
      const bpi = this.breakpointsIds.get(<any>i.id);
      if (!bpi) continue;
      const response = bpi.response;
      // NextJS removes some functions from the sources, so breakpoints move to some next line
      // we keep only the closest to the request breakpoints and ask the client to disable the
      // moved breakpoint
      if (i.verified) {
        const origLine = bpi.request.line;
        let diff = Infinity;
        let minResponse: P.Breakpoint = response;
        bpi.remotes.set(remoteId, i);
        // find, across all remotes, the reported line closest to the request
        for (const bp of bpi.remotes.values()) {
          if (!bp.line) continue;
          const curDiff = Math.abs(bp.line - origLine);
          if (curDiff > diff) continue;
          diff = curDiff;
          minResponse = bp;
        }
        // ask every remote that placed the breakpoint elsewhere to disable it
        for (const [remote, bp] of bpi.remotes) {
          if (bp.line !== minResponse.line)
            this.sendToThread(remote, {
              seq: 0,
              type: "request",
              command: "childDisableBreakpoint",
              arguments: { id: bp.id, source: bpi.source }
            });
        }
        if (i.line === minResponse.line) Object.assign(bpi.response, i);
      } else bpi.remotes.delete(remoteId);
      // a breakpoint no remote verified is reported unverified to the client
      if (!bpi.remotes.size) response.verified = false;
      if (!isResponse) this.sendEvent(new BreakpointEvent("changed", response));
    }
  }
protected threadsRequest(response: P.ThreadsResponse): void {
// runtime supports now threads so just return a default thread.
response.body = {
threads: [...this.remotes].map(
([id, thread]) => new Thread(id, thread.name || `Thread ${id}`)
)
};
this.sendResponse(response);
}
  /**
   * Shuts the session down: flags the adapter as stopped, flushes a pending
   * configuration-done callback if one is waiting, then delegates the actual
   * teardown to the base class.
   */
  protected disconnectRequest(
    response: P.DisconnectResponse,
    args: P.DisconnectArguments
  ): void {
    logger.verbose("preparing disconnect");
    this.stopped = true;
    if (this.configurationCb) this.configurationCb();
    super.disconnectRequest(response, args);
  }
}
namespace fgui {
type PackageDependency = { id: string, name: string };
export class UIPackage {
        private _id: string;     // package id read from the package binary
        private _name: string;   // package name read from the package binary
        private _path: string;   // load path; also registered as an id alias
        private _items: Array<PackageItem>;                   // all items, in file order
        private _itemsById: { [index: string]: PackageItem }; // item lookup by id
        private _itemsByName: { [index: string]: PackageItem }; // item lookup by name
        private _sprites: { [index: string]: AtlasSprite };   // atlas sub-sprites by item id
        private _dependencies: Array<PackageDependency>;      // packages this one depends on
        private _branches: Array<string>;  // branch names declared by the package
        public _branchIndex: number;       // index of UIPackage._branch in _branches, -1 when none
        private _bundle: cc.AssetManager.Bundle;   // bundle the package assets load from
        public static _constructing: number = 0;   // >0 while objects are built from resources
        private static _instById: { [index: string]: UIPackage } = {};   // keyed by id and by path
        private static _instByName: { [index: string]: UIPackage } = {}; // keyed by name
        private static _branch: string = "";  // globally selected branch ("" = none)
        private static _vars: { [index: string]: string } = {};  // global variables (getVar/setVar)
public constructor() {
this._items = [];
this._itemsById = {};
this._itemsByName = {};
this._sprites = {};
this._dependencies = [];
this._branches = [];
this._branchIndex = -1;
}
public static get branch(): string {
return UIPackage._branch;
}
public static set branch(value: string) {
UIPackage._branch = value;
for (var pkgId in UIPackage._instById) {
var pkg: UIPackage = UIPackage._instById[pkgId];
if (pkg._branches) {
pkg._branchIndex = pkg._branches.indexOf(value);
}
}
}
        /** Returns the value of a global variable set via setVar (undefined if unset). */
        public static getVar(key: string): string {
            return UIPackage._vars[key];
        }
        /** Sets a global variable. */
        public static setVar(key: string, value: string) {
            UIPackage._vars[key] = value;
        }
        /** Looks up a loaded package by id (paths are also registered as id aliases). */
        public static getById(id: string): UIPackage {
            return UIPackage._instById[id];
        }
        /** Looks up a loaded package by name. */
        public static getByName(name: string): UIPackage {
            return UIPackage._instByName[name];
        }
        /**
         * Registers a package whose resources live under "resources" and have
         * already been preloaded.
         * @param path Path relative to the resources directory.
         * @returns The package (the existing instance when already registered).
         * @throws A string error when the asset is not loaded or has no data.
         */
        public static addPackage(path: string): UIPackage {
            let pkg: UIPackage = UIPackage._instById[path];
            if (pkg)
                return pkg;
            let asset: any = cc.resources.get(path, cc.BufferAsset);
            if (!asset)
                throw "Resource '" + path + "' not ready";
            if (!asset._buffer)
                throw "Missing asset data.";
            pkg = new UIPackage();
            pkg._bundle = cc.resources;
            pkg.loadPackage(new ByteBuffer(asset._buffer), path);
            // register under id, name and path so all three resolve this package
            UIPackage._instById[pkg.id] = pkg;
            UIPackage._instByName[pkg.name] = pkg;
            UIPackage._instById[pkg._path] = pkg;
            return pkg;
        }
        /**
         * Loads a package whose resources come from an Asset Bundle.
         * @param bundle The Asset Bundle.
         * @param path Path relative to the bundle's directory.
         * @param onComplete Called when loading finishes.
         */
        public static loadPackage(bundle: cc.AssetManager.Bundle, path: string, onComplete?: (error: any, pkg: UIPackage) => void): void;
        /**
         * Loads a package whose resources come from an Asset Bundle.
         * @param bundle The Asset Bundle.
         * @param path Path relative to the bundle's directory.
         * @param onProgress Progress callback.
         * @param onComplete Called when loading finishes.
         */
        public static loadPackage(bundle: cc.AssetManager.Bundle, path: string, onProgress?: (finish: number, total: number, item: cc.AssetManager.RequestItem) => void, onComplete?: (error: any, pkg: UIPackage) => void): void;
        /**
         * Loads a package whose resources come from "resources".
         * @param path Path relative to the resources directory.
         * @param onComplete Called when loading finishes.
         */
        public static loadPackage(path: string, onComplete?: (error: any, pkg: UIPackage) => void): void;
        /**
         * Loads a package whose resources come from "resources".
         * @param path Path relative to the resources directory.
         * @param onProgress Progress callback.
         * @param onComplete Called when loading finishes.
         */
        public static loadPackage(path: string, onProgress?: (finish: number, total: number, item: cc.AssetManager.RequestItem) => void, onComplete?: (error: any, pkg: UIPackage) => void): void;
        /**
         * Implementation of the loadPackage overloads above: resolves the
         * argument shapes, loads the package binary from the bundle (defaults
         * to cc.resources), parses it, then preloads every atlas and sound
         * before registering the package and invoking onComplete.
         */
        public static loadPackage(...args: any[]) {
            let path: string;
            let onProgress: (finish: number, total: number, item: cc.AssetManager.RequestItem) => void;
            let onComplete: (error: any, pkg: UIPackage) => void;
            let bundle: cc.AssetManager.Bundle;
            // untangle the four overload shapes (with/without bundle, with/without onProgress)
            if (args[0] instanceof cc.AssetManager.Bundle) {
                bundle = args[0];
                path = args[1];
                if (args.length > 3) {
                    onProgress = args[2];
                    onComplete = args[3];
                }
                else
                    onComplete = args[2];
            }
            else {
                path = args[0];
                if (args.length > 2) {
                    onProgress = args[1];
                    onComplete = args[2];
                }
                else
                    onComplete = args[1];
            }
            bundle = bundle || cc.resources;
            bundle.load(path, cc.BufferAsset, onProgress, function (err, asset: any) {
                if (err) {
                    if (onComplete != null)
                        onComplete(err, null);
                    return;
                }
                let pkg: UIPackage = new UIPackage();
                pkg._bundle = bundle;
                pkg.loadPackage(new ByteBuffer(asset._buffer), path);
                // collect the atlas/sound files that must be preloaded
                let cnt: number = pkg._items.length;
                let urls: Array<string> = [];
                let types: Array<typeof cc.Asset> = [];
                for (var i: number = 0; i < cnt; i++) {
                    var pi: PackageItem = pkg._items[i];
                    if (pi.type == PackageItemType.Atlas || pi.type == PackageItemType.Sound) {
                        let assetType = ItemTypeToAssetType[pi.type];
                        urls.push(pi.file);
                        types.push(assetType);
                    }
                }
                let total = urls.length;
                let lastErr;
                // registers the package and fires onComplete once all sub-loads finish;
                // only the last error encountered is reported
                let taskComplete = (err?) => {
                    total--;
                    if (err)
                        lastErr = err;
                    if (total <= 0) {
                        UIPackage._instById[pkg.id] = pkg;
                        UIPackage._instByName[pkg.name] = pkg;
                        if (pkg._path)
                            UIPackage._instById[pkg._path] = pkg;
                        if (onComplete != null)
                            onComplete(lastErr, pkg);
                    }
                }
                if (total > 0) {
                    urls.forEach((url, index) => {
                        bundle.load(url, types[index], onProgress, taskComplete);
                    });
                }
                else
                    taskComplete();
            });
        }
public static removePackage(packageIdOrName: string): void {
var pkg: UIPackage = UIPackage._instById[packageIdOrName];
if (!pkg)
pkg = UIPackage._instByName[packageIdOrName];
if (!pkg)
throw "No package found: " + packageIdOrName;
pkg.dispose();
delete UIPackage._instById[pkg.id];
delete UIPackage._instByName[pkg.name];
if (pkg._path)
delete UIPackage._instById[pkg._path];
}
public static createObject(pkgName: string, resName: string, userClass?: new () => GObject): GObject {
var pkg: UIPackage = UIPackage.getByName(pkgName);
if (pkg)
return pkg.createObject(resName, userClass);
else
return null;
}
public static createObjectFromURL(url: string, userClass?: new () => GObject): GObject {
var pi: PackageItem = UIPackage.getItemByURL(url);
if (pi)
return pi.owner.internalCreateObject(pi, userClass);
else
return null;
}
public static getItemURL(pkgName: string, resName: string): string {
var pkg: UIPackage = UIPackage.getByName(pkgName);
if (!pkg)
return null;
var pi: PackageItem = pkg._itemsByName[resName];
if (!pi)
return null;
return "ui://" + pkg.id + pi.id;
}
public static getItemByURL(url: string): PackageItem {
var pos1: number = url.indexOf("//");
if (pos1 == -1)
return null;
var pos2: number = url.indexOf("/", pos1 + 2);
if (pos2 == -1) {
if (url.length > 13) {
var pkgId: string = url.substr(5, 8);
var pkg: UIPackage = UIPackage.getById(pkgId);
if (pkg != null) {
var srcId: string = url.substr(13);
return pkg.getItemById(srcId);
}
}
}
else {
var pkgName: string = url.substr(pos1 + 2, pos2 - pos1 - 2);
pkg = UIPackage.getByName(pkgName);
if (pkg != null) {
var srcName: string = url.substr(pos2 + 1);
return pkg.getItemByName(srcName);
}
}
return null;
}
public static normalizeURL(url: string): string {
if (url == null)
return null;
var pos1: number = url.indexOf("//");
if (pos1 == -1)
return null;
var pos2: number = url.indexOf("/", pos1 + 2);
if (pos2 == -1)
return url;
var pkgName: string = url.substr(pos1 + 2, pos2 - pos1 - 2);
var srcName: string = url.substr(pos2 + 1);
return UIPackage.getItemURL(pkgName, srcName);
}
        /** Feeds an XML translation source to the TranslationHelper. */
        public static setStringsSource(source: string): void {
            TranslationHelper.loadFromXML(source);
        }
        /**
         * Parses the package binary (FairyGUI "new" format, magic 0x46475549 =
         * "FGUI"): header, shared string table, dependencies, branches, the
         * item table, the atlas-sprite table and optional pixel hit-test data.
         * @param buffer Package data.
         * @param path   Load path; kept as _path and used to derive item file paths.
         * @throws A string error when the buffer is in the old package format.
         */
        private loadPackage(buffer: ByteBuffer, path: string): void {
            if (buffer.readUint() != 0x46475549)
                throw "FairyGUI: old package format found in '" + path + "'";
            this._path = path;
            buffer.version = buffer.readInt();
            var ver2: boolean = buffer.version >= 2;
            var compressed: boolean = buffer.readBool(); // read but unused here
            this._id = buffer.readString();
            this._name = buffer.readString();
            buffer.skip(20); // reserved header bytes
            var indexTablePos: number = buffer.position;
            var cnt: number;
            var i: number;
            var nextPos: number;
            var str: string;
            var branchIncluded: boolean;
            // --- segment 4: shared string table ---
            buffer.seek(indexTablePos, 4);
            cnt = buffer.readInt();
            var stringTable: Array<string> = new Array<string>(cnt);
            buffer.stringTable = stringTable;
            for (i = 0; i < cnt; i++)
                stringTable[i] = buffer.readString();
            // --- segment 5 (optional): long strings overriding table slots ---
            if (buffer.seek(indexTablePos, 5)) {
                cnt = buffer.readInt();
                for (i = 0; i < cnt; i++) {
                    let index = buffer.readUshort();
                    let len = buffer.readInt();
                    stringTable[index] = buffer.readString(len);
                }
            }
            // --- segment 0: dependencies and (ver2) branch names ---
            buffer.seek(indexTablePos, 0);
            cnt = buffer.readShort();
            for (i = 0; i < cnt; i++)
                this._dependencies.push({ id: buffer.readS(), name: buffer.readS() });
            if (ver2) {
                cnt = buffer.readShort();
                if (cnt > 0) {
                    this._branches = buffer.readSArray(cnt);
                    if (UIPackage._branch)
                        this._branchIndex = this._branches.indexOf(UIPackage._branch);
                }
                branchIncluded = cnt > 0;
            }
            // --- segment 1: package items ---
            buffer.seek(indexTablePos, 1);
            var pi: PackageItem;
            let pos = path.lastIndexOf('/');
            let shortPath = pos == -1 ? "" : path.substr(0, pos + 1);
            // prefix for files exported next to the package file
            path = path + "_";
            cnt = buffer.readShort();
            for (i = 0; i < cnt; i++) {
                nextPos = buffer.readInt();
                nextPos += buffer.position;
                pi = new PackageItem();
                pi.owner = this;
                pi.type = buffer.readByte();
                pi.id = buffer.readS();
                pi.name = buffer.readS();
                buffer.readS(); //path
                pi.file = buffer.readS();
                buffer.readBool();//exported
                pi.width = buffer.readInt();
                pi.height = buffer.readInt();
                // type-specific payload
                switch (pi.type) {
                    case PackageItemType.Image:
                        {
                            pi.objectType = ObjectType.Image;
                            var scaleOption: number = buffer.readByte();
                            if (scaleOption == 1) {
                                // 9-slice scaling grid
                                pi.scale9Grid = new cc.Rect();
                                pi.scale9Grid.x = buffer.readInt();
                                pi.scale9Grid.y = buffer.readInt();
                                pi.scale9Grid.width = buffer.readInt();
                                pi.scale9Grid.height = buffer.readInt();
                                pi.tileGridIndice = buffer.readInt();
                            }
                            else if (scaleOption == 2)
                                pi.scaleByTile = true;
                            pi.smoothing = buffer.readBool();
                            break;
                        }
                    case PackageItemType.MovieClip:
                        {
                            pi.smoothing = buffer.readBool();
                            pi.objectType = ObjectType.MovieClip;
                            pi.rawData = buffer.readBuffer();
                            break;
                        }
                    case PackageItemType.Font:
                        {
                            pi.rawData = buffer.readBuffer();
                            break;
                        }
                    case PackageItemType.Component:
                        {
                            // extension byte > 0 selects a specialized object type
                            var extension: number = buffer.readByte();
                            if (extension > 0)
                                pi.objectType = extension;
                            else
                                pi.objectType = ObjectType.Component;
                            pi.rawData = buffer.readBuffer();
                            UIObjectFactory.resolveExtension(pi);
                            break;
                        }
                    case PackageItemType.Atlas:
                    case PackageItemType.Sound:
                    case PackageItemType.Misc:
                        {
                            // stored next to the package file ("<path>_<file>")
                            pi.file = path + cc.path.mainFileName(pi.file);
                            break;
                        }
                    case PackageItemType.Spine:
                    case PackageItemType.DragonBones:
                        {
                            // stored in the package's directory
                            pi.file = shortPath + cc.path.mainFileName(pi.file);
                            pi.skeletonAnchor = new cc.Vec2();
                            pi.skeletonAnchor.x = buffer.readFloat();
                            pi.skeletonAnchor.y = buffer.readFloat();
                            break;
                        }
                }
                if (ver2) {
                    str = buffer.readS();//branch
                    if (str)
                        pi.name = str + "/" + pi.name;
                    var branchCnt: number = buffer.readUbyte();
                    if (branchCnt > 0) {
                        if (branchIncluded)
                            pi.branches = buffer.readSArray(branchCnt);
                        else
                            // NOTE(review): registers an extra id alias for the
                            // item when branches are not embedded — confirm
                            this._itemsById[buffer.readS()] = pi;
                    }
                    var highResCnt: number = buffer.readUbyte();
                    if (highResCnt > 0)
                        pi.highResolution = buffer.readSArray(highResCnt);
                }
                this._items.push(pi);
                this._itemsById[pi.id] = pi;
                if (pi.name != null)
                    this._itemsByName[pi.name] = pi;
                buffer.position = nextPos;
            }
            // --- segment 2: atlas sprite table ---
            buffer.seek(indexTablePos, 2);
            cnt = buffer.readShort();
            for (i = 0; i < cnt; i++) {
                nextPos = buffer.readShort();
                nextPos += buffer.position;
                var itemId: string = buffer.readS();
                pi = this._itemsById[buffer.readS()]; // the owning atlas item
                let rect: cc.Rect = new cc.Rect();
                rect.x = buffer.readInt();
                rect.y = buffer.readInt();
                rect.width = buffer.readInt();
                rect.height = buffer.readInt();
                var sprite: AtlasSprite = { atlas: pi, rect: rect, offset: new cc.Vec2(), originalSize: new cc.Size(0, 0) };
                sprite.rotated = buffer.readBool();
                if (ver2 && buffer.readBool()) {
                    // trimmed sprite: explicit offset and untrimmed size
                    sprite.offset.x = buffer.readInt();
                    sprite.offset.y = buffer.readInt();
                    sprite.originalSize.width = buffer.readInt();
                    sprite.originalSize.height = buffer.readInt();
                }
                else {
                    sprite.originalSize.width = sprite.rect.width;
                    sprite.originalSize.height = sprite.rect.height;
                }
                this._sprites[itemId] = sprite;
                buffer.position = nextPos;
            }
            // --- segment 3 (optional): pixel hit-test data for images ---
            if (buffer.seek(indexTablePos, 3)) {
                cnt = buffer.readShort();
                for (i = 0; i < cnt; i++) {
                    nextPos = buffer.readInt();
                    nextPos += buffer.position;
                    pi = this._itemsById[buffer.readS()];
                    if (pi && pi.type == PackageItemType.Image)
                        pi.hitTestData = new PixelHitTestData(buffer);
                    buffer.position = nextPos;
                }
            }
        }
public dispose(): void {
var cnt: number = this._items.length;
for (var i: number = 0; i < cnt; i++) {
var pi: PackageItem = this._items[i];
if (pi.asset)
cc.assetManager.releaseAsset(pi.asset);
}
}
        /** The package id as stored in the package binary. */
        public get id(): string {
            return this._id;
        }
        /** The package name as stored in the package binary. */
        public get name(): string {
            return this._name;
        }
        /** The path this package was loaded from. */
        public get path(): string {
            return this._path;
        }
        /** The packages this one depends on (id/name pairs). */
        public get dependencies(): Array<PackageDependency> {
            return this._dependencies;
        }
public createObject(resName: string, userClass?: new () => GObject): GObject {
var pi: PackageItem = this._itemsByName[resName];
if (pi)
return this.internalCreateObject(pi, userClass);
else
return null;
}
        /**
         * Instantiates a package item via the UIObjectFactory, keeping the
         * global _constructing counter balanced around constructFromResource().
         * NOTE(review): the counter is not decremented if
         * constructFromResource() throws — confirm whether that matters.
         */
        public internalCreateObject(item: PackageItem, userClass?: new () => GObject): GObject {
            var g: GObject = UIObjectFactory.newObject(item, userClass);
            if (g == null)
                return null;
            UIPackage._constructing++;
            g.constructFromResource();
            UIPackage._constructing--;
            return g;
        }
        /** Looks up a package item by id (undefined when missing). */
        public getItemById(itemId: string): PackageItem {
            return this._itemsById[itemId];
        }
        /** Looks up a package item by name (undefined when missing). */
        public getItemByName(resName: string): PackageItem {
            return this._itemsByName[resName];
        }
public getItemAssetByName(resName: string): cc.Asset {
var pi: PackageItem = this._itemsByName[resName];
if (pi == null) {
throw "Resource not found -" + resName;
}
return this.getItemAsset(pi);
}
        /**
         * Synchronously resolves (and caches, via item.decoded/item.asset) the
         * runtime asset for an item: a SpriteFrame for images, the bundle
         * asset for atlases and sounds, a LabelAtlas for fonts, and decoded
         * frame data for movie clips. Other item types are returned as-is.
         */
        public getItemAsset(item: PackageItem): cc.Asset {
            switch (item.type) {
                case PackageItemType.Image:
                    if (!item.decoded) {
                        item.decoded = true;
                        var sprite: AtlasSprite = this._sprites[item.id];
                        if (sprite) {
                            // the atlas texture itself is resolved recursively
                            let atlasTexture: cc.Texture2D = <cc.Texture2D>this.getItemAsset(sprite.atlas);
                            if (atlasTexture) {
                                let sf = new cc.SpriteFrame(atlasTexture, sprite.rect, sprite.rotated,
                                    new cc.Vec2(sprite.offset.x - (sprite.originalSize.width - sprite.rect.width) / 2, -(sprite.offset.y - (sprite.originalSize.height - sprite.rect.height) / 2)),
                                    sprite.originalSize);
                                if (item.scale9Grid) {
                                    // convert the stored grid rect to SpriteFrame insets
                                    sf.insetLeft = item.scale9Grid.x;
                                    sf.insetTop = item.scale9Grid.y;
                                    sf.insetRight = item.width - item.scale9Grid.xMax;
                                    sf.insetBottom = item.height - item.scale9Grid.yMax;
                                }
                                item.asset = sf;
                            }
                        }
                    }
                    break;
                case PackageItemType.Atlas:
                case PackageItemType.Sound:
                    if (!item.decoded) {
                        item.decoded = true;
                        // must already be loaded into the bundle (see loadPackage)
                        item.asset = this._bundle.get(item.file, ItemTypeToAssetType[item.type]);
                        if (!item.asset)
                            console.log("Resource '" + item.file + "' not found");
                    }
                    break;
                case PackageItemType.Font:
                    if (!item.decoded) {
                        item.decoded = true;
                        this.loadFont(item);
                    }
                    break;
                case PackageItemType.MovieClip:
                    if (!item.decoded) {
                        item.decoded = true;
                        this.loadMovieClip(item);
                    }
                    break;
                default:
                    break;
            }
            return item.asset;
        }
public getItemAssetAsync(item: PackageItem, onComplete?: (err: Error, item: PackageItem) => void): void {
if (item.decoded) {
onComplete(null, item);
return;
}
if (item.loading) {
item.loading.push(onComplete);
return;
}
switch (item.type) {
case PackageItemType.Spine:
item.loading = [onComplete];
this.loadSpine(item);
break;
case PackageItemType.DragonBones:
item.loading = [onComplete];
this.loadDragonBones(item);
break;
default:
this.getItemAsset(item);
onComplete(null, item);
break;
}
}
public loadAllAssets(): void {
var cnt: number = this._items.length;
for (var i: number = 0; i < cnt; i++) {
var pi: PackageItem = this._items[i];
this.getItemAsset(pi);
}
}
private loadMovieClip(item: PackageItem): void {
var buffer: ByteBuffer = item.rawData;
buffer.seek(0, 0);
item.interval = buffer.readInt() / 1000;
item.swing = buffer.readBool();
item.repeatDelay = buffer.readInt() / 1000;
buffer.seek(0, 1);
var frameCount: number = buffer.readShort();
item.frames = Array<Frame>(frameCount);
var spriteId: string;
var sprite: AtlasSprite;
for (var i: number = 0; i < frameCount; i++) {
var nextPos: number = buffer.readShort();
nextPos += buffer.position;
let rect: cc.Rect = new cc.Rect();
rect.x = buffer.readInt();
rect.y = buffer.readInt();
rect.width = buffer.readInt();
rect.height = buffer.readInt();
let addDelay = buffer.readInt() / 1000;
let frame: Frame = { rect: rect, addDelay: addDelay };
spriteId = buffer.readS();
if (spriteId != null && (sprite = this._sprites[spriteId]) != null) {
let atlasTexture: cc.Texture2D = <cc.Texture2D>this.getItemAsset(sprite.atlas);
if (atlasTexture) {
let sx: number = item.width / frame.rect.width;
frame.texture = new cc.SpriteFrame(atlasTexture, sprite.rect, sprite.rotated,
new cc.Vec2(frame.rect.x - (item.width - frame.rect.width) / 2, -(frame.rect.y - (item.height - frame.rect.height) / 2)),
new cc.Size(item.width, item.height));
}
}
item.frames[i] = frame;
buffer.position = nextPos;
}
}
        /**
         * Decodes a bitmap-font item into a cc.LabelAtlas: reads the font
         * header, then one glyph record per character into the label's
         * fontDefDictionary, resolving glyph rects against the package's atlas
         * sprites (or the main sprite directly for TTF-baked fonts).
         */
        private loadFont(item: PackageItem): void {
            var font: any = new cc.LabelAtlas();
            item.asset = font;
            font._fntConfig = {
                commonHeight: 0,
                fontSize: 0,
                kerningDict: {},
                fontDefDictionary: {}
            };
            let dict = font._fntConfig.fontDefDictionary;
            var buffer: ByteBuffer = item.rawData;
            buffer.seek(0, 0);
            let ttf = buffer.readBool();
            let canTint = buffer.readBool();
            let resizable = buffer.readBool();
            buffer.readBool(); //has channel
            let fontSize = buffer.readInt();
            var xadvance: number = buffer.readInt();
            var lineHeight: number = buffer.readInt();
            let mainTexture: cc.Texture2D;
            var mainSprite: AtlasSprite = this._sprites[item.id];
            if (mainSprite)
                mainTexture = <cc.Texture2D>(this.getItemAsset(mainSprite.atlas));
            buffer.seek(0, 1);
            var bg: any;
            var cnt: number = buffer.readInt();
            // one glyph record per character
            for (var i: number = 0; i < cnt; i++) {
                var nextPos: number = buffer.readShort();
                nextPos += buffer.position;
                bg = {};
                var ch: number = buffer.readUshort();
                dict[ch] = bg;
                let rect: cc.Rect = new cc.Rect();
                bg.rect = rect;
                var img: string = buffer.readS();
                rect.x = buffer.readInt();
                rect.y = buffer.readInt();
                bg.xOffset = buffer.readInt();
                bg.yOffset = buffer.readInt();
                rect.width = buffer.readInt();
                rect.height = buffer.readInt();
                bg.xAdvance = buffer.readInt();
                bg.channel = buffer.readByte();
                // remap stored channel index; NOTE(review): the '2 -> 2' case
                // is a no-op — confirm whether that is intended
                if (bg.channel == 1)
                    bg.channel = 3;
                else if (bg.channel == 2)
                    bg.channel = 2;
                else if (bg.channel == 3)
                    bg.channel = 1;
                if (ttf) {
                    // TTF-baked: glyph rects are relative to the main sprite
                    rect.x += mainSprite.rect.x;
                    rect.y += mainSprite.rect.y;
                }
                else {
                    // image-glyph font: each glyph references its own sprite
                    let sprite: AtlasSprite = this._sprites[img];
                    if (sprite) {
                        rect.set(sprite.rect);
                        bg.xOffset += sprite.offset.x;
                        bg.yOffset += sprite.offset.y;
                        if (fontSize == 0)
                            fontSize = sprite.originalSize.height;
                        if (!mainTexture) {
                            sprite.atlas.load();
                            mainTexture = <cc.Texture2D>sprite.atlas.asset;
                        }
                    }
                    if (bg.xAdvance == 0) {
                        if (xadvance == 0)
                            bg.xAdvance = bg.xOffset + bg.rect.width;
                        else
                            bg.xAdvance = xadvance;
                    }
                }
                buffer.position = nextPos;
            }
            font.fontSize = fontSize;
            font._fntConfig.fontSize = fontSize;
            font._fntConfig.commonHeight = lineHeight == 0 ? fontSize : lineHeight;
            font._fntConfig.resizable = resizable;
            font._fntConfig.canTint = canTint;
            let spriteFrame = new cc.SpriteFrame();
            spriteFrame.setTexture(mainTexture);
            font.spriteFrame = spriteFrame;
            font.onLoad();
        }
private loadSpine(item: PackageItem): void {
this._bundle.load(item.file, sp.SkeletonData, (err: Error, asset: cc.Asset) => {
item.decoded = true;
item.asset = asset;
let arr = item.loading;
delete item.loading;
arr.forEach(e => e(err, item));
});
}
        /**
         * Loads a DragonBones item: first the skeleton asset, then the matching
         * atlas asset ("_ske" replaced by "_tex", extension forced to .json),
         * and finally flushes the callbacks queued in item.loading.
         * NOTE(review): the inner callback's 'err' parameter shadows the outer
         * one, so after a successful skeleton load only the atlas-load error is
         * reported — presumably intended; confirm.
         */
        private loadDragonBones(item: PackageItem): void {
            this._bundle.load(item.file, dragonBones.DragonBonesAsset, (err: Error, asset: cc.Asset) => {
                if (err) {
                    item.decoded = true;
                    let arr = item.loading;
                    delete item.loading;
                    arr.forEach(e => e(err, item));
                    return;
                }
                item.asset = asset;
                // derive the atlas file name from the skeleton file name
                let atlasFile = item.file.replace("_ske", "_tex");
                let pos = atlasFile.lastIndexOf('.');
                if (pos != -1)
                    atlasFile = atlasFile.substr(0, pos + 1) + "json";
                this._bundle.load(atlasFile, dragonBones.DragonBonesAtlasAsset, (err: Error, asset: cc.Asset) => {
                    item.decoded = true;
                    item.atlasAsset = <dragonBones.DragonBonesAtlasAsset>asset;
                    let arr = item.loading;
                    delete item.loading;
                    arr.forEach(e => e(err, item));
                });
            });
        }
}
    /** A sub-sprite packed inside an atlas texture. */
    interface AtlasSprite {
        atlas: PackageItem;    // the atlas item that owns the texture
        rect: cc.Rect;         // region inside the atlas texture
        offset: cc.Vec2;       // trim offset (explicit only in ver2 packages)
        originalSize: cc.Size; // untrimmed sprite size
        rotated?: boolean;     // whether the region is stored rotated
    }
    // Maps the loadable package item types to the cc asset class used to load them.
    const ItemTypeToAssetType = {
        [PackageItemType.Atlas]: cc.Texture2D,
        [PackageItemType.Sound]: cc.AudioClip
    };
}